repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
truthiswill/intellij-community
|
java/java-impl/src/com/intellij/codeInsight/editorActions/moveUpDown/CaseBlockMover.java
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.editorActions.moveUpDown;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class CaseBlockMover extends StatementUpDownMover {
  // Mover that lets "move statement up/down" operate on a whole switch "case block":
  // the PsiSwitchLabelStatement (plus any directly adjacent labels) and all statements
  // up to the next label or the switch body's closing brace. Moving swaps the block
  // with the neighbouring case block.

  /**
   * Checks whether the selection lies inside a single case block of a Java switch and,
   * if so, fills {@code info.toMove} with that block's line range and {@code info.toMove2}
   * with the adjacent block to swap with (next block when {@code down}, previous otherwise).
   *
   * @return true if the move is available; false to let other movers handle it
   */
  @Override
  public boolean checkAvailable(@NotNull Editor editor, @NotNull PsiFile file, @NotNull MoveInfo info, boolean down) {
    if (!(file instanceof PsiJavaFile)) return false;
    // Resolve the selection boundaries to the nearest non-whitespace PSI elements.
    PsiElement startElement = firstNonWhiteElement(editor.getSelectionModel().getSelectionStart(), file, true);
    if (startElement == null) return false;
    PsiElement endElement = firstNonWhiteElement(editor.getSelectionModel().getSelectionEnd(), file, false);
    if (endElement == null) return false;
    // The whole selection must sit under one switch label; otherwise this mover does not apply.
    PsiSwitchLabelStatement caseStatement = PsiTreeUtil.getParentOfType(PsiTreeUtil.findCommonParent(startElement, endElement),
                                                                        PsiSwitchLabelStatement.class, false);
    if (caseStatement == null) return false;
    // Range to move: from this block's first label up to (but excluding) the next block's start.
    PsiElement firstToMove = getThisCaseBlockStart(caseStatement);
    PsiElement nextCaseBlockStart = getNextCaseBlockStart(caseStatement);
    PsiElement lastToMove = PsiTreeUtil.skipWhitespacesBackward(nextCaseBlockStart);
    assert lastToMove != null;
    LineRange range = createRange(editor.getDocument(), firstToMove, lastToMove);
    if (range == null) return info.prohibitMove();
    info.toMove = range;
    PsiElement firstToMove2;
    PsiElement lastToMove2;
    if (down) {
      // Swap with the following case block; prohibited when this block is the last one
      // (next start is the closing brace) or when the "next" label is our own label.
      if (!(nextCaseBlockStart instanceof PsiSwitchLabelStatement) || nextCaseBlockStart == caseStatement) return info.prohibitMove();
      firstToMove2 = nextCaseBlockStart;
      nextCaseBlockStart = getNextCaseBlockStart((PsiSwitchLabelStatement)firstToMove2);
      lastToMove2 = PsiTreeUtil.skipWhitespacesBackward(nextCaseBlockStart);
      assert lastToMove2 != null;
    }
    else {
      // Swap with the preceding case block; prohibited when this block is the first one.
      lastToMove2 = PsiTreeUtil.skipWhitespacesBackward(firstToMove);
      if (lastToMove2 == null) return info.prohibitMove();
      firstToMove2 = PsiTreeUtil.getPrevSiblingOfType(lastToMove2, PsiSwitchLabelStatement.class);
      if (firstToMove2 == null) return info.prohibitMove();
      firstToMove2 = getThisCaseBlockStart((PsiSwitchLabelStatement)firstToMove2);
    }
    LineRange range2 = createRange(editor.getDocument(), firstToMove2, lastToMove2);
    if (range2 == null) return info.prohibitMove();
    info.toMove2 = range2;
    return true;
  }

  // returns PsiSwitchLabelStatement starting this case block
  // (walks backwards over labels separated only by whitespace, e.g. "case 1: case 2:").
  @NotNull
  private static PsiSwitchLabelStatement getThisCaseBlockStart(PsiSwitchLabelStatement element) {
    PsiElement tmp;
    while ((tmp = PsiTreeUtil.skipWhitespacesBackward(element)) instanceof PsiSwitchLabelStatement) {
      element = (PsiSwitchLabelStatement)tmp;
    }
    return element;
  }

  // returns PsiSwitchLabelStatement starting next case block, or switch block's closing brace, if there is no next case block
  @NotNull
  private static PsiElement getNextCaseBlockStart(PsiSwitchLabelStatement element) {
    PsiElement result = element;
    PsiElement tmp;
    // First skip the labels that belong to THIS block (adjacent, whitespace-separated).
    while ((tmp = PsiTreeUtil.skipWhitespacesForward(result)) instanceof PsiSwitchLabelStatement) {
      result = tmp;
    }
    tmp = PsiTreeUtil.getNextSiblingOfType(result, PsiSwitchLabelStatement.class);
    // No further label: block extends to the switch body's last child (the closing brace).
    return tmp == null ? result.getParent().getLastChild() : tmp;
  }

  /**
   * Builds a {@link LineRange} covering startElement..endElement, or returns null when either
   * element shares its line with other non-whitespace text (moving whole lines would then
   * drag along unrelated code).
   */
  @Nullable
  private static LineRange createRange(@NotNull Document document, @NotNull PsiElement startElement, @NotNull PsiElement endElement) {
    CharSequence text = document.getImmutableCharSequence();
    int startOffset = startElement.getTextRange().getStartOffset();
    int startLine = document.getLineNumber(startOffset);
    // Only whitespace may precede the start element on its line.
    if (!CharArrayUtil.isEmptyOrSpaces(text, document.getLineStartOffset(startLine), startOffset)) {
      return null;
    }
    int endOffset = endElement.getTextRange().getEndOffset();
    int endLine = document.getLineNumber(endOffset);
    // Only whitespace may follow the end element on its line.
    if (!CharArrayUtil.isEmptyOrSpaces(text, endOffset, document.getLineEndOffset(endLine))) {
      return null;
    }
    // LineRange end is exclusive.
    return new LineRange(startLine, endLine + 1);
  }
}
|
ScalablyTyped/SlinkyTyped
|
o/office-js-preview/src/main/scala/typingsSlinky/officeJsPreview/Excel/Interfaces/TableColumnData.scala
|
<gh_stars>10-100
package typingsSlinky.officeJsPreview.Excel.Interfaces
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** An interface describing the data returned by calling `tableColumn.toJSON()`. */
// NOTE(review): this appears to be ScalablyTyped-generated binding code for the
// office-js-preview typings — prefer regenerating over hand-editing.
@js.native
trait TableColumnData extends StObject {

  /**
    *
    * Retrieve the filter applied to the column.
    *
    * [Api set: ExcelApi 1.2]
    */
  var filter: js.UndefOr[FilterData] = js.native

  /**
    *
    * Returns a unique key that identifies the column within the table.
    *
    * [Api set: ExcelApi 1.1]
    */
  var id: js.UndefOr[Double] = js.native

  /**
    *
    * Returns the index number of the column within the columns collection of the table. Zero-indexed.
    *
    * [Api set: ExcelApi 1.1]
    */
  var index: js.UndefOr[Double] = js.native

  /**
    *
    * Specifies the name of the table column.
    *
    * [Api set: ExcelApi 1.1 for getting the name; 1.4 for setting it.]
    */
  var name: js.UndefOr[String] = js.native

  /**
    *
    * Represents the raw values of the specified range. The data returned could be of type string, number, or a boolean. Cells that contain an error will return the error string.
    *
    * [Api set: ExcelApi 1.1]
    */
  var values: js.UndefOr[js.Array[js.Array[_]]] = js.native
}
object TableColumnData {

  /** Creates an empty `TableColumnData` JS object literal; all fields start undefined
    * and can be populated via the mutable-builder syntax below.
    */
  @scala.inline
  def apply(): TableColumnData = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[TableColumnData]
  }

  /** Fluent setters for each optional field; each `setX` mutates the underlying JS
    * object in place and returns `x` for chaining, and `setXUndefined` clears it.
    */
  @scala.inline
  implicit class TableColumnDataMutableBuilder[Self <: TableColumnData] (val x: Self) extends AnyVal {

    @scala.inline
    def setFilter(value: FilterData): Self = StObject.set(x, "filter", value.asInstanceOf[js.Any])

    @scala.inline
    def setFilterUndefined: Self = StObject.set(x, "filter", js.undefined)

    @scala.inline
    def setId(value: Double): Self = StObject.set(x, "id", value.asInstanceOf[js.Any])

    @scala.inline
    def setIdUndefined: Self = StObject.set(x, "id", js.undefined)

    @scala.inline
    def setIndex(value: Double): Self = StObject.set(x, "index", value.asInstanceOf[js.Any])

    @scala.inline
    def setIndexUndefined: Self = StObject.set(x, "index", js.undefined)

    @scala.inline
    def setName(value: String): Self = StObject.set(x, "name", value.asInstanceOf[js.Any])

    @scala.inline
    def setNameUndefined: Self = StObject.set(x, "name", js.undefined)

    @scala.inline
    def setValues(value: js.Array[js.Array[_]]): Self = StObject.set(x, "values", value.asInstanceOf[js.Any])

    @scala.inline
    def setValuesUndefined: Self = StObject.set(x, "values", js.undefined)

    // Varargs convenience wrapper around setValues.
    @scala.inline
    def setValuesVarargs(value: js.Array[js.Any]*): Self = StObject.set(x, "values", js.Array(value :_*))
  }
}
|
sirinath/Harmony
|
classlib/modules/security/src/test/impl/java/org/apache/harmony/security/tests/UnresolvedPrincipalTest.java
|
<reponame>sirinath/Harmony<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author <NAME>
*/
package org.apache.harmony.security.tests;
import java.security.Principal;
import org.apache.harmony.security.UnresolvedPrincipal;
import junit.framework.TestCase;
/**
* Tests for <code>UnresolvedPrincipal</code>
*
*/
public class UnresolvedPrincipalTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(UnresolvedPrincipalTest.class);
}
public void testCtor() {
String klass = "abc";
String name = "NAME";
UnresolvedPrincipal up = new UnresolvedPrincipal(klass, name);
assertEquals(klass, up.getClassName());
assertEquals(name, up.getName());
up = new UnresolvedPrincipal(klass, null);
assertEquals(klass, up.getClassName());
assertNull(up.getName());
try {
up = new UnresolvedPrincipal(null, name);
fail("No IllegalArgumentException is thrown");
} catch (IllegalArgumentException ok) {
}
}
public void testEquals_Principal() {
String name = "sgrt";
FakePrincipal fp = new FakePrincipal(name);
assertTrue(new UnresolvedPrincipal(FakePrincipal.class.getName(), name)
.equals(fp));
assertTrue(new UnresolvedPrincipal(UnresolvedPrincipal.WILDCARD, name)
.equals(fp));
assertTrue(new UnresolvedPrincipal(FakePrincipal.class.getName(),
UnresolvedPrincipal.WILDCARD).equals(fp));
assertFalse(new UnresolvedPrincipal(FakePrincipal.class.getName(),
"sdkljfgbkwe").equals(fp));
}
public void testEquals_Common() {
String klass = "abc";
String name = "NAME";
UnresolvedPrincipal up = new UnresolvedPrincipal(klass, name);
UnresolvedPrincipal up2 = new UnresolvedPrincipal(klass, name);
UnresolvedPrincipal up3 = new UnresolvedPrincipal(name, klass);
assertTrue(up.equals(up));
assertTrue(up.equals(up2));
assertEquals(up.hashCode(), up2.hashCode());
assertFalse(up.equals(up3));
assertFalse(up.equals(null));
assertFalse(up.equals(new Object()));
}
public void testImplies() {
String name = "sgrt";
FakePrincipal fp = new FakePrincipal(name);
assertTrue(new UnresolvedPrincipal(FakePrincipal.class.getName(), name)
.implies(fp));
assertTrue(new UnresolvedPrincipal(UnresolvedPrincipal.WILDCARD, name)
.implies(fp));
assertTrue(new UnresolvedPrincipal(FakePrincipal.class.getName(),
UnresolvedPrincipal.WILDCARD).implies(fp));
assertTrue(new UnresolvedPrincipal(UnresolvedPrincipal.WILDCARD,
UnresolvedPrincipal.WILDCARD).implies(fp));
assertFalse(new UnresolvedPrincipal(
UnresolvedPrincipal.class.getName(), name).implies(fp));
assertFalse(new UnresolvedPrincipal(FakePrincipal.class.getName(),
"hgfuytr765").implies(fp));
}
}
/** Minimal Principal implementation used as an equals/implies target in the tests above. */
class FakePrincipal implements Principal {

    private final String name;

    public FakePrincipal(String name) {
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}
|
ooooo-youwillsee/leetcode
|
lcof_021/cpp_021/Solution1.h
|
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/3/15.
//
#ifndef CPP_021__SOLUTION1_H_
#define CPP_021__SOLUTION1_H_
#include <iostream>
#include <vector>
using namespace std;
/**
 * Two-pointer in-place partition ("双指针"): reorder the array so that every odd
 * value precedes every even value. Relative order within each group is not preserved.
 */
class Solution {
 public:
  // Partitions nums in place (odds first, evens last) and returns the same vector.
  std::vector<int> exchange(std::vector<int> &nums) {
    int left = 0;
    int right = static_cast<int>(nums.size()) - 1;
    while (left < right) {
      if (nums[left] % 2 != 0) {  // already odd: keep on the left side
        ++left;
        continue;
      }
      if (nums[right] % 2 == 0) {  // already even: keep on the right side
        --right;
        continue;
      }
      // left points at an even value, right at an odd one: exchange them.
      std::swap(nums[left++], nums[right--]);
    }
    return nums;
  }
};
#endif //CPP_021__SOLUTION1_H_
|
Orcthanc/Kestrel
|
Kestrel/src/Platform/Vulkan/VKTerrainRenderer.hpp
|
#pragma once
#include <kstpch.hpp>
#include "Renderer/Terrain.hpp"
#include "Renderer/Mesh.hpp"
#include "Renderer/Material.hpp"
#include "Scene/Components.hpp"
namespace Kestrel {
	struct KST_VK_CameraRenderer;

	// Vulkan terrain renderer, exposed as a process-wide singleton via get().
	// Holds the mesh and material used for terrain draws.
	struct KST_VK_TerrainRenderer {
		public:
			// One-time setup of the singleton's resources.
			static void init();
			// Records terrain draw commands for the given camera renderer,
			// using the entity's transform and terrain data.
			void drawTerrain( KST_VK_CameraRenderer* renderer, const TransformComponent& transform, const Terrain& terrain );

			// Accessor for the singleton instance (defined in the .cpp).
			static KST_VK_TerrainRenderer& get(){
				return instance;
			}
		private:
			Mesh terrain_mesh;
			Material terrain_material;
			static KST_VK_TerrainRenderer instance;
	};
}
|
Oaticus/Deltatron
|
src/render/src/render/gpu/util/result_to_string.cc
|
<gh_stars>0
#include <dt/render/gpu/util/result_to_string.hh>
// Maps a VkResult error code to a human-readable, quoted description for logging.
// Codes not listed here (including all success codes) map to "Undefined error".
// Fix: two messages previously leaked the enum's "_EXT" suffix / awkward wording
// ("...plane layout extension", "Error not permitted").
std::string dt::vk_result_to_string(VkResult const& result) noexcept {
  switch (result) {
    case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "\"Out of device memory\"";
    case VK_ERROR_INITIALIZATION_FAILED: return "\"Initialization failed\"";
    case VK_ERROR_DEVICE_LOST: return "\"Device lost\"";
    case VK_ERROR_MEMORY_MAP_FAILED: return "\"Memory map failed\"";
    case VK_ERROR_LAYER_NOT_PRESENT: return "\"Layer not present\"";
    case VK_ERROR_EXTENSION_NOT_PRESENT: return "\"Extension not present\"";
    case VK_ERROR_FEATURE_NOT_PRESENT: return "\"Feature not present\"";
    case VK_ERROR_INCOMPATIBLE_DRIVER: return "\"Incompatible driver\"";
    case VK_ERROR_TOO_MANY_OBJECTS: return "\"Too many objects\"";
    case VK_ERROR_FORMAT_NOT_SUPPORTED: return "\"Format not supported\"";
    case VK_ERROR_FRAGMENTED_POOL: return "\"Fragmented pool\"";
    case VK_ERROR_UNKNOWN: return "\"Unknown error\"";
    case VK_ERROR_OUT_OF_POOL_MEMORY: return "\"Out of pool memory\"";
    case VK_ERROR_INVALID_EXTERNAL_HANDLE: return "\"Invalid external handle\"";
    case VK_ERROR_FRAGMENTATION: return "\"Fragmentation\"";
    case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: return "\"Invalid opaque capture address\"";
    case VK_ERROR_SURFACE_LOST_KHR: return "\"Surface lost\"";
    case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "\"Native window in use\"";
    case VK_ERROR_OUT_OF_DATE_KHR: return "\"Out of date\"";
    case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "\"Incompatible display\"";
    case VK_ERROR_VALIDATION_FAILED_EXT: return "\"Validation failed\"";
    case VK_ERROR_INVALID_SHADER_NV: return "\"Invalid shader\"";
    case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: return "\"Invalid DRM format modifier plane layout\"";
    case VK_ERROR_NOT_PERMITTED_EXT: return "\"Not permitted\"";
    case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: return "\"Full screen exclusive mode lost\"";
    default: return "\"Undefined error\"";
  }
}
|
silesiacoin/eth2-docker-compose
|
.docker/Prysm/prysm-spike/beacon-chain/sync/initial-sync/blocks_fetcher_utils.go
|
package initialsync
import (
"context"
"fmt"
"sort"
"github.com/libp2p/go-libp2p-core/peer"
"github.com/pkg/errors"
eth "github.com/prysmaticlabs/ethereumapis/eth/v1alpha1"
"github.com/prysmaticlabs/prysm/beacon-chain/core/helpers"
"github.com/prysmaticlabs/prysm/beacon-chain/flags"
p2pTypes "github.com/prysmaticlabs/prysm/beacon-chain/p2p/types"
p2ppb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/bytesutil"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/sirupsen/logrus"
"go.opencensus.io/trace"
)
// forkData represents alternative chain path supported by a given peer.
// Blocks are stored in an ascending slot order. The first block is guaranteed to have parent
// either in DB or initial sync cache.
type forkData struct {
	peer   peer.ID                  // peer that served the alternative branch
	blocks []*eth.SignedBeaconBlock // fork blocks, ascending by slot
}
// nonSkippedSlotAfter checks slots after the given one in an attempt to find a non-empty future slot.
// For efficiency only one random slot is checked per epoch, so returned slot might not be the first
// non-skipped slot. This shouldn't be a problem, as in case of adversary peer, we might get incorrect
// data anyway, so code that relies on this function must be robust enough to re-request, if no progress
// is possible with a returned value.
func (f *blocksFetcher) nonSkippedSlotAfter(ctx context.Context, slot uint64) (uint64, error) {
	ctx, span := trace.StartSpan(ctx, "initialsync.nonSkippedSlotAfter")
	defer span.End()
	headEpoch, targetEpoch, peers := f.calculateHeadAndTargetEpochs()
	log.WithFields(logrus.Fields{
		"start":       slot,
		"headEpoch":   headEpoch,
		"targetEpoch": targetEpoch,
	}).Debug("Searching for non-skipped slot")
	// Exit early if no peers with epoch higher than our known head are found.
	if targetEpoch <= headEpoch {
		return 0, errSlotIsTooHigh
	}
	// Transform peer list to avoid eclipsing (filter, shuffle, trim).
	peers = f.filterPeers(ctx, peers, peersPercentagePerRequest)
	// Delegate the actual search to the target-aware variant below.
	return f.nonSkippedSlotAfterWithPeersTarget(ctx, slot, peers, targetEpoch)
}
// nonSkippedSlotWithPeersTarget traverse peers (supporting a given target epoch), in an attempt
// to find non-skipped slot among returned blocks.
func (f *blocksFetcher) nonSkippedSlotAfterWithPeersTarget(
	ctx context.Context, slot uint64, peers []peer.ID, targetEpoch uint64,
) (uint64, error) {
	// Exit early if no peers are ready.
	if len(peers) == 0 {
		return 0, errNoPeersAvailable
	}
	slotsPerEpoch := params.BeaconConfig().SlotsPerEpoch
	pidInd := 0
	// fetch requests the range [start, start+count*step) from a single peer and returns
	// the first returned block slot strictly greater than `slot` (0 when none found).
	fetch := func(pid peer.ID, start, count, step uint64) (uint64, error) {
		req := &p2ppb.BeaconBlocksByRangeRequest{
			StartSlot: start,
			Count:     count,
			Step:      step,
		}
		blocks, err := f.requestBlocks(ctx, req, pid)
		if err != nil {
			return 0, err
		}
		if len(blocks) > 0 {
			for _, block := range blocks {
				if block.Block.Slot > slot {
					return block.Block.Slot, nil
				}
			}
		}
		return 0, nil
	}
	// Start by checking several epochs fully, w/o resorting to random sampling.
	start := slot + 1
	end := start + nonSkippedSlotsFullSearchEpochs*slotsPerEpoch
	for ind := start; ind < end; ind += slotsPerEpoch {
		// Peers are rotated round-robin, one epoch per peer.
		nextSlot, err := fetch(peers[pidInd%len(peers)], ind, slotsPerEpoch, 1)
		if err != nil {
			return 0, err
		}
		if nextSlot > slot {
			return nextSlot, nil
		}
		pidInd++
	}
	// Quickly find the close enough epoch where a non-empty slot definitely exists.
	// Only single random slot per epoch is checked - allowing to move forward relatively quickly.
	slot = slot + nonSkippedSlotsFullSearchEpochs*slotsPerEpoch
	upperBoundSlot, err := helpers.StartSlot(targetEpoch + 1)
	if err != nil {
		return 0, err
	}
	for ind := slot + 1; ind < upperBoundSlot; ind += (slotsPerEpoch * slotsPerEpoch) / 2 {
		// Sample one random slot within the window to probe for a non-empty block.
		start := ind + uint64(f.rand.Intn(int(slotsPerEpoch)))
		nextSlot, err := fetch(peers[pidInd%len(peers)], start, slotsPerEpoch/2, slotsPerEpoch)
		if err != nil {
			return 0, err
		}
		pidInd++
		// Tighten the upper bound as soon as a candidate non-empty slot is seen.
		if nextSlot > slot && upperBoundSlot >= nextSlot {
			upperBoundSlot = nextSlot
			break
		}
	}
	// Epoch with non-empty slot is located. Check all slots within two nearby epochs.
	if upperBoundSlot > slotsPerEpoch {
		upperBoundSlot -= slotsPerEpoch
	}
	upperBoundSlot, err = helpers.StartSlot(helpers.SlotToEpoch(upperBoundSlot))
	if err != nil {
		return 0, err
	}
	nextSlot, err := fetch(peers[pidInd%len(peers)], upperBoundSlot, slotsPerEpoch*2, 1)
	if err != nil {
		return 0, err
	}
	// Sanity check: the result must lie within (slot, start of targetEpoch+1].
	s, err := helpers.StartSlot(targetEpoch + 1)
	if err != nil {
		return 0, err
	}
	if nextSlot < slot || s < nextSlot {
		return 0, errors.New("invalid range for non-skipped slot")
	}
	return nextSlot, nil
}
// findFork queries all peers that have higher head slot, in an attempt to find
// ones that feature blocks from alternative branches. Once found, peer is further queried
// to find common ancestor slot. On success, all obtained blocks and peer is returned.
func (f *blocksFetcher) findFork(ctx context.Context, slot uint64) (*forkData, error) {
	ctx, span := trace.StartSpan(ctx, "initialsync.findFork")
	defer span.End()
	// Safe-guard, since previous epoch is used when calculating.
	slotsPerEpoch := params.BeaconConfig().SlotsPerEpoch
	if slot < slotsPerEpoch*2 {
		return nil, fmt.Errorf("slot is too low to backtrack, min. expected %d", slotsPerEpoch*2)
	}
	// The current slot's epoch must be after the finalization epoch,
	// triggering backtracking on earlier epochs is unnecessary.
	finalizedEpoch := f.chain.FinalizedCheckpt().Epoch
	epoch := helpers.SlotToEpoch(slot)
	if epoch <= finalizedEpoch {
		return nil, errors.New("slot is not after the finalized epoch, no backtracking is necessary")
	}
	// Update slot to the beginning of the current epoch (preserve original slot for comparison).
	slot, err := helpers.StartSlot(epoch)
	if err != nil {
		return nil, err
	}
	// Select peers that have higher head slot, and potentially blocks from more favourable fork.
	// Exit early if no peers are ready.
	_, peers := f.p2p.Peers().BestNonFinalized(1, epoch+1)
	if len(peers) == 0 {
		return nil, errNoPeersAvailable
	}
	// Shuffle so repeated calls do not always probe the same peer first.
	f.rand.Shuffle(len(peers), func(i, j int) {
		peers[i], peers[j] = peers[j], peers[i]
	})
	// Query all found peers, stop on peer with alternative blocks, and try backtracking.
	for i, pid := range peers {
		log.WithFields(logrus.Fields{
			"peer": pid,
			"step": fmt.Sprintf("%d/%d", i+1, len(peers)),
		}).Debug("Searching for alternative blocks")
		fork, err := f.findForkWithPeer(ctx, pid, slot)
		if err != nil {
			// Peer has no usable alternative chain; continue with the next one.
			log.WithFields(logrus.Fields{
				"peer":  pid,
				"error": err.Error(),
			}).Debug("No alternative blocks found for peer")
			continue
		}
		return fork, nil
	}
	return nil, errNoPeersWithAltBlocks
}
// findForkWithPeer loads some blocks from a peer in an attempt to find alternative blocks.
func (f *blocksFetcher) findForkWithPeer(ctx context.Context, pid peer.ID, slot uint64) (*forkData, error) {
	// Safe-guard, since previous epoch is used when calculating.
	slotsPerEpoch := params.BeaconConfig().SlotsPerEpoch
	if slot < slotsPerEpoch*2 {
		return nil, fmt.Errorf("slot is too low to backtrack, min. expected %d", slotsPerEpoch*2)
	}
	// Locate non-skipped slot, supported by a given peer (can survive long periods of empty slots).
	// When searching for non-empty slot, start an epoch earlier - for those blocks we
	// definitely have roots. So, spotting a fork will be easier. It is not a problem if unknown
	// block of the current fork is found: we are searching for forks when FSMs are stuck, so
	// being able to progress on any fork is good.
	pidState, err := f.p2p.Peers().ChainState(pid)
	if err != nil {
		return nil, fmt.Errorf("cannot obtain peer's status: %w", err)
	}
	nonSkippedSlot, err := f.nonSkippedSlotAfterWithPeersTarget(
		ctx, slot-slotsPerEpoch, []peer.ID{pid}, helpers.SlotToEpoch(pidState.HeadSlot))
	if err != nil {
		return nil, fmt.Errorf("cannot locate non-empty slot for a peer: %w", err)
	}
	// Request blocks starting from the first non-empty slot.
	req := &p2ppb.BeaconBlocksByRangeRequest{
		StartSlot: nonSkippedSlot,
		Count:     slotsPerEpoch * 2,
		Step:      1,
	}
	blocks, err := f.requestBlocks(ctx, req, pid)
	if err != nil {
		return nil, fmt.Errorf("cannot fetch blocks: %w", err)
	}
	// Traverse blocks, and if we've got one that doesn't have parent in DB, backtrack on it.
	for i, block := range blocks {
		parentRoot := bytesutil.ToBytes32(block.Block.ParentRoot)
		if !f.db.HasBlock(ctx, parentRoot) && !f.chain.HasInitSyncBlock(parentRoot) {
			log.WithFields(logrus.Fields{
				"peer": pid,
				"slot": block.Block.Slot,
				"root": fmt.Sprintf("%#x", parentRoot),
			}).Debug("Block with unknown parent root has been found")
			// Backtrack only if the first block is diverging,
			// otherwise we already know the common ancestor slot.
			if i == 0 {
				// Backtrack on a root, to find a common ancestor from which we can resume syncing.
				fork, err := f.findAncestor(ctx, pid, block)
				if err != nil {
					return nil, fmt.Errorf("failed to find common ancestor: %w", err)
				}
				return fork, nil
			}
			// Divergence occurred mid-batch: everything fetched is the alternative branch.
			return &forkData{peer: pid, blocks: blocks}, nil
		}
	}
	return nil, errors.New("no alternative blocks exist within scanned range")
}
// findAncestor tries to figure out common ancestor slot that connects a given root to known block.
func (f *blocksFetcher) findAncestor(ctx context.Context, pid peer.ID, block *eth.SignedBeaconBlock) (*forkData, error) {
	outBlocks := []*eth.SignedBeaconBlock{block}
	// Walk parent links (at most backtrackingMaxHops hops) until a known block is reached.
	for i := uint64(0); i < backtrackingMaxHops; i++ {
		parentRoot := bytesutil.ToBytes32(outBlocks[len(outBlocks)-1].Block.ParentRoot)
		if f.db.HasBlock(ctx, parentRoot) || f.chain.HasInitSyncBlock(parentRoot) {
			// Common ancestor found, forward blocks back to processor.
			sort.Slice(outBlocks, func(i, j int) bool {
				return outBlocks[i].Block.Slot < outBlocks[j].Block.Slot
			})
			return &forkData{
				peer:   pid,
				blocks: outBlocks,
			}, nil
		}
		// Request block's parent.
		req := &p2pTypes.BeaconBlockByRootsReq{parentRoot}
		blocks, err := f.requestBlocksByRoot(ctx, req, pid)
		if err != nil {
			return nil, err
		}
		if len(blocks) == 0 {
			// Peer cannot serve the parent; abandon this branch.
			break
		}
		outBlocks = append(outBlocks, blocks[0])
	}
	return nil, errors.New("no common ancestor found")
}
// bestFinalizedSlot returns the highest finalized slot of the majority of connected peers.
func (f *blocksFetcher) bestFinalizedSlot() uint64 {
	cfg := params.BeaconConfig()
	finalizedEpoch, _ := f.p2p.Peers().BestFinalized(cfg.MaxPeersToSync, f.chain.FinalizedCheckpt().Epoch)
	return finalizedEpoch * cfg.SlotsPerEpoch
}
// bestNonFinalizedSlot returns the highest non-finalized slot of enough number of connected peers.
func (f *blocksFetcher) bestNonFinalizedSlot() uint64 {
	cfg := params.BeaconConfig()
	targetEpoch, _ := f.p2p.Peers().BestNonFinalized(flags.Get().MinimumSyncPeers*2, helpers.SlotToEpoch(f.chain.HeadSlot()))
	return targetEpoch * cfg.SlotsPerEpoch
}
// calculateHeadAndTargetEpochs return node's current head epoch, along with the best known target
// epoch. For the latter peers supporting that target epoch are returned as well.
func (f *blocksFetcher) calculateHeadAndTargetEpochs() (uint64, uint64, []peer.ID) {
	if f.mode == modeStopOnFinalizedEpoch {
		// Finalized-mode: head is our finalized epoch; target is peers' best finalized epoch.
		head := f.chain.FinalizedCheckpt().Epoch
		target, peers := f.p2p.Peers().BestFinalized(params.BeaconConfig().MaxPeersToSync, head)
		return head, target, peers
	}
	// Non-finalized mode: head is the epoch of our head slot; target is peers' best non-finalized epoch.
	head := helpers.SlotToEpoch(f.chain.HeadSlot())
	target, peers := f.p2p.Peers().BestNonFinalized(flags.Get().MinimumSyncPeers, head)
	return head, target, peers
}
|
spectral-lab/spectral-extractor
|
src/constants/midi-export-options.js
|
// Option constants selecting the scope of a MIDI export.

/** Export all tracks. */
export const ALL_TRACKS = 'ALL_TRACKS';
/** Export only selected tracks. */
export const SELECTED_TRACKS = 'SELECTED_TRACKS';
/** Export all clips. */
export const ALL_CLIPS = 'ALL_CLIPS';
/** Export only selected clips. */
export const SELECTED_CLIPS = 'SELECTED_CLIPS';
|
thinkharderdev/zio-flow
|
zio-flow/shared/src/main/scala/zio/flow/remote/RemoteExecutingFlowSyntax.scala
|
/*
* Copyright 2021-2022 <NAME> and the ZIO Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package zio.flow.remote
import zio.flow.{ActivityError, ExecutingFlow, FlowId, Remote, ZFlow}
// Syntax extensions for `Remote[A]` values that hold an `ExecutingFlow` handle
// (the `ev` evidence restricts each method to such values).
class RemoteExecutingFlowSyntax[A](self: Remote[A]) {

  /** Identifier of the executing flow.
    * NOTE(review): unimplemented — the body is `???`, so calling this throws NotImplementedError.
    */
  def flowId[E, A2](implicit ev: A <:< ExecutingFlow[E, A2]): Remote[FlowId] = ???

  /** Awaits the executing flow's outcome via `ZFlow.Await`, yielding `Left(error)` or `Right(result)`. */
  def await[E, A2](implicit ev: A <:< ExecutingFlow[E, A2]): ZFlow[Any, ActivityError, Either[E, A2]] =
    ZFlow.Await(self.widen[ExecutingFlow[E, A2]])

  /** Requests interruption of the executing flow via `ZFlow.Interrupt`. */
  def interrupt[E, A2](implicit ev: A <:< ExecutingFlow[E, A2]): ZFlow[Any, ActivityError, Any] =
    ZFlow.Interrupt(self.widen[ExecutingFlow[E, A2]])
}
|
eye0fra/credscontroller
|
vendor/github.com/hashicorp/vault/command/unwrap.go
|
package command
import (
"flag"
"fmt"
"strings"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/meta"
)
// UnwrapCommand is a Command that behaves like ReadCommand but specifically
// for unwrapping cubbyhole-wrapped secrets
type UnwrapCommand struct {
	meta.Meta // embedded CLI plumbing; supplies FlagSet, Ui and Client used by Run
}
// Run parses flags and the optional wrapping-token-ID argument, unwraps the
// referenced secret, and prints it in the requested format. Returns a process
// exit code: 0 on success, 1 on usage/API errors, 2 on client-init errors.
func (c *UnwrapCommand) Run(args []string) int {
	var format string
	var field string
	var err error
	var secret *api.Secret
	var flags *flag.FlagSet
	flags = c.Meta.FlagSet("unwrap", meta.FlagSetDefault)
	flags.StringVar(&format, "format", "table", "")
	flags.StringVar(&field, "field", "", "")
	flags.Usage = func() { c.Ui.Error(c.Help()) }
	if err := flags.Parse(args); err != nil {
		return 1
	}
	// Zero or one positional argument: the wrapping token ID. With no argument,
	// tokenID stays "" (presumably the client then uses its own token — TODO confirm
	// against the api.Logical().Unwrap implementation).
	var tokenID string
	args = flags.Args()
	switch len(args) {
	case 0:
	case 1:
		tokenID = args[0]
	default:
		c.Ui.Error("Unwrap expects zero or one argument (the ID of the wrapping token)")
		flags.Usage()
		return 1
	}
	client, err := c.Client()
	if err != nil {
		c.Ui.Error(fmt.Sprintf(
			"Error initializing client: %s", err))
		return 2
	}
	secret, err = client.Logical().Unwrap(tokenID)
	if err != nil {
		c.Ui.Error(err.Error())
		return 1
	}
	if secret == nil {
		c.Ui.Error("Server gave empty response or secret returned was empty")
		return 1
	}
	// Handle single field output
	if field != "" {
		return PrintRawField(c.Ui, secret, field)
	}
	// Check if the original was a list response and format as a list if so
	if secret.Data != nil &&
		len(secret.Data) == 1 &&
		secret.Data["keys"] != nil {
		_, ok := secret.Data["keys"].([]interface{})
		if ok {
			return OutputList(c.Ui, format, secret)
		}
	}
	return OutputSecret(c.Ui, format, secret)
}
// Synopsis returns the one-line description shown in the CLI command listing.
func (c *UnwrapCommand) Synopsis() string {
	return "Unwrap a wrapped secret"
}
// Help returns the full usage text for "vault unwrap"; the general-options
// section is composed at runtime from meta.GeneralOptionsUsage().
func (c *UnwrapCommand) Help() string {
	helpText := `
Usage: vault unwrap [options] <wrapping token ID>
Unwrap a wrapped secret.
Unwraps the data wrapped by the given token ID. The returned result is the
same as a 'read' operation on a non-wrapped secret.
General Options:
` + meta.GeneralOptionsUsage() + `
Read Options:
-format=table The format for output. By default it is a whitespace-
delimited table. This can also be json or yaml.
-field=field If included, the raw value of the specified field
will be output raw to stdout.
`
	return strings.TrimSpace(helpText)
}
|
vgauri1797/Eclipse
|
WebAppBuilderForArcGIS/server/apps/2/widgets/Analysis/nls/ru/strings.js
|
<reponame>vgauri1797/Eclipse
// Russian (ru) locale strings for the Analysis widget (AMD nls bundle).
// Fix: privilegeError's anchor tag was missing the "=" in href="..." — the
// rendered link was broken.
// NOTE(review): privilegeError is still untranslated English — needs a ru translation.
define({
  "_widgetLabel": "Анализ",
  "executeAnalysisTip": "Щелкните инструмент анализа для выполнения",
  "noToolTip": "Нет настроенного инструмента анализа!",
  "jobSubmitted": "отправлено.",
  "jobCancelled": "Остановлено.",
  "jobFailed": "Сбой",
  "jobSuccess": "Успешно.",
  "executing": "Выполнение",
  "cancelJob": "Отмена задачи анализа",
  "paramName": "Имя параметра",
  "learnMore": "Более подробно",
  "outputtip": "Примечание: Выходные объекты и таблицы добавляются на карту как рабочие слои.",
  "outputSaveInPortal": "Данные сохранены на портале.",
  "privilegeError": "Your user role cannot perform analysis. In order to perform analysis, the administrator of your organization needs to grant you certain <a href=\"http://doc.arcgis.com/en/arcgis-online/reference/roles.htm\" target=\"_blank\">privileges</a>."
});
|
ManfredVon/poem
|
app/src/main/java/com/fmf/poem/fragment/BasePoemFragment.java
|
<reponame>ManfredVon/poem
package com.fmf.poem.fragment;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.ListFragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.AsyncTaskLoader;
import android.support.v4.content.Loader;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.CursorAdapter;
import android.widget.Toast;
import com.fmf.poem.R;
import com.fmf.poem.poem.PoemConstant;
import com.fmf.poem.poem.PoemLog;
public abstract class BasePoemFragment extends ListFragment implements LoaderManager.LoaderCallbacks<Cursor> {
    // List adapter backing this fragment; created by the subclass via onCreateCursorAdapter().
    protected CursorAdapter adapter;
    // Cached in init() and reused by query() to restart the cursor loader.
    private LoaderManager loaderManager;

    public BasePoemFragment() {
        PoemLog.i(this, "constructor");
    }

    /** Runs the actual query on a background thread; {@code text} is an optional filter. */
    protected abstract Cursor onQuery(@Nullable String text);

    /** Creates the CursorAdapter used to render query results. */
    protected abstract CursorAdapter onCreateCursorAdapter();

    /** Deletes the row with the given id (return value presumably the number of affected rows — TODO confirm in subclasses). */
    protected abstract int onDelete(long id);

    /** Text shown when the list is empty; subclasses may override (defaults to ""). */
    protected CharSequence onCreateEmptyText() {
        return "";
    }
    // Lifecycle overrides below only trace the fragment lifecycle via PoemLog,
    // except onActivityCreated which also performs list/loader setup via init().

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        PoemLog.i(this, "onCreate");
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        PoemLog.i(this, "onCreateView");
        return super.onCreateView(inflater, container, savedInstanceState);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        PoemLog.i(this, "onViewCreated");
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        PoemLog.i(this, "onActivityCreated");
        // View hierarchy and activity are ready: safe to touch the ListView and loaders.
        init();
    }

    @Override
    public void onStart() {
        super.onStart();
        PoemLog.i(this, "onStart");
    }

    @Override
    public void onPause() {
        super.onPause();
        PoemLog.i(this, "onPause");
    }

    @Override
    public void onStop() {
        super.onStop();
        PoemLog.i(this, "onStop");
    }
private void init() {
setEmptyText(onCreateEmptyText());
registerForContextMenu(getListView());
adapter = onCreateCursorAdapter();
setListAdapter(adapter);
getListView().setSelector(R.drawable.lv_bg);
// Prepare the loader. Either re-connect with an existing one, or start a new one.
Bundle args = null;
loaderManager = getLoaderManager();
loaderManager.initLoader(PoemConstant.LOADER_ID, args, this);
}
public void query(String text) {
Bundle args = new Bundle();
args.putString(PoemConstant.QUERY, text);
loaderManager.restartLoader(PoemConstant.LOADER_ID, args, this);
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
PoemLog.i(this, "onCreateLoader");
String text = null;
if (args != null) {
text = args.getString(PoemConstant.QUERY);
}
final String finalText = text == null ? null : text.trim();
Loader<Cursor> loader = new AsyncTaskLoader<Cursor>(getActivity()) {
@Override
public Cursor loadInBackground() {
return onQuery(finalText);
}
@Override
protected void onStartLoading() {
forceLoad();
}
};
return loader;
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
PoemLog.i(this, "onLoadFinished");
// adapter.swapCursor(data);
adapter.changeCursor(data); // close old cursor
// closeCursor(); //不能关闭,否则apdater不能从cursor读取数据
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
PoemLog.i(this, "onLoaderReset");
// closeCursor();
// adapter.swapCursor(null);
adapter.changeCursor(null); // close old cursor
}
@Override
public void onDestroy() {
super.onDestroy();
PoemLog.i(this, "onDestroy");
closeCursor();
}
private void closeCursor() {
if (adapter != null) {
Cursor cursor = adapter.getCursor();
if (cursor != null) {
cursor.close();
}
}
}
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
super.onCreateContextMenu(menu, v, menuInfo);
PoemLog.i(this, "onCreateContextMenu");
getActivity().getMenuInflater().inflate(R.menu.fragment_base_poem_context, menu);
}
@Override
public boolean onContextItemSelected(MenuItem item) {
if (getUserVisibleHint()) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
final long id = info.id;
switch (item.getItemId()) {
case R.id.action_delete:
confirmDelete(id);
return true;
default:
return super.onContextItemSelected(item);
}
}
return false;
}
protected void confirmDelete(final long id) {
new AlertDialog.Builder(getActivity())
// .setTitle("确认删除")
.setMessage("确定删除?")
.setPositiveButton(R.string.action_delete, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
delete(id);
}
})
.setNegativeButton(R.string.action_cancel, null)
.show();
}
private void delete(final long id) {
if (id > 0) {
new AsyncTask<Void, Void, Integer>() {
@Override
protected Integer doInBackground(Void... params) {
return onDelete(id);
}
@Override
protected void onPostExecute(Integer rows) {
int tipId = R.string.tip_delete_fail;
final boolean isSuccess = rows > 0;
if (isSuccess) {
tipId = R.string.tip_delete_success;
// loader没有监听数据源,需手动通知adapter
// adapter.notifyDataSetChanged(); // did not work
// restart loader
query(null);
}
Toast.makeText(BasePoemFragment.this.getActivity(), tipId, Toast.LENGTH_SHORT).show();
}
}.execute();
}
}
}
|
AnimatorPro/Animator-Pro
|
src/PJ/gfx/glrcircl.c
|
#ifdef SLUFFED
#include "gfx.h"

/* Draw a circle given its radius, centered at (xcen, ycen).
 * Thin wrapper over dcircle(): converts the radius to an odd pixel diameter
 * ((rad << 1) + 1) so the circle lands exactly on the center pixel, then
 * forwards the dot/hline output callbacks and the `filled` flag unchanged. */
Errcode rcircle(SHORT xcen, SHORT ycen, SHORT rad,
                dotout_func dotout, void *dotdat,
                hline_func hlineout, void *hlinedat,
                Boolean filled)
/* radius circle */
{
    return(dcircle(xcen,ycen,(rad<<1)+1,dotout,dotdat,
                   hlineout,hlinedat,filled));
}
#endif /* SLUFFED */
|
JustinACoder/H22-GR3-UnrealAI
|
Plugins/UnrealEnginePython/Source/UnrealEnginePython/Private/UEPyUStructsImporter.h
|
#pragma once
#include "UEPyModule.h"
/* Python object backing the "unreal_engine.structs importer". It carries no
 * data of its own beyond the standard Python object header. */
typedef struct
{
    PyObject_HEAD
    /* Type-specific fields go here. */
} ue_PyUStructsImporter;

/* Allocates a new ue_PyUStructsImporter instance. */
PyObject *py_ue_new_ustructsimporter();
/* Registers the importer type/machinery on the given Python module. */
void ue_python_init_ustructsimporter(PyObject *);
|
kindlychung/mediasoup-sfu-cpp
|
deps/boost/include/boost/outcome/experimental/status-code/posix_code.hpp
|
/* Proposed SG14 status_code
(C) 2018-2020 <NAME> <http://www.nedproductions.biz/> (5 commits)
File Created: Feb 2018
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef BOOST_OUTCOME_SYSTEM_ERROR2_POSIX_CODE_HPP
#define BOOST_OUTCOME_SYSTEM_ERROR2_POSIX_CODE_HPP
#ifdef BOOST_OUTCOME_SYSTEM_ERROR2_NOT_POSIX
#error <posix_code.hpp> is not includable when BOOST_OUTCOME_SYSTEM_ERROR2_NOT_POSIX is defined!
#endif
#include "quick_status_code_from_enum.hpp"
#include <cstring> // for strchr and strerror_r
BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_BEGIN
class _posix_code_domain;
//! A POSIX error code, those returned by `errno`.
using posix_code = status_code<_posix_code_domain>;
//! A specialisation of `status_error` for the POSIX error code domain.
using posix_error = status_error<_posix_code_domain>;
namespace mixins
{
template <class Base> struct mixin<Base, _posix_code_domain> : public Base
{
using Base::Base;
//! Returns a `posix_code` for the current value of `errno`.
static posix_code current() noexcept;
};
} // namespace mixins
/*! The implementation of the domain for POSIX error codes, those returned by `errno`.
*/
class _posix_code_domain : public status_code_domain
{
template <class DomainType> friend class status_code;
template <class StatusCode> friend class detail::indirecting_domain;
using _base = status_code_domain;
static _base::string_ref _make_string_ref(int c) noexcept
{
char buffer[1024] = "";
#ifdef _WIN32
strerror_s(buffer, sizeof(buffer), c);
#elif defined(__gnu_linux__) && !defined(__ANDROID__) // handle glibc's weird strerror_r()
char *s = strerror_r(c, buffer, sizeof(buffer)); // NOLINT
if(s != nullptr)
{
strncpy(buffer, s, sizeof(buffer)); // NOLINT
buffer[1023] = 0;
}
#else
strerror_r(c, buffer, sizeof(buffer));
#endif
size_t length = strlen(buffer); // NOLINT
auto *p = static_cast<char *>(malloc(length + 1)); // NOLINT
if(p == nullptr)
{
return _base::string_ref("failed to get message from system");
}
memcpy(p, buffer, length + 1); // NOLINT
return _base::atomic_refcounted_string_ref(p, length);
}
public:
//! The value type of the POSIX code, which is an `int`
using value_type = int;
using _base::string_ref;
//! Default constructor
constexpr explicit _posix_code_domain(typename _base::unique_id_type id = 0xa59a56fe5f310933) noexcept
: _base(id)
{
}
_posix_code_domain(const _posix_code_domain &) = default;
_posix_code_domain(_posix_code_domain &&) = default;
_posix_code_domain &operator=(const _posix_code_domain &) = default;
_posix_code_domain &operator=(_posix_code_domain &&) = default;
~_posix_code_domain() = default;
//! Constexpr singleton getter. Returns constexpr posix_code_domain variable.
static inline constexpr const _posix_code_domain &get();
virtual string_ref name() const noexcept override { return string_ref("posix domain"); } // NOLINT
protected:
virtual bool _do_failure(const status_code<void> &code) const noexcept override // NOLINT
{
assert(code.domain() == *this); // NOLINT
return static_cast<const posix_code &>(code).value() != 0; // NOLINT
}
virtual bool _do_equivalent(const status_code<void> &code1, const status_code<void> &code2) const noexcept override // NOLINT
{
assert(code1.domain() == *this); // NOLINT
const auto &c1 = static_cast<const posix_code &>(code1); // NOLINT
if(code2.domain() == *this)
{
const auto &c2 = static_cast<const posix_code &>(code2); // NOLINT
return c1.value() == c2.value();
}
if(code2.domain() == generic_code_domain)
{
const auto &c2 = static_cast<const generic_code &>(code2); // NOLINT
if(static_cast<int>(c2.value()) == c1.value())
{
return true;
}
}
return false;
}
virtual generic_code _generic_code(const status_code<void> &code) const noexcept override // NOLINT
{
assert(code.domain() == *this); // NOLINT
const auto &c = static_cast<const posix_code &>(code); // NOLINT
return generic_code(static_cast<errc>(c.value()));
}
virtual string_ref _do_message(const status_code<void> &code) const noexcept override // NOLINT
{
assert(code.domain() == *this); // NOLINT
const auto &c = static_cast<const posix_code &>(code); // NOLINT
return _make_string_ref(c.value());
}
#if defined(_CPPUNWIND) || defined(__EXCEPTIONS) || defined(BOOST_OUTCOME_STANDARDESE_IS_IN_THE_HOUSE)
BOOST_OUTCOME_SYSTEM_ERROR2_NORETURN virtual void _do_throw_exception(const status_code<void> &code) const override // NOLINT
{
assert(code.domain() == *this); // NOLINT
const auto &c = static_cast<const posix_code &>(code); // NOLINT
throw status_error<_posix_code_domain>(c);
}
#endif
};
//! A constexpr source variable for the POSIX code domain, which is that of `errno`. Returned by `_posix_code_domain::get()`.
constexpr _posix_code_domain posix_code_domain;
inline constexpr const _posix_code_domain &_posix_code_domain::get()
{
return posix_code_domain;
}
namespace mixins
{
template <class Base> inline posix_code mixin<Base, _posix_code_domain>::current() noexcept { return posix_code(errno); }
} // namespace mixins
BOOST_OUTCOME_SYSTEM_ERROR2_NAMESPACE_END
#endif
|
jonespm/rsf
|
rsf-core/core/src/uk/org/ponder/rsf/renderer/html/decorators/HTMLLabelTargetDecoratorRenderer.java
|
<reponame>jonespm/rsf
/*
* Created on 24 Oct 2006
*/
package uk.org.ponder.rsf.renderer.html.decorators;
import java.util.Map;
import uk.org.ponder.rsf.components.decorators.UIDecorator;
import uk.org.ponder.rsf.components.decorators.UILabelTargetDecorator;
import uk.org.ponder.rsf.renderer.decorator.DecoratorRenderer;
/**
 * Renders {@link UILabelTargetDecorator}s for HTML output by pointing a
 * {@code <label>} tag's {@code for} attribute at the decorator's target
 * component. Any tag other than {@code label} is rejected.
 */
public class HTMLLabelTargetDecoratorRenderer implements DecoratorRenderer {

    public Class getRenderedType() {
        return UILabelTargetDecorator.class;
    }

    public String getContentTypes() {
        return "HTML, HTML-FRAGMENT";
    }

    public void modifyAttributes(UIDecorator decoratoro, String tagname,
            Map tomodify) {
        UILabelTargetDecorator decorator = (UILabelTargetDecorator) decoratoro;
        // Guard clause: only <label> can carry a "for" attribute.
        if (!tagname.equals("label")) {
            throw new IllegalArgumentException(
                "UILabelTargetDecorator peered with unrecognised tag " + tagname
                    + " (only <label> is supported for HTML)");
        }
        tomodify.put("for", decorator.targetFullID);
    }
}
|
checho651/bfx-report-ui
|
src/state/symbols/utils/index.js
|
import _includes from 'lodash/includes'
import _castArray from 'lodash/castArray'
import { mapCurrency } from './mapping'
export * from './mapping'
// BTCUSD -> tBTCUSD
// BTCF0:USDF0 -> tBTCF0:USDF0
// USD -> fUSD
// Funding symbols are shorter than 6 characters, so they take the "f" prefix;
// everything else is a trading pair and takes "t".
const addPrefix = (symbol = '', isFunding = false) => {
  const funding = isFunding || symbol.length < 6
  return funding ? `f${symbol}` : `t${symbol}`
}
const hasPrefix = pair => ['t', 'f'].includes(pair.charAt(0))
export const removePrefix = pair => (hasPrefix(pair) ? pair.slice(1) : pair)
export const isFundingSymbol = symbol => symbol.charAt(0) === 'f' && symbol.length > 3
export const isTradingPair = pair => pair.charAt(0) === 't' && pair.length > 6
// BTCUSD -> BTC:USD
// BTCF0:USTF0 -> BTCF0:USTF0
// Strips any t/f prefix and inserts the display colon into plain 6-letter
// pairs; derivative pairs (already containing ':') pass through untouched.
// Falsy input is returned as-is.
export const formatPair = (pair) => {
  if (!pair) {
    return pair
  }
  const bare = removePrefix(pair)
  return bare.length === 6
    ? `${bare.slice(0, 3)}:${bare.slice(3)}`
    : bare
}
// Parses the symbol segment of a URL into an array of mapped currencies.
// USD,ETC -> ['USD', 'ETC']
// USD -> ['USD']
// BTC:USD,ETH:USD -> ['BTC:USD', 'ETH:USD']
// BTC:USD -> ['BTC:USD']
// works for both symbols and pairs
export const getMappedSymbolsFromUrl = (params) => {
  // a comma means several symbols were requested
  if (_includes(params, ',')) {
    return params.split(',').map(mapCurrency)
  }
  return [mapCurrency(params)]
}
// Inverse of getMappedSymbolsFromUrl: serialises a symbol array for a URL.
// ['USD'] -> USD
// ['USD', 'ETC'] -> USD,ETC
// Anything that is not a non-empty array serialises to ''.
export const getSymbolsURL = (symbols) => {
  if (!Array.isArray(symbols) || symbols.length === 0) {
    return ''
  }
  return symbols.length === 1
    ? symbols[0]
    : symbols.join(',')
}
// Removes the display colon from plain 7-char pairs; derivative pairs keep it.
// BTC:USD -> BTCUSD
// BTCF0:USTF0 -> BTCF0:USTF0
const deformatPair = pair => ((pair.length === 7) ? pair.replace(':', '') : pair)

// Prepends the API prefix (t = trading, f = funding) to one symbol or to every
// symbol in an array, deformatting each first.
// BTCUSD -> tBTCUSD
// ['BTCUSD'] -> 'tBTCUSD'
// ['BTCUSD', 'ETHUSD'] -> ['tBTCUSD', 'tETHUSD']
// USD -> fUSD
// ['USD'] -> 'fUSD'
// ['USD', 'BTC'] -> ['fUSD', 'fBTC']
// NOTE(review): a single-element array input collapses to a bare string.
export const formatRawSymbols = (symbols, isFunding) => {
  const symbolsArray = _castArray(symbols)
    .map(deformatPair)
    .map((symbol) => addPrefix(symbol, isFunding))
  return symbolsArray.length > 1
    ? symbolsArray
    : symbolsArray[0]
}
export default {
formatPair,
formatRawSymbols,
getMappedSymbolsFromUrl,
getSymbolsURL,
isFundingSymbol,
isTradingPair,
removePrefix,
}
|
uktrade-attic/data-hub-zorg
|
src/db/companyrepository.js
|
const knex = require('../db/knex')
const generateUUID = require('../lib/uuid').generateUUID
// Query-builder factories: each call returns a fresh knex builder bound to
// the named table.
function Companies () {
  return knex('company')
}
function CompanyFamily () {
  return knex('companyfamily')
}
function CompanyInvestmentSummary () {
  return knex('companyinvestmentsummary')
}

// Resolves to the company row with the given id, or undefined if absent.
function getCompany (companyId) {
  return Companies().where('id', companyId).first()
}

// Resolves to all company rows.
function getCompanies () {
  return Companies().select()
}

// Inserts a company after assigning it a fresh UUID; resolves to the new id.
// NOTE(review): mutates the caller's `company` object by setting `id`.
function addCompany (company) {
  company.id = generateUUID()
  return Companies()
    .insert(company, 'id')
    .then((result) => {
      return result[0]
    })
}

// Updates the company row with the given id; resolves to the affected row count.
function updateCompany (id, company) {
  return Companies().where('id', id).update(company)
}

// Deletes the company row with the given id; resolves to the deleted row count.
function deleteCompany (id) {
  return Companies().where('id', id).del()
}

// Resolves to the ids of the parent companies of the given company.
function getParentCompanies (id) {
  return CompanyFamily().where('company_child', id).map(record => record['company_parent'])
}

// Resolves to the ids of the child companies of the given company.
function getChildCompanies (id) {
  return CompanyFamily().where('company_parent', id).map(record => record['company_child'])
}

// Resolves to the investment summary row for the company, or undefined.
function getCompanyInvestmentSummary (id) {
  return CompanyInvestmentSummary().where('id', id).first()
}

// Inserts an investment summary; resolves to its id.
function addCompanyInvestmentSummary (summary) {
  return CompanyInvestmentSummary()
    .insert(summary, 'id')
    .then((result) => {
      return result[0]
    })
}

// Upsert: inserts the summary if no row with summary.id exists, otherwise
// updates it in place.
// NOTE(review): read-then-write is not atomic -- two concurrent callers could
// both take the insert branch; acceptable only if callers are serialized.
function updateCompanyInvestmentSummary (summary) {
  return getCompanyInvestmentSummary(summary.id)
    .then((result) => {
      if (!result) {
        return CompanyInvestmentSummary()
          .insert(summary, 'id')
          .then((result) => {
            return result[0]
          })
      } else {
        return CompanyInvestmentSummary().where('id', summary.id).update(summary)
      }
    })
}

module.exports = {
  getCompany,
  getCompanies,
  addCompany,
  deleteCompany,
  updateCompany,
  getParentCompanies,
  getChildCompanies,
  getCompanyInvestmentSummary,
  addCompanyInvestmentSummary,
  updateCompanyInvestmentSummary
}
|
lbilic/XML
|
src/main/java/com/uns/ac/rs/xml/states/State.java
|
<gh_stars>0
package com.uns.ac.rs.xml.states;
import com.uns.ac.rs.xml.domain.enums.Options;
/**
 * Base class for states of the request-processing state machine. Concrete
 * states implement {@link #processRequest} and carry the {@link Options}
 * value associated with them.
 */
public abstract class State {
    // The option associated with this state.
    private Options option;

    // Handles the given action and returns the textual response.
    abstract String processRequest(com.uns.ac.rs.xml.util.actions.Action action);

    // NOTE(review): getter is named getOptions() (plural) while the field and
    // setter use the singular "option" -- kept as-is so existing callers keep
    // compiling.
    public Options getOptions() {
        return option;
    }

    public void setOption(Options option) {
        this.option = option;
    }
}
|
joshualucas84/jasper-soft-server
|
jasperserver/buildomatic/src/main/java/com/jaspersoft/buildomatic/sql/ant/ReflectionUtils.java
|
/*
* Copyright (C) 2005 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com.
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.jaspersoft.buildomatic.sql.ant;
import java.lang.reflect.Field;
/**
 * Reflection helpers for looking up, reading and writing object fields by
 * name. All methods translate the checked reflection exceptions into
 * {@link IllegalArgumentException}s that preserve the original cause.
 *
 * @author <NAME>
 */
public final class ReflectionUtils {
    /**
     * Default private constructor.
     * Suppresses creation of instances of this class outside the class body.
     */
    private ReflectionUtils() {
        // No operations.
    }

    /**
     * Finds a field declared directly on {@code clazz}; superclasses are not
     * searched (uses {@link Class#getDeclaredField}).
     *
     * @param clazz the class to inspect
     * @param name  the declared field name
     * @return the matching {@link Field}
     * @throws IllegalArgumentException if the class declares no such field
     */
    public static Field findField(Class<?> clazz, String name) {
        try {
            return clazz.getDeclaredField(name);
        }
        catch (NoSuchFieldException e) {
            throw new IllegalArgumentException("Cannot find " +
                    "[" + name + "] field in class " + clazz.getName() + ". " +
                    "Reason: " + e.getMessage(), e);
        }
    }

    /**
     * Reads the value of {@code field} on {@code target}, bypassing access
     * checks via {@link Field#setAccessible}.
     *
     * @param field  the field to read
     * @param target the instance to read from ({@code null} for static fields)
     * @return the current field value
     * @throws IllegalArgumentException if the field cannot be accessed
     */
    public static Object getFieldValue(Field field, Object target) {
        try {
            field.setAccessible(true);
            return field.get(target);
        }
        catch (IllegalAccessException e) {
            throw new IllegalArgumentException("Cannot access (get) " +
                    "[" + field + "] field in target [" + target + "]. " +
                    "Reason: " + e.getMessage(), e);
        }
    }

    /**
     * Writes {@code value} into {@code field} on {@code target}, bypassing
     * access checks via {@link Field#setAccessible}.
     *
     * @param field  the field to write
     * @param target the instance to write to ({@code null} for static fields)
     * @param value  the new field value
     * @throws IllegalArgumentException if the field cannot be accessed
     */
    public static void setFieldValue(Field field, Object target, Object value) {
        try {
            field.setAccessible(true);
            field.set(target, value);
        }
        catch (IllegalAccessException e) {
            throw new IllegalArgumentException("Cannot access (set) " +
                    "[" + field + "] field in target [" + target + "]. " +
                    "Reason: " + e.getMessage(), e);
        }
    }
}
|
AngelaRT/tailor_drawer
|
PHOTOBOOTH/cod_python/pygame/tmp/SDL-1.2.14/src/audio/windib/SDL_dibaudio.c
|
/*
SDL - Simple DirectMedia Layer
Copyright (C) 1997-2009 <NAME>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
<NAME>
<EMAIL>
*/
#include "SDL_config.h"
/* Allow access to a raw mixing buffer */
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <mmsystem.h>
#include "SDL_timer.h"
#include "SDL_audio.h"
#include "../SDL_audio_c.h"
#include "SDL_dibaudio.h"
#if defined(_WIN32_WCE) && (_WIN32_WCE < 300)
#include "win_ce_semaphore.h"
#endif
/* Audio driver functions */
static int DIB_OpenAudio(_THIS, SDL_AudioSpec *spec);
static void DIB_ThreadInit(_THIS);
static void DIB_WaitAudio(_THIS);
static Uint8 *DIB_GetAudioBuf(_THIS);
static void DIB_PlayAudio(_THIS);
static void DIB_WaitDone(_THIS);
static void DIB_CloseAudio(_THIS);
/* Audio driver bootstrap functions */
/* The waveout driver is always considered available on Win32. */
static int Audio_Available(void)
{
	return(1);
}

/* Frees a device structure created by Audio_CreateDevice(). */
static void Audio_DeleteDevice(SDL_AudioDevice *device)
{
	SDL_free(device->hidden);
	SDL_free(device);
}

/* Allocates a zeroed SDL_AudioDevice, attaches its private data and installs
 * the waveout driver entry points. Returns NULL (0) on allocation failure. */
static SDL_AudioDevice *Audio_CreateDevice(int devindex)
{
	SDL_AudioDevice *this;

	/* Initialize all variables that we clean on shutdown */
	this = (SDL_AudioDevice *)SDL_malloc(sizeof(SDL_AudioDevice));
	if ( this ) {
		SDL_memset(this, 0, (sizeof *this));
		this->hidden = (struct SDL_PrivateAudioData *)
				SDL_malloc((sizeof *this->hidden));
	}
	if ( (this == NULL) || (this->hidden == NULL) ) {
		SDL_OutOfMemory();
		if ( this ) {
			SDL_free(this);
		}
		return(0);
	}
	SDL_memset(this->hidden, 0, (sizeof *this->hidden));

	/* Set the function pointers */
	this->OpenAudio = DIB_OpenAudio;
	this->ThreadInit = DIB_ThreadInit;
	this->WaitAudio = DIB_WaitAudio;
	this->PlayAudio = DIB_PlayAudio;
	this->GetAudioBuf = DIB_GetAudioBuf;
	this->WaitDone = DIB_WaitDone;
	this->CloseAudio = DIB_CloseAudio;
	this->free = Audio_DeleteDevice;

	return this;
}

/* Bootstrap record exported to the SDL audio core driver table. */
AudioBootStrap WAVEOUT_bootstrap = {
	"waveout", "Win95/98/NT/2000 WaveOut",
	Audio_Available, Audio_CreateDevice
};
/* The Win32 callback for filling the WAVE device */
/* Runs on the winmm callback thread: the only work done here is releasing the
 * buffer semaphore, which wakes DIB_WaitAudio() on the SDL audio thread.
 * NOTE(review): audio_sem is presumably a macro over this->hidden state
 * (see SDL_dibaudio.h) -- confirm. */
static void CALLBACK FillSound(HWAVEOUT hwo, UINT uMsg, DWORD_PTR dwInstance,
						DWORD dwParam1, DWORD dwParam2)
{
	SDL_AudioDevice *this = (SDL_AudioDevice *)dwInstance;

	/* Only service "buffer done playing" messages */
	if ( uMsg != WOM_DONE )
		return;

	/* Signal that we are done playing a buffer */
#if defined(_WIN32_WCE) && (_WIN32_WCE < 300)
	ReleaseSemaphoreCE(audio_sem, 1, NULL);
#else
	ReleaseSemaphore(audio_sem, 1, NULL);
#endif
}
/* Reports a multimedia error as "function: <system error text>" through
 * SDL_SetError(). On CE, the wide-char error text is converted to ANSI. */
static void SetMMerror(char *function, MMRESULT code)
{
	size_t len;
	char errbuf[MAXERRORLENGTH];
#ifdef _WIN32_WCE
	wchar_t werrbuf[MAXERRORLENGTH];
#endif

	/* Prefix the message with the name of the failing waveOut function */
	SDL_snprintf(errbuf, SDL_arraysize(errbuf), "%s: ", function);
	len = SDL_strlen(errbuf);

#ifdef _WIN32_WCE
	/* UNICODE version */
	waveOutGetErrorText(code, werrbuf, MAXERRORLENGTH-len);
	WideCharToMultiByte(CP_ACP,0,werrbuf,-1,errbuf+len,MAXERRORLENGTH-len,NULL,NULL);
#else
	waveOutGetErrorText(code, errbuf+len, (UINT)(MAXERRORLENGTH-len));
#endif

	SDL_SetError("%s",errbuf);
}
/* Set high priority for the audio thread */
static void DIB_ThreadInit(_THIS)
{
	SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_HIGHEST);
}

/* Blocks until FillSound() signals that a queued buffer finished playing. */
void DIB_WaitAudio(_THIS)
{
	/* Wait for an audio chunk to finish */
#if defined(_WIN32_WCE) && (_WIN32_WCE < 300)
	WaitForSemaphoreCE(audio_sem, INFINITE);
#else
	WaitForSingleObject(audio_sem, INFINITE);
#endif
}

/* Returns the mixing buffer the core should fill next (the ring slot at
 * next_buffer). */
Uint8 *DIB_GetAudioBuf(_THIS)
{
	Uint8 *retval;

	retval = (Uint8 *)(wavebuf[next_buffer].lpData);
	return retval;
}

/* Queues the just-filled buffer on the device and advances the ring index. */
void DIB_PlayAudio(_THIS)
{
	/* Queue it up */
	waveOutWrite(sound, &wavebuf[next_buffer], sizeof(wavebuf[0]));
	next_buffer = (next_buffer+1)%NUM_BUFFERS;
}

/* Polls at 100 ms intervals until every queued buffer reports WHDR_DONE. */
void DIB_WaitDone(_THIS)
{
	int i, left;

	do {
		left = NUM_BUFFERS;
		for ( i=0; i<NUM_BUFFERS; ++i ) {
			if ( wavebuf[i].dwFlags & WHDR_DONE ) {
				--left;
			}
		}
		if ( left > 0 ) {
			SDL_Delay(100);
		}
	} while ( left > 0 );
}
/* Releases the semaphore, the waveOut handle, the prepared wave headers and
 * the raw mixing memory. Safe to call after a partial DIB_OpenAudio(). */
void DIB_CloseAudio(_THIS)
{
	int i;

	/* Close up audio */
	if ( audio_sem ) {
#if defined(_WIN32_WCE) && (_WIN32_WCE < 300)
		CloseSynchHandle(audio_sem);
#else
		CloseHandle(audio_sem);
#endif
	}
	if ( sound ) {
		waveOutClose(sound);
	}

	/* Clean up mixing buffers */
	/* dwUser == 0xFFFF marks a header that was never prepared
	 * (set in DIB_OpenAudio before waveOutPrepareHeader) */
	for ( i=0; i<NUM_BUFFERS; ++i ) {
		if ( wavebuf[i].dwUser != 0xFFFF ) {
			waveOutUnprepareHeader(sound, &wavebuf[i],
						sizeof(wavebuf[i]));
			wavebuf[i].dwUser = 0xFFFF;
		}
	}

	/* Free raw mixing buffer */
	if ( mixbuf != NULL ) {
		SDL_free(mixbuf);
		mixbuf = NULL;
	}
}
/* Opens the waveOut device for the requested spec and allocates NUM_BUFFERS
 * mixing buffers. Returns 0 on success, -1 with the SDL error set on failure;
 * partially acquired resources are released later by DIB_CloseAudio(). */
int DIB_OpenAudio(_THIS, SDL_AudioSpec *spec)
{
	MMRESULT result;
	int i;
	WAVEFORMATEX waveformat;

	/* Initialize the wavebuf structures for closing */
	sound = NULL;
	audio_sem = NULL;
	for ( i = 0; i < NUM_BUFFERS; ++i )
		wavebuf[i].dwUser = 0xFFFF;
	mixbuf = NULL;

	/* Set basic WAVE format parameters */
	SDL_memset(&waveformat, 0, sizeof(waveformat));
	waveformat.wFormatTag = WAVE_FORMAT_PCM;

	/* Determine the audio parameters from the AudioSpec */
	/* waveOut only supports U8 and S16 PCM, so the spec is coerced. */
	switch ( spec->format & 0xFF ) {
		case 8:
			/* Unsigned 8 bit audio data */
			spec->format = AUDIO_U8;
			waveformat.wBitsPerSample = 8;
			break;
		case 16:
			/* Signed 16 bit audio data */
			spec->format = AUDIO_S16;
			waveformat.wBitsPerSample = 16;
			break;
		default:
			SDL_SetError("Unsupported audio format");
			return(-1);
	}
	waveformat.nChannels = spec->channels;
	waveformat.nSamplesPerSec = spec->freq;
	waveformat.nBlockAlign =
		waveformat.nChannels * (waveformat.wBitsPerSample/8);
	waveformat.nAvgBytesPerSec =
		waveformat.nSamplesPerSec * waveformat.nBlockAlign;

	/* Check the buffer size -- minimum of 1/4 second (word aligned) */
	if ( spec->samples < (spec->freq/4) )
		spec->samples = ((spec->freq/4)+3)&~3;

	/* Update the fragment size as size in bytes */
	SDL_CalculateAudioSpec(spec);

	/* Open the audio device */
	result = waveOutOpen(&sound, WAVE_MAPPER, &waveformat,
			(DWORD_PTR)FillSound, (DWORD_PTR)this, CALLBACK_FUNCTION);
	if ( result != MMSYSERR_NOERROR ) {
		SetMMerror("waveOutOpen()", result);
		return(-1);
	}

#ifdef SOUND_DEBUG
	/* Check the sound device we retrieved */
	{
		WAVEOUTCAPS caps;

		result = waveOutGetDevCaps((UINT)sound, &caps, sizeof(caps));
		if ( result != MMSYSERR_NOERROR ) {
			SetMMerror("waveOutGetDevCaps()", result);
			return(-1);
		}
		printf("Audio device: %s\n", caps.szPname);
	}
#endif

	/* Create the audio buffer semaphore */
	/* Initial count is NUM_BUFFERS-1: one buffer is always held by the mixer
	 * between DIB_GetAudioBuf() and DIB_PlayAudio(). */
#if defined(_WIN32_WCE) && (_WIN32_WCE < 300)
	audio_sem = CreateSemaphoreCE(NULL, NUM_BUFFERS-1, NUM_BUFFERS, NULL);
#else
	audio_sem = CreateSemaphore(NULL, NUM_BUFFERS-1, NUM_BUFFERS, NULL);
#endif
	if ( audio_sem == NULL ) {
		SDL_SetError("Couldn't create semaphore");
		return(-1);
	}

	/* Create the sound buffers */
	mixbuf = (Uint8 *)SDL_malloc(NUM_BUFFERS*spec->size);
	if ( mixbuf == NULL ) {
		SDL_SetError("Out of memory");
		return(-1);
	}
	for ( i = 0; i < NUM_BUFFERS; ++i ) {
		SDL_memset(&wavebuf[i], 0, sizeof(wavebuf[i]));
		wavebuf[i].lpData = (LPSTR) &mixbuf[i*spec->size];
		wavebuf[i].dwBufferLength = spec->size;
		wavebuf[i].dwFlags = WHDR_DONE;
		result = waveOutPrepareHeader(sound, &wavebuf[i],
				sizeof(wavebuf[i]));
		if ( result != MMSYSERR_NOERROR ) {
			SetMMerror("waveOutPrepareHeader()", result);
			return(-1);
		}
	}

	/* Ready to go! */
	next_buffer = 0;
	return(0);
}
|
DataBiosphere/FHIR-Implementation
|
anvil-api/src/anvil/controller.test.js
|
// Replace the data-access layer with canned responses so the controller can be
// exercised in isolation. List endpoints resolve to a [results, totalCount]
// pair; by-id endpoints resolve to a single object.
jest.mock('./service', () => ({
  getAllResearchStudies: jest.fn().mockImplementation(() => [[{ id: 'workspace' }], 1]),
  getResearchStudyById: jest.fn().mockImplementation(() => ({ id: 'workspaceid' })),
  getAllSamples: jest.fn().mockImplementation(() => [[{ id: 'sample' }], 2]),
  getSampleById: jest.fn().mockImplementation(() => ({ id: 'sampleid' })),
  getAllPatients: jest.fn().mockImplementation(() => [[{ id: 'subject' }], 3]),
  getPatientById: jest.fn().mockImplementation(() => ({ id: 'subjectid' })),
  getAllObservations: jest.fn().mockImplementation(() => [[{ id: 'observation' }], 4]),
  getObservationById: jest.fn().mockImplementation(() => ({ id: 'observationid' })),
}));
// Must be required after jest.mock so the controller picks up the mocked service.
const controller = require('./controller');
describe('ANVIL controller tests', () => {
it('should get all ResearchStudy data', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllResearchStudies(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 1, results: [{ id: 'workspace' }] });
});
it('should get ResearchStudy data by ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
id: 'foobar',
},
};
await controller.getResearchStudyById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'workspaceid' });
});
it('should get all Sample data', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllSamples(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 2, results: [{ id: 'sample' }] });
});
it('should get all Sample data with Workspace params', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foobar',
},
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllSamples(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 2, results: [{ id: 'sample' }] });
});
it('should get Sample data by ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
id: 'foobar',
},
};
await controller.getSampleById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'sampleid' });
});
it('should get Sample data by Workspace ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foo',
id: 'bar',
},
};
await controller.getSampleById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'sampleid' });
});
it('should get all Patient data', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllPatients(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 3, results: [{ id: 'subject' }] });
});
it('should get all Patient data with Workspace params', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foobar',
},
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllPatients(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 3, results: [{ id: 'subject' }] });
});
it('should get Patient data by ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
id: 'foobar',
},
};
await controller.getPatientById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'subjectid' });
});
it('should get Patient data by Workspace ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foo',
id: 'bar',
},
};
await controller.getPatientById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'subjectid' });
});
it('should get all Observation data', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllObservations(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 4, results: [{ id: 'observation' }] });
});
it('should get all Observation data with Workspace params', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foobar',
},
query: {
page: 1,
pageSize: 10,
},
};
await controller.getAllObservations(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ count: 4, results: [{ id: 'observation' }] });
});
it('should get Observation data by ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
id: 'foobar',
},
};
await controller.getObservationById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'observationid' });
});
it('should get Observation data by Workspace ID', async () => {
const mockRes = {
json: jest.fn(),
};
const mockReq = {
params: {
workspace: 'foo',
id: 'bar',
},
};
await controller.getObservationById(mockReq, mockRes);
expect(mockRes.json.mock.calls[0][0]).toEqual({ id: 'observationid' });
});
});
|
leschzinerlab/myami-3.2-freeHand
|
lib/python2.7/site-packages/pyami/tifffile.py
|
<reponame>leschzinerlab/myami-3.2-freeHand<filename>lib/python2.7/site-packages/pyami/tifffile.py<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tifffile.py
# Copyright (c) 2008-2011, The Regents of the University of California
# Produced by the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Read TIFF, STK, LSM and FluoView files and access image data as numpy array.
Only a subset of the TIFF specification is supported, mainly uncompressed and
losslessly compressed 1-32 bit integer as well as 32 and 64-bit float
images, which are commonly used in scientific imaging.
TIFF, the Tagged Image File Format, is under the control of Adobe Systems.
STK and LSM are TIFF files with custom extensions, used by MetaMorph and
Carl Zeiss MicroImaging respectively. Currently only primary info records
are read for the STK, FluoView, and NIH image formats.
For command line usage run ``python tifffile.py --help``
:Authors:
`<NAME> <http://www.lfd.uci.edu/~gohlke/>`__,
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2010.06.09
Requirements
------------
* `Python 2.6 or 3.1 <http://www.python.org>`__
* `Numpy 1.5 <http://numpy.scipy.org>`__
* `Matplotlib 1.0 <http://matplotlib.sourceforge.net>`__
(optional for plotting)
* `tifffile.c 2010.04.10 <http://www.lfd.uci.edu/~gohlke/>`__
(optional for faster decoding of PackBits and LZW encoded strings)
Acknowledgements
----------------
* <NAME>, University of Manchester, for cz_lsm_scan_info specifics.
* <NAME>, for a bug fix and some read_cz_lsm functions.
References
----------
(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated.
http://partners.adobe.com/public/developer/tiff/
(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html
(3) MetaMorph Stack (STK) Image File Format.
http://support.meta.moleculardevices.com/docs/t10243.pdf
(4) File Format Description - LSM 5xx Release 2.0.
http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc
(5) BioFormats. http://www.loci.wisc.edu/ome/formats.html
Examples
--------
>>> tif = TIFFfile('test.tif')
>>> images = tif.asarray()
>>> image0 = tif[0].asarray()
>>> for page in tif:
... for tag in page.tags.values():
... t = tag.name, tag.value
... image = page.asarray()
... if page.is_rgb: pass
... if page.is_reduced: pass
... if page.is_palette:
... t = page.color_map
... if page.is_stk:
... t = page.mm_uic_tags.number_planes
... if page.is_lsm:
... t = page.cz_lsm_info
>>> tif.close()
"""
from __future__ import division, print_function
import sys
import os
import math
import zlib
import time
import struct
import warnings
import datetime
from contextlib import contextmanager
import numpy
class TIFFfile(object):
    """Read TIFF, STK, and LSM files. Return image data as NumPy array.

    Attributes
    ----------
    pages : tuple of TIFFpages.

    Examples
    --------
    >>> tif = TIFFfile('test.tif')
    >>> try:
    ...     images = tif.asarray()
    ... finally:
    ...     tif.close()

    """
    def __init__(self, filename):
        """Open `filename` for binary reading and parse all pages.

        Raises ValueError if the file is not a valid TIFF file.
        """
        self.fhandle = open(filename, 'rb')
        self.fname = filename
        self.fstat = os.fstat(self.fhandle.fileno())
        try:
            self._fromfile()
        except Exception:
            # don't leak the file handle when the header/pages are invalid
            self.fhandle.close()
            raise

    def close(self):
        """Close the file object."""
        self.fhandle.close()
        self.fhandle = None

    def _fromdata(self, data):
        """Create TIFF header, pages, and tags from numpy array."""
        raise NotImplementedError()

    def _fromfile(self):
        """Read TIFF header and all page records from file."""
        try:
            self.byte_order = TIFF_BYTE_ORDERS[self.fhandle.read(2)]
        except KeyError:
            raise ValueError("not a valid TIFF file")
        if struct.unpack(self.byte_order+'H', self.fhandle.read(2))[0] != 42:
            raise ValueError("not a TIFF file")
        self.pages = []
        while True:
            try:
                self.pages.append(TIFFpage(self))
            except StopIteration:
                # TIFFpage raises StopIteration when the next-IFD offset is 0
                break

    def asarray(self, key=None, skipreduced=True, squeeze=True,
                colormapped=True, rgbonly=True):
        """Return image data of multiple TIFF pages as numpy array.

        Raises ValueError if not all pages are of same shape in all but
        first dimension.

        Arguments
        ---------
        key : int or slice
            Defines which pages to return as array.
        skipreduced : bool
            If True any reduced images are skipped.
        squeeze : bool
            If True all length-1 dimensions are squeezed out from result.
        colormapped : bool
            If True color mapping is applied for palette-indexed images.
        rgbonly : bool
            If True return RGB(A) images without extra samples.
        """
        pages = self.pages
        if isinstance(key, int):
            pages = [self.pages[key]]
        elif isinstance(key, slice):
            pages = self.pages[key]
        elif key is not None:
            raise TypeError('key must be an int or slice')
        if skipreduced:
            pages = [p for p in pages if not p.is_reduced]
        if len(pages) == 1:
            result = pages[0].asarray(False, colormapped, rgbonly)
        else:
            if colormapped and self.is_nih:
                # numpy.vstack requires a sequence; passing a generator is
                # deprecated since numpy 1.16 and an error in numpy >= 2.0
                result = numpy.vstack([p.asarray(False, False)
                                       for p in pages])
                if pages[0].is_palette:
                    result = pages[0].color_map[:, result]
                    result = numpy.swapaxes(result, 0, 1)
            else:
                try:
                    result = numpy.vstack([p.asarray(False, colormapped,
                                                     rgbonly)
                                           for p in pages])
                except ValueError:
                    # dimensions of pages don't agree
                    result = pages[0].asarray(False, colormapped, rgbonly)
        page = self.pages[0]
        if page.is_lsm:
            # adjust LSM data shape
            lsmi = page.cz_lsm_info
            order = CZ_SCAN_TYPES[lsmi.scan_type]
            if page.is_rgb:
                order = order.replace('C', '').replace('XY', 'XYC')
            shape = []
            for i in reversed(order):
                shape.append(getattr(lsmi, CZ_DIMENSIONS[i]))
            result.shape = shape
        return result.squeeze() if squeeze else result

    def __len__(self):
        """Return number of image pages in file."""
        return len(self.pages)

    def __getitem__(self, key):
        """Return specified page."""
        return self.pages[key]

    def __iter__(self):
        """Return iterator over pages."""
        return iter(self.pages)

    def __str__(self):
        """Return string containing information about file."""
        fname = os.path.split(self.fname)[-1].capitalize()
        return "%s, %.2f MB, %s, %i pages" % (fname, self.fstat[6]/1048576,
            {'<': 'little endian', '>': 'big endian'}[self.byte_order],
            len(self.pages), )

    def __getattr__(self, name):
        """Return special property."""
        if name in ('is_rgb', 'is_palette', 'is_stk'):
            # these hold for the file only if they hold for every page
            return all(getattr(p, name) for p in self.pages)
        if name in ('is_lsm', 'is_nih'):
            return getattr(self.pages[0], name)
        if name == 'is_fluoview':
            return 'mm_header' in self.pages[0].tags
        raise AttributeError(name)
@contextmanager
def tifffile(filename):
    """Context-manager wrapper around TIFFfile for use in 'with' statements.

    >>> with tifffile('test.tif') as tif:
    ...     image = tif.asarray()

    """
    tif = TIFFfile(filename)
    try:
        yield tif
    finally:
        # always release the underlying file handle
        tif.close()
class TIFFpage(object):
    """A TIFF image file directory (IFD).

    Attributes
    ----------
    shape : tuple of int
        Dimensions of the image array in file.
    dtype : str
        Data type. One of TIFF_SAMPLE_DTYPES.
    tags : TiffTags(Record((dict))
        Tag values are also directly accessible as attributes.
    color_map : numpy array
        Color look up table, palette, if existing.
    mm_uic_tags: Record(dict)
        Consolidated MetaMorph mm_uic# tags, if exists.
    cz_lsm_scan_info: Record(dict)
        LSM scan info attributes, if exists.
    is_rgb : bool
        True if page contains a RGB image.
    is_reduced : bool
        True if page is a reduced image of another image.
    is_palette : bool
        True if page contains a palette-colored image.
    is_stk : bool
        True if page contains MM_UIC2 tag.
    is_lsm : bool
        True if page contains CZ_LSM_INFO tag.
    is_fluoview : bool
        True if page contains MM_STAMP tag.
    is_nih : bool
        True if page contains NIH image header.
    """
    def __init__(self, parent):
        """Initialize object from file owned by `parent` TIFFfile."""
        self._parent = parent
        self.shape = ()
        self.tags = TiffTags()
        self._fromfile()
        self._process_tags()

    def _fromfile(self):
        """Read TIFF IFD structure and its tags from file.

        File cursor must be at storage position of IFD offset and is left at
        offset to next IFD.

        Raises StopIteration if offset (first bytes read) is 0.
        """
        fhandle = self._parent.fhandle
        byte_order = self._parent.byte_order
        offset = struct.unpack(byte_order+'I', fhandle.read(4))[0]
        if not offset:
            raise StopIteration()
        # read standard tags
        tags = self.tags
        fhandle.seek(offset, 0)
        numtags = struct.unpack(byte_order+'H', fhandle.read(2))[0]
        for _ in range(numtags):
            tag = TIFFtag(fhandle, byte_order=byte_order)
            tags[tag.name] = tag
        # read custom tags; remember position so the next-IFD offset
        # can be read from where the standard tags ended
        pos = fhandle.tell()
        for name, readtag in CUSTOM_TAGS.values():
            if name in tags and readtag:
                value = readtag(fhandle, byte_order, tags[name])
                if isinstance(value, dict):  # numpy.core.records.record
                    value = Record(value)
                tags[name].value = value
        fhandle.seek(pos)
        # read LSM info subrecords
        if self.is_lsm:
            pos = fhandle.tell()
            for name, reader in CZ_LSM_INFO_READERS.items():
                try:
                    offset = self.cz_lsm_info["offset_"+name]
                except KeyError:
                    continue
                if not offset:
                    continue
                fhandle.seek(offset)
                try:
                    setattr(self, "cz_lsm_"+name, reader(fhandle, byte_order))
                except ValueError:
                    # malformed subrecord; ignore and keep the page usable
                    pass
            fhandle.seek(pos)

    def _process_tags(self):
        """Validate standard tags and initialize attributes.

        Raise ValueError if tag values not supported.
        """
        tags = self.tags
        for code, (name, default, dtype, count, validate) in TIFF_TAGS.items():
            if not (name in tags or default is None):
                tags[name] = TIFFtag(code, dtype=dtype, count=count,
                                     value=default, name=name)
            if name in tags and validate:
                try:
                    if tags[name].count == 1:
                        setattr(self, name, validate[tags[name].value])
                    else:
                        setattr(self, name, tuple(validate[value]
                                            for value in tags[name].value))
                except KeyError:
                    raise ValueError("%s.value (%s) not supported" %
                                     (name, tags[name].value))
        tag = tags['bits_per_sample']
        if tag.count != 1:
            bps = tag.value[0]
            # BUGFIX: the original `all((i-bps for i in tag.value))` was
            # always False (the first element equals bps), so mixed sample
            # sizes were never rejected; any() implements the stated intent
            if any(i != bps for i in tag.value):
                raise ValueError(
                    "samples must be of same type %s" % str(tag))
            self.bits_per_sample = bps
        tag = tags['sample_format']
        if tag.count != 1:
            fmt = tag.value[0]
            # BUGFIX: same dead all() check as for bits_per_sample
            if any(i != fmt for i in tag.value):
                raise ValueError(
                    "samples must be of same format %s" % str(tag))
            self.sample_format = TIFF_SAMPLE_FORMATS[fmt]
        self.strips_per_image = int(math.floor((self.image_length +
            self.rows_per_strip - 1) / self.rows_per_strip))
        key = (self.sample_format, self.bits_per_sample)
        try:
            self.dtype = TIFF_SAMPLE_DTYPES[key]
        except KeyError:
            raise ValueError("unsupported sample dtype %s" % str(key))
        if self.is_palette:
            dtype = self.tags['color_map'].dtype[1]
            self.color_map = numpy.array(self.color_map,
                                         dtype).reshape((3, -1))
        planes = 0
        if self.is_stk:
            planes = tags['mm_uic2'].count
            # consolidate mm_uic tags
            self.mm_uic_tags = Record(tags['mm_uic2'].value)
            for key in ('mm_uic3', 'mm_uic4', 'mm_uic1'):
                if key in tags:
                    self.mm_uic_tags.update(tags[key].value)
        if planes:
            if self.planar_configuration == 'contig':
                self.shape = (planes, self.image_length,
                              self.image_width, self.samples_per_pixel)
            else:
                self.shape = (planes, self.samples_per_pixel,
                              self.image_length, self.image_width, 1)
        else:
            if self.planar_configuration == 'contig':
                self.shape = (1, self.image_length, self.image_width,
                              self.samples_per_pixel)
            else:
                self.shape = (self.samples_per_pixel, self.image_length,
                              self.image_width, 1)
        if not self.compression and 'strip_byte_counts' not in tags:
            # numpy.prod replaces numpy.product (removed in numpy 2.0)
            self.strip_byte_counts = numpy.prod(self.shape) * (
                self.bits_per_sample // 8)

    def asarray(self, squeeze=True, colormapped=True, rgbonly=True):
        """Read image data and return as numpy array in native byte order.

        Raise ValueError if format is unsupported.

        Arguments
        ---------
        squeeze : bool
            If True all length-1 dimensions are squeezed out from result.
        colormapped : bool
            If True color mapping is applied for palette-indexed images.
        rgbonly : bool
            If True return RGB(A) image without extra samples.
        """
        fhandle = self._parent.fhandle
        if not fhandle:
            raise IOError("TIFF file is not open")
        if self.compression not in TIFF_DECOMPESSORS:
            raise ValueError("Can't decompress %s" % self.compression)
        strip_offsets = self.strip_offsets
        strip_byte_counts = self.strip_byte_counts
        try:
            strip_offsets[0]
        except TypeError:
            # a single strip is stored as a scalar; normalize to tuples
            strip_offsets = (self.strip_offsets, )
            strip_byte_counts = (self.strip_byte_counts, )
        byte_order = self._parent.byte_order
        typecode = byte_order + self.dtype
        if self.is_stk:
            fhandle.seek(strip_offsets[0], 0)
            result = numpy.fromfile(fhandle, typecode,
                                    numpy.prod(self.shape))
        else:
            # try speed up reading contiguous data by merging all strips
            if not self.compression \
               and self.bits_per_sample in (8, 16, 32, 64) \
               and all(strip_offsets[i] ==
                       strip_offsets[i+1]-strip_byte_counts[i]
                       for i in range(len(strip_offsets)-1)):
                strip_byte_counts = (strip_offsets[-1] - strip_offsets[0] +
                                     strip_byte_counts[-1], )
                strip_offsets = (strip_offsets[0], )
            result = numpy.empty(self.shape, self.dtype).reshape(-1)
            runlen = self.image_width
            if self.planar_configuration == 'contig':
                runlen *= self.samples_per_pixel
            if self.bits_per_sample in (8, 16, 32, 64):
                if self.bits_per_sample*runlen % 8:
                    raise ValueError("data and sample size mismatch")
                # frombuffer replaces the deprecated numpy.fromstring;
                # the returned (read-only) view is only copied from
                unpack = lambda data: numpy.frombuffer(data, typecode)
            else:
                unpack = lambda data: unpackints(data, typecode,
                                                 self.bits_per_sample, runlen)
            decompress = TIFF_DECOMPESSORS[self.compression]
            index = 0
            for offset, bytecount in zip(strip_offsets, strip_byte_counts):
                fhandle.seek(offset, 0)
                data = unpack(decompress(fhandle.read(bytecount)))
                size = min(result.size, data.size)
                result[index:index+size] = data[:size]
                index += size
        result.shape = self.shape[:]
        if self.predictor == 'horizontal':
            # undo horizontal differencing along the width axis
            numpy.cumsum(result, axis=2, dtype=self.dtype, out=result)
        if colormapped and self.photometric == 'palette':
            if self.color_map.shape[1] >= 2**self.bits_per_sample:
                result = self.color_map[:, result]
                result = numpy.swapaxes(result, 0, 1)
        if rgbonly and 'extra_samples' in self.tags:
            # return only RGB and first unassociated alpha channel if exists
            extra_samples = self.tags['extra_samples'].value
            if self.tags['extra_samples'].count == 1:
                extra_samples = (extra_samples, )
            for i, exs in enumerate(extra_samples):
                if exs == 2:  # unassociated alpha channel
                    if self.planar_configuration == 'contig':
                        result = result[..., [0, 1, 2, 3+i]]
                    else:
                        result = result[[0, 1, 2, 3+i]]
                    break
            else:
                if self.planar_configuration == 'contig':
                    result = result[..., :3]
                else:
                    result = result[:3]
        if result.shape[0] != 1:
            result.shape = (1, ) + result.shape
        return result.squeeze() if squeeze else result

    def __getattr__(self, name):
        """Return tag value or special property."""
        tags = self.tags
        if name in tags:
            return tags[name].value
        if name == 'is_rgb':
            return tags['photometric'].value == 2
        if name == 'is_reduced':
            return tags['new_subfile_type'].value & 1
        if name == 'is_palette':
            return 'color_map' in tags
        if name == 'is_stk':
            return 'mm_uic2' in tags
        if name == 'is_lsm':
            return 'cz_lsm_info' in tags
        if name == 'is_fluoview':
            return 'mm_stamp' in tags
        if name == 'is_nih':
            return 'nih_image_header' in tags
        raise AttributeError(name)

    def __str__(self):
        """Return string containing information about page."""
        t = ','.join(t[3:] for t in (
            'is_stk', 'is_lsm', 'is_nih', 'is_fluoview') if getattr(self, t))
        s = ', '.join(str(i) for i in (
            (' x '.join(str(i) for i in self.shape if i > 1),
             numpy.dtype(self.dtype),
             "%i bit" % self.bits_per_sample,
             self.photometric,
             self.compression if self.compression else 'raw')))
        if t:
            s = ', '.join((s, t))
        return s
class TIFFtag(object):
    """A TIFF tag structure.

    Attributes
    ----------
    name : string
        Attribute name of tag.
    code : int
        Decimal code of tag.
    dtype : str
        Datatype of tag data. One of TIFF_DATA_TYPES.
    count : int
        Number of values.
    value : various types
        Tag data. For codes in CUSTOM_TAGS the 4 bytes file content.
    """
    __slots__ = ('code', 'name', 'count', 'dtype', 'value')

    def __init__(self, arg, **kwargs):
        """Initialize tag from an open file or from explicit arguments."""
        if hasattr(arg, 'seek'):
            self._fromfile(arg, **kwargs)
        else:
            self._fromdata(arg, **kwargs)

    def _fromdata(self, code, dtype, count, value, name=None):
        """Initialize tag from arguments."""
        self.code = int(code)
        self.name = name if name else str(code)
        self.dtype = TIFF_DATA_TYPES[dtype]
        self.count = int(count)
        self.value = value

    def _fromfile(self, fhandle, byte_order):
        """Read tag structure from open file. Advances file cursor 12 bytes."""
        code, dtype, count, value = struct.unpack(byte_order+'HHI4s',
                                                  fhandle.read(12))
        if code in TIFF_TAGS:
            name = TIFF_TAGS[code][0]
        elif code in CUSTOM_TAGS:
            name = CUSTOM_TAGS[code][0]
        else:
            name = str(code)
        try:
            dtype = TIFF_DATA_TYPES[dtype]
        except KeyError:
            raise ValueError("unknown TIFF tag data type %i" % dtype)
        if code not in CUSTOM_TAGS:
            frmt = '%s%i%s' % (byte_order, count*int(dtype[0]), dtype[1])
            size = struct.calcsize(frmt)
            if size <= 4:
                # values up to 4 bytes are stored inline in the tag structure
                value = struct.unpack(frmt, value[:size])
            else:
                # larger values live elsewhere; the inline field is an offset
                pos = fhandle.tell()
                fhandle.seek(struct.unpack(byte_order+'I', value)[0])
                value = struct.unpack(frmt, fhandle.read(size))
                fhandle.seek(pos)
            if len(value) == 1:
                value = value[0]
            if dtype == '1s':
                value = stripnull(value)
        self.code = code
        self.name = name
        self.dtype = dtype
        self.count = count
        self.value = value

    def __str__(self):
        """Return string containing information about tag."""
        return ' '.join(str(getattr(self, s)) for s in self.__slots__)
class Record(dict):
    """Dictionary with attribute access.

    Can also be initialized with a numpy.core.records.record, in which case
    the field names become keys; trailing NULs are stripped from byte-string
    field values.
    """
    __slots__ = ()

    def __init__(self, arg=None):
        if arg is None:
            arg = {}
        try:
            dict.__init__(self, arg)
        except TypeError:
            # numpy record: copy the fields by position
            for i, name in enumerate(arg.dtype.names):
                v = arg[i]
                self[name] = v if v.dtype.char != 'S' else stripnull(v)

    def __getattr__(self, name):
        return self[name]

    def __setattr__(self, name, value):
        self.__setitem__(name, value)

    def __str__(self):
        """Return string with information about all tags."""
        s = []
        lists = []
        for k in sorted(self):
            if k.startswith('_'):
                continue
            v = self[k]
            # isinstance replaces exact type() comparison (idiomatic and
            # behaves identically for plain lists)
            if isinstance(v, list) and len(v) and isinstance(v[0], Record):
                lists.append((k, v))
            else:
                s.append(("* %s: %s" % (k, str(v)))[:PRINT_LINE_LEN])
        for k, v in lists:
            l = []
            for i, w in enumerate(v):
                l.append("* %s[%i]\n %s" % (k, i,
                    str(w).replace("\n", "\n ")))
            s.append('\n'.join(l))
        return '\n'.join(s)
class TiffTags(Record):
    """Dictionary of TIFFtags with attribute access."""
    def __str__(self):
        """Return string with information about all tags."""
        lines = []
        for tag in sorted(self.values(), key=lambda tag: tag.code):
            typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1])
            value = str(tag.value).split('\n', 1)[0]
            info = "* %i %s (%s) %s" % (tag.code, tag.name, typecode, value)
            lines.append(info[:PRINT_LINE_LEN])
        return '\n'.join(lines)
def read_nih_image_header(fhandle, byte_order, tag):
    """Read the NIH_IMAGE_HEADER tag and return it as a dictionary."""
    offset = struct.unpack(byte_order + 'I', tag.value)[0]
    # the version field lives 12 bytes into the header
    fhandle.seek(offset + 12)
    version = struct.unpack(byte_order + 'H', fhandle.read(2))[0]
    return {'version': version}
def read_mm_header(fhandle, byte_order, tag):
    """Read the MM_HEADER tag and return it as a numpy record."""
    offset = struct.unpack(byte_order + 'I', tag.value)[0]
    fhandle.seek(offset)
    return numpy.rec.fromfile(fhandle, MM_HEADER, 1, byteorder=byte_order)[0]
def read_mm_stamp(fhandle, byte_order, tag):
    """Read the MM_STAMP tag and return it as a numpy array of 8 float64."""
    offset = struct.unpack(byte_order + 'I', tag.value)[0]
    fhandle.seek(offset)
    return numpy.fromfile(fhandle, byte_order + '8f8', 1)[0]
def read_mm_uic1(fhandle, byte_order, tag):
    """Read the MM_UIC1 tag and return the known id/value pairs as a dict."""
    fhandle.seek(struct.unpack(byte_order + 'I', tag.value)[0])
    # the tag stores `count` (id, value) pairs of uint32
    raw = struct.unpack('%s%iI' % (byte_order, 2*tag.count),
                        fhandle.read(8*tag.count))
    return dict((MM_TAG_IDS[key], value)
                for key, value in zip(raw[::2], raw[1::2])
                if key in MM_TAG_IDS)
def read_mm_uic2(fhandle, byte_order, tag):
    """Read the MM_UIC2 tag and return plane count and z-distances."""
    fhandle.seek(struct.unpack(byte_order + 'I', tag.value)[0])
    # six uint32 values per plane; the first two form the z-distance
    # fraction, the rest are created/modified date and time stamps
    values = numpy.fromfile(fhandle, byte_order + 'I', 6*tag.count)
    return {'number_planes': tag.count,
            'z_distance': values[0::6] / values[1::6]}
def read_mm_uic3(fhandle, byte_order, tag):
    """Read the MM_UIC3 tag and return wavelengths as a dictionary."""
    fhandle.seek(struct.unpack(byte_order + 'I', tag.value)[0])
    # pairs of uint32 forming numerator/denominator fractions
    fractions = numpy.fromfile(fhandle, '%sI' % byte_order, 2*tag.count)
    return {'wavelengths': fractions[0::2] / fractions[1::2]}
def read_mm_uic4(fhandle, byte_order, tag):
    """Read the MM_UIC4 tag and return the known id/value pairs as a dict."""
    fhandle.seek(struct.unpack(byte_order + 'I', tag.value)[0])
    # `count` (int16 id, uint32 value) pairs, 6 bytes each (no padding)
    raw = struct.unpack(byte_order + 'hI'*tag.count,
                        fhandle.read(6*tag.count))
    return dict((MM_TAG_IDS[key], value)
                for key, value in zip(raw[::2], raw[1::2])
                if key in MM_TAG_IDS)
def read_cz_lsm_info(fhandle, byte_order, tag):
    """Read the CZ_LSM_INFO tag and return it as a numpy record."""
    fhandle.seek(struct.unpack(byte_order + 'I', tag.value)[0])
    record = numpy.rec.fromfile(fhandle, CZ_LSM_INFO, 1,
                                byteorder=byte_order)[0]
    # validation only: raises KeyError for unknown LSM magic numbers
    {50350412: '1.3', 67127628: '2.0'}[record.magic_number]
    return record
def read_cz_lsm_time_stamps(fhandle, byte_order):
    """Read an LSM time-stamp block and return the stamps as a tuple."""
    size, count = struct.unpack(byte_order + 'II', fhandle.read(8))
    if size != 8 + 8*count:
        raise ValueError("lsm_time_stamps block is too short")
    # `count` float64 time stamps follow the 8-byte header
    return struct.unpack('%s%dd' % (byte_order, count),
                         fhandle.read(8*count))
def read_cz_lsm_event_list(fhandle, byte_order):
    """Read LSM events and return them as a list of (time, type, text)."""
    size, count = struct.unpack(byte_order + 'II', fhandle.read(8))
    events = []
    for _ in range(count):
        # 16-byte event header: entry size, time stamp, event type
        esize, etime, etype = struct.unpack(byte_order + 'IdI',
                                            fhandle.read(16))
        events.append((etime, etype, stripnull(fhandle.read(esize - 16))))
    return events
def read_cz_lsm_scan_info(fhandle, byte_order):
    """Read LSM scan information from file and return as Record.

    The structure is a stream of (entry, dtype, size) records that open and
    close nested blocks; `blocks` acts as a stack of enclosing containers.
    """
    block = Record()
    blocks = [block]
    unpack = struct.unpack
    # the structure must start with the magic number 0x10000000
    if 0x10000000 != struct.unpack(byte_order+"I", fhandle.read(4))[0]:
        raise ValueError("not a lsm_scan_info structure")
    fhandle.read(8)  # skip 8 header bytes following the magic number
    while 1:
        entry, dtype, size = unpack(byte_order+"III", fhandle.read(12))
        if dtype == 2:
            # NUL-terminated string value
            value = stripnull(fhandle.read(size))
        elif dtype == 4:
            # signed 32-bit integer value
            value = unpack(byte_order+"i", fhandle.read(4))[0]
        elif dtype == 5:
            # 64-bit float value
            value = unpack(byte_order+"d", fhandle.read(8))[0]
        else:
            value = 0
        if entry in CZ_LSM_SCAN_INFO_ARRAYS:
            # start a list-valued sub-block attached to the current block
            blocks.append(block)
            name = CZ_LSM_SCAN_INFO_ARRAYS[entry]
            newobj = []
            setattr(block, name, newobj)
            block = newobj
        elif entry in CZ_LSM_SCAN_INFO_STRUCTS:
            # start a Record appended to the current (list) block
            blocks.append(block)
            newobj = Record()
            block.append(newobj)
            block = newobj
        elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES:
            name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry]
            setattr(block, name, value)
        elif entry == 0xffffffff:
            # end of the current block: pop back to the enclosing one
            block = blocks.pop()
        else:
            # unknown entry: keep it under a generated attribute name
            setattr(block, "unknown_%x" % entry, value)
        if not blocks:
            break
    return block
def _replace_by(module_function, warn=True):
"""Try replace decorated function by module.function."""
def decorate(func, module_function=module_function, warn=warn):
sys.path.append(os.path.dirname(__file__))
try:
module, function = module_function.split('.')
func, oldfunc = getattr(__import__(module), function), func
globals()['__old_' + func.__name__] = oldfunc
except Exception:
if warn:
warnings.warn("Failed to import %s" % module_function)
sys.path.pop()
return func
return decorate
@_replace_by('_tifffile.decodepackbits')
def decodepackbits(encoded):
    """Decompress PackBits encoded byte string.

    PackBits is a simple byte-oriented run-length compression scheme.
    """
    # under Python 2 indexing a str yields a str, so convert via ord()
    tobyte = ord if sys.version[0] == '2' else (lambda x: x)
    out = []
    pos = 0
    try:
        while True:
            header = tobyte(encoded[pos]) + 1
            pos += 1
            if header < 129:
                # literal run of `header` bytes
                out.extend(encoded[pos:pos + header])
                pos += header
            elif header > 129:
                # repeat the next byte (258 - header) times
                out.extend(encoded[pos:pos + 1] * (258 - header))
                pos += 1
            # header == 129 (raw 0x80) is a no-op per the PackBits spec
    except IndexError:
        # running off the end of the buffer terminates decoding
        pass
    return b''.join(out) if sys.version[0] == '2' else bytes(out)
@_replace_by('_tifffile.decodelzw')
def decodelzw(encoded):
    """Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string).

    The strip must begin with a CLEAR code and end with an EOI code.

    This is an implementation of the LZW decoding algorithm described in (1).
    It is not compatible with old style LZW compressed files like quad-lzw.tif.
    """
    unpack = struct.unpack
    # initial string table: codes 0-255 map to single bytes; the two extra
    # entries are placeholders for the CLEAR (256) and EOI (257) codes
    if sys.version[0] == '2':
        newtable = [chr(i) for i in range(256)]
    else:
        newtable = [bytes([i]) for i in range(256)]
    newtable.extend((0, 0))

    def next_code():
        """Return integer of `bitw` bits at `bitcount` position in encoded."""
        start = bitcount // 8
        s = encoded[start:start+4]
        try:
            code = unpack('>I', s)[0]
        except Exception:
            # fewer than 4 bytes remain near the end of the strip; zero-pad
            code = unpack('>I', s + b'\x00'*(4-len(s)))[0]
        code = code << (bitcount % 8)
        code = code & mask
        return code >> shr

    switchbitch = {  # code: bit-width, shr-bits, bit-mask
        255: (9, 23, int(9*'1'+'0'*23, 2)),
        511: (10, 22, int(10*'1'+'0'*22, 2)),
        1023: (11, 21, int(11*'1'+'0'*21, 2)),
        2047: (12, 20, int(12*'1'+'0'*20, 2)), }
    bitw, shr, mask = switchbitch[255]
    bitcount = 0
    if len(encoded) < 4:
        raise ValueError("strip must be at least 4 characters long")
    if next_code() != 256:
        raise ValueError("strip must begin with CLEAR code")
    code = oldcode = 0
    result = []
    while True:
        code = next_code()  # ~5% faster when inlining this function
        bitcount += bitw
        if code == 257:  # EOI
            break
        if code == 256:  # CLEAR
            # reset the string table and drop back to 9-bit codes
            table = newtable[:]
            lentable = 258
            bitw, shr, mask = switchbitch[255]
            code = next_code()
            bitcount += bitw
            if code == 257:  # EOI
                break
            result.append(table[code])
        else:
            if code < lentable:
                decoded = table[code]
                newcode = table[oldcode] + decoded[:1]
            else:
                # KwKwK special case: code is not yet in the table
                newcode = table[oldcode]
                newcode += newcode[:1]
                decoded = newcode
            result.append(decoded)
            table.append(newcode)
            lentable += 1
        oldcode = code
        if lentable in switchbitch:
            # grow the code width when the table reaches a threshold size
            bitw, shr, mask = switchbitch[lentable]
    if code != 257:
        raise ValueError("unexpected end of stream (code %i)" % code)
    return b''.join(result)
#@_replace_by('_tifffile.unpackints')
def unpackints(data, dtype, intsize, runlen=0):
    """Decompress byte string to array of integers of any bit size <= 32.

    Arguments
    ---------
    data : str
        Byte string of packed integers.
    dtype : numpy.dtype or str
        A numpy boolean or integer type.
    intsize : int
        Number of bits per integer.
    runlen : int
        Number of consecutive integers, after which to start at next byte.
        If 0, the whole buffer is treated as one run.

    Raises ValueError for unsupported dtypes or bit sizes.
    """
    if intsize == 1:  # bitarray
        # frombuffer replaces the deprecated numpy.fromstring; unpackbits
        # returns a fresh writable array, so the read-only view is fine
        data = numpy.frombuffer(data, '|B')
        data = numpy.unpackbits(data)
        if runlen % 8 != 0:
            # drop the pad bits that align each run to a byte boundary
            data = data.reshape(-1, runlen+(8-runlen % 8))
            data = data[:, :runlen].reshape(-1)
        return data.astype(dtype)
    dtype = numpy.dtype(dtype)
    # BUGFIX: the original check `32 < intsize < 1` could never be true,
    # so out-of-range bit sizes were silently accepted
    if intsize < 1 or intsize > 32:
        raise ValueError("intsize out of range")
    if dtype.kind not in "biu":
        raise ValueError("invalid dtype")
    if intsize > dtype.itemsize * 8:
        raise ValueError("dtype.itemsize too small")
    for i in (8, 16, 32):
        if intsize <= i:
            itembytes = i // 8
            break
    if runlen == 0:
        runlen = len(data) // itembytes
    skipbits = runlen*intsize % 8
    if skipbits:
        skipbits = 8 - skipbits
    shrbits = itembytes*8 - intsize
    bitmask = int(intsize*'1'+'0'*shrbits, 2)
    if dtype.byteorder == '|':
        dtypestr = '=' + dtype.char
    else:
        dtypestr = dtype.byteorder + dtype.char
    unpack = struct.unpack
    # total number of integers that fit in the buffer
    l = runlen * (len(data)*8 // (runlen*intsize + skipbits))
    result = numpy.empty((l, ), dtype)
    bitcount = 0
    for i in range(len(result)):
        start = bitcount // 8
        s = data[start:start+itembytes]
        try:
            code = unpack(dtypestr, s)[0]
        except Exception:
            # zero-pad when fewer than itembytes remain at the buffer end
            code = unpack(dtypestr, s + b'\x00'*(itembytes-len(s)))[0]
        code = code << (bitcount % 8)
        code = code & bitmask
        result[i] = code >> shrbits
        bitcount += intsize
        if (i+1) % runlen == 0:
            bitcount += skipbits
    return result
def stripnull(string):
    """Return string truncated at the first NUL character."""
    pos = string.find(b'\x00')
    if pos < 0:
        return string
    return string[:pos]
def datetime_from_timestamp(n, epoch=datetime.datetime.fromordinal(693594)):
    """Return datetime object from timestamp in Excel serial format.

    >>> datetime_from_timestamp(40237.029999999795)
    datetime.datetime(2010, 2, 28, 0, 43, 11, 999982)
    """
    # ordinal 693594 is the Excel serial-date epoch (1899-12-30)
    return epoch + datetime.timedelta(days=n)
def test_tifffile(directory='testimages', verbose=True):
    """Read all images in directory. Print error message on failure.

    >>> test_tifffile(verbose=False)

    """
    import glob
    successful = 0
    failed = 0
    start = time.time()
    for f in glob.glob(os.path.join(directory, '*.*')):
        if verbose:
            print("\n%s>" % f.lower(), end=' ')
        t0 = time.time()
        try:
            tif = TIFFfile(f)
        except Exception as e:
            if not verbose:
                print(f, end=' ')
            print("ERROR:", e)
            failed += 1
            continue
        try:
            try:
                img = tif.asarray()
            except ValueError:
                # pages disagree in shape; fall back to the first page
                img = tif[0].asarray()
        except Exception as e:
            # BUGFIX: this path previously fell through to the success
            # accounting and later referenced the unbound `img` (NameError
            # when verbose); count it as a failure and move on instead
            if not verbose:
                print(f, end=' ')
            print("ERROR:", e)
            failed += 1
            continue
        finally:
            tif.close()
        successful += 1
        if verbose:
            print("%s, %s %s, %s, %.0f ms" % (str(tif), str(img.shape),
                img.dtype, tif[0].compression, (time.time()-t0) * 1e3))
    if verbose:
        print("\nSuccessfully read %i of %i files in %.3f s\n" % (
            successful, successful+failed, time.time()-start))
class TIFF_SUBFILE_TYPES(object):
    """Map a NewSubfileType bit mask to a tuple of flag names."""
    def __getitem__(self, key):
        # each set bit contributes its flag name, in fixed order
        names = ((1, 'reduced_image'), (2, 'page'), (4, 'mask'))
        return tuple(name for bit, name in names if key & bit)
# Lookup tables translating numeric TIFF tag values to symbolic names.
TIFF_OSUBFILE_TYPES = {
    0: 'undefined',
    1: 'image',  # full-resolution image data
    2: 'reduced_image',  # reduced-resolution image data
    3: 'page'}  # a single page of a multi-page image
TIFF_PHOTOMETRICS = {
    0: 'miniswhite',
    1: 'minisblack',
    2: 'rgb',
    3: 'palette',
    4: 'mask',
    5: 'separated',
    6: 'cielab',
    7: 'icclab',
    8: 'itulab',
    32844: 'logl',
    32845: 'logluv'}
# NOTE(review): name is missing an 'R' ("COMPRESSIONS") but is part of the
# module's public surface, so it is kept as-is; same for TIFF_DECOMPESSORS
# and the 'cittfax4' value below, which code elsewhere may compare against.
TIFF_COMPESSIONS = {
    1: None,
    2: 'ccittrle',
    3: 'ccittfax3',
    4: 'cittfax4',
    5: 'lzw',
    6: 'ojpeg',
    7: 'jpeg',
    8: 'adobe_deflate',
    9: 't85',
    10: 't43',
    32766: 'next',
    32771: 'ccittrlew',
    32773: 'packbits',
    32809: 'thunderscan',
    32895: 'it8ctpad',
    32896: 'it8lw',
    32897: 'it8mp',
    32898: 'it8bl',
    32908: 'pixarfilm',
    32909: 'pixarlog',
    32946: 'deflate',
    32947: 'dcs',
    34661: 'jbig',
    34676: 'sgilog',
    34677: 'sgilog24',
    34712: 'jp2000'}
# Maps compression name to a bytes -> bytes decompressor.
TIFF_DECOMPESSORS = {
    None: lambda x: x,
    'adobe_deflate': zlib.decompress,
    'deflate': zlib.decompress,
    'packbits': decodepackbits,
    'lzw': decodelzw}
TIFF_DATA_TYPES = {
    1: '1B',  # BYTE 8-bit unsigned integer.
    2: '1s',  # ASCII 8-bit byte that contains a 7-bit ASCII code;
              # the last byte must be NUL (binary zero).
    3: '1H',  # SHORT 16-bit (2-byte) unsigned integer
    4: '1I',  # LONG 32-bit (4-byte) unsigned integer.
    5: '2I',  # RATIONAL Two LONGs: the first represents the numerator of
              # a fraction; the second, the denominator.
    6: '1b',  # SBYTE An 8-bit signed (twos-complement) integer.
    7: '1B',  # UNDEFINED An 8-bit byte that may contain anything,
              # depending on the definition of the field.
    8: '1h',  # SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
    9: '1i',  # SLONG A 32-bit (4-byte) signed (twos-complement) integer.
    10: '2i',  # SRATIONAL Two SLONGs: the first represents the numerator
               # of a fraction, the second the denominator.
    11: '1f',  # FLOAT Single precision (4-byte) IEEE format.
    12: '1d'}  # DOUBLE Double precision (8-byte) IEEE format.
TIFF_BYTE_ORDERS = {
    b'II': '<',  # little endian
    b'MM': '>'}  # big endian
TIFF_SAMPLE_FORMATS = {
    1: 'uint',
    2: 'int',
    3: 'float',
    #4: 'void',
    #5: 'complex_int',
    6: 'complex'}
# Map (sample format name, bits per sample) to a numpy dtype character.
# Fixed: the key ('float', 64) appeared twice; the duplicate entry was
# removed (a later duplicate silently overwrites the earlier one in a
# dict literal, so behavior is unchanged but the source is now honest).
TIFF_SAMPLE_DTYPES = {
    ('uint', 1): '?',  # bitmap
    ('uint', 2): 'B',
    ('uint', 4): 'B',
    ('uint', 5): 'B',
    ('uint', 6): 'B',
    ('uint', 8): 'B',
    ('uint', 10): 'H',
    ('uint', 12): 'H',
    ('uint', 14): 'H',
    ('uint', 16): 'H',
    ('uint', 24): 'I',
    ('uint', 32): 'I',
    ('uint', 64): 'L',
    ('int', 8): 'b',
    ('int', 16): 'h',
    ('int', 32): 'i',
    ('int', 64): 'l',
    ('float', 32): 'f',
    ('float', 64): 'd',
    ('complex', 64): 'F',
    ('complex', 128): 'D'}
# Map TIFF Predictor (tag 317) values to predictor names.
TIFF_PREDICTORS = {
    1: None,  # no prediction scheme applied before compression
    2: 'horizontal'}
#3: 'floatingpoint',
# Map TIFF Orientation (tag 274) values to orientation names.
TIFF_ORIENTATIONS = {
    1: 'top_left',
    2: 'top_right',
    3: 'bottom_right',
    4: 'bottom_left',
    5: 'left_top',
    6: 'right_top',
    7: 'right_bottom',
    8: 'left_bottom'}
# Map TIFF FillOrder (tag 266) values to bit-order names.
TIFF_FILLORDERS = {
    1: 'msb2lsb',
    2: 'lsb2msb'}
# Map TIFF ResolutionUnit (tag 296) values to unit names.
TIFF_RESUNITS = {
    1: 'none',
    2: 'inch',
    3: 'centimeter'}
# Map TIFF PlanarConfiguration (tag 284) values to sample-layout names.
TIFF_PLANARCONFIGS = {
    1: 'contig',
    2: 'separate'}
# Map TIFF ExtraSamples (tag 338) values to extra-channel semantics.
TIFF_EXTRA_SAMPLES = {
    0: 'unspecified',
    1: 'assocalpha',
    2: 'unassalpha'}
# MetaMorph STK tags
# Map MetaMorph UIC tag indices to attribute names; commented-out entries
# are known tag indices that this module does not parse.
MM_TAG_IDS = {
    0: 'auto_scale',
    1: 'min_scale',
    2: 'max_scale',
    3: 'spatial_calibration',
    #4: 'x_calibration',
    #5: 'y_calibration',
    #6: 'calibration_units',
    #7: 'name',
    8: 'thresh_state',
    9: 'thresh_state_red',
    11: 'thresh_state_green',
    12: 'thresh_state_blue',
    13: 'thresh_state_lo',
    14: 'thresh_state_hi',
    15: 'zoom',
    #16: 'create_time',
    #17: 'last_saved_time',
    18: 'current_buffer',
    19: 'gray_fit',
    20: 'gray_point_count',
    #21: 'gray_x',
    #22: 'gray_y',
    #23: 'gray_min',
    #24: 'gray_max',
    #25: 'gray_unit_name',
    26: 'standard_lut',
    27: 'wavelength',
    #28: 'stage_position',
    #29: 'camera_chip_offset',
    #30: 'overlay_mask',
    #31: 'overlay_compress',
    #32: 'overlay',
    #33: 'special_overlay_mask',
    #34: 'special_overlay_compress',
    #35: 'special_overlay',
    36: 'image_property',
    #37: 'stage_label',
    #38: 'autoscale_lo_info',
    #39: 'autoscale_hi_info',
    #40: 'absolute_z',
    #41: 'absolute_z_valid',
    #42: 'gamma',
    #43: 'gamma_red',
    #44: 'gamma_green',
    #45: 'gamma_blue',
    #46: 'camera_bin',
    47: 'new_lut',
    #48: 'image_property_ex',
    49: 'plane_property',
    #50: 'user_lut_table',
    51: 'red_autoscale_info',
    #52: 'red_autoscale_lo_info',
    #53: 'red_autoscale_hi_info',
    54: 'red_minscale_info',
    55: 'red_maxscale_info',
    56: 'green_autoscale_info',
    #57: 'green_autoscale_lo_info',
    #58: 'green_autoscale_hi_info',
    59: 'green_minscale_info',
    60: 'green_maxscale_info',
    61: 'blue_autoscale_info',
    #62: 'blue_autoscale_lo_info',
    #63: 'blue_autoscale_hi_info',
    64: 'blue_min_scale_info',
    65: 'blue_max_scale_info'}
#66: 'overlay_plane_color',
# Olympus Fluoview
# numpy structured-dtype field list describing one Fluoview dimension record.
MM_DIMENSION = [
    ('name', 'a16'),
    ('size', 'i4'),
    ('origin', 'f8'),
    ('resolution', 'f8'),
    ('unit', 'a64')]
# numpy structured-dtype field list for the Olympus Fluoview MM_HEADER record.
MM_HEADER = [
    ('header_flag', 'i2'),
    ('image_type', 'u1'),
    ('image_name', 'a257'),
    ('offset_data', 'u4'),
    ('palette_size', 'i4'),
    ('offset_palette0', 'u4'),
    ('offset_palette1', 'u4'),
    ('comment_size', 'i4'),
    ('offset_comment', 'u4'),
    ('dimensions', MM_DIMENSION, 10),  # up to 10 nested dimension records
    ('offset_position', 'u4'),
    ('map_type', 'i2'),
    ('map_min', 'f8'),
    ('map_max', 'f8'),
    ('min_value', 'f8'),
    ('max_value', 'f8'),
    ('offset_map', 'u4'),
    ('gamma', 'f8'),
    ('offset', 'f8'),
    ('gray_channel', MM_DIMENSION),
    ('offset_thumbnail', 'u4'),
    ('voice_field', 'i4'),
    ('offset_voice_field', 'u4')]
# Carl Zeiss LSM
# numpy structured-dtype field list for the CZ_LSM_INFO header record.
# 'offset_*' fields are file offsets to optional sub-records.
CZ_LSM_INFO = [
    ('magic_number', 'i4'),
    ('structure_size', 'i4'),
    ('dimension_x', 'i4'),
    ('dimension_y', 'i4'),
    ('dimension_z', 'i4'),
    ('dimension_channels', 'i4'),
    ('dimension_time', 'i4'),
    ('dimension_data_type', 'i4'),
    ('thumbnail_x', 'i4'),
    ('thumbnail_y', 'i4'),
    ('voxel_size_x', 'f8'),
    ('voxel_size_y', 'f8'),
    ('voxel_size_z', 'f8'),
    ('origin_x', 'f8'),
    ('origin_y', 'f8'),
    ('origin_z', 'f8'),
    ('scan_type', 'u2'),
    ('spectral_scan', 'u2'),
    ('data_type', 'u4'),
    ('offset_vector_overlay', 'u4'),
    ('offset_input_lut', 'u4'),
    ('offset_output_lut', 'u4'),
    ('offset_channel_colors', 'u4'),
    ('time_interval', 'f8'),
    ('offset_channel_data_types', 'u4'),
    ('offset_scan_information', 'u4'),
    ('offset_ks_data', 'u4'),
    ('offset_time_stamps', 'u4'),
    ('offset_event_list', 'u4'),
    ('offset_roi', 'u4'),
    ('offset_bleach_roi', 'u4'),
    ('offset_next_recording', 'u4'),
    ('display_aspect_x', 'f8'),
    ('display_aspect_y', 'f8'),
    ('display_aspect_z', 'f8'),
    ('display_aspect_time', 'f8'),
    ('offset_mean_of_roi_overlay', 'u4'),
    ('offset_topo_isoline_overlay', 'u4'),
    ('offset_topo_profile_overlay', 'u4'),
    ('offset_linescan_overlay', 'u4'),
    ('offset_toolbar_flags', 'u4')]
# Import functions for LSM_INFO subrecords; keys match the 'offset_<name>'
# fields of CZ_LSM_INFO.
CZ_LSM_INFO_READERS = {
    'scan_information': read_cz_lsm_scan_info,
    'time_stamps': read_cz_lsm_time_stamps,
    'event_list': read_cz_lsm_event_list}
# Map cz_lsm_info.scan_type to dimension order
CZ_SCAN_TYPES = {
    0: 'XYZCT',  # x-y-z scan
    1: 'XYZCT',  # z scan (x-z plane)
    2: 'XYZCT',  # line scan
    3: 'XYTCZ',  # time series x-y
    4: 'XYZTC',  # time series x-z
    5: 'XYTCZ',  # time series 'Mean of ROIs'
    6: 'XYZTC',  # time series x-y-z
    7: 'XYCTZ',  # spline scan
    8: 'XYCZT',  # spline scan x-z
    9: 'XYTCZ',  # time series spline plane x-z
    10: 'XYZCT'}  # point mode
# Map dimension codes to cz_lsm_info attribute
CZ_DIMENSIONS = {
    'X': 'dimension_x',
    'Y': 'dimension_y',
    'Z': 'dimension_z',
    'C': 'dimension_channels',
    'T': 'dimension_time'}
# Descriptions of cz_lsm_info.data_type
CZ_DATA_TYPES = {
    0: 'varying data types',
    2: '12 bit unsigned integer',
    5: '32 bit float'}
# Scan-information entry codes that introduce an array of sub-blocks.
CZ_LSM_SCAN_INFO_ARRAYS = {
    0x20000000: "tracks",
    0x30000000: "lasers",
    0x60000000: "detectionchannels",
    0x80000000: "illuminationchannels",
    0xa0000000: "beamsplitters",
    0xc0000000: "datachannels",
    0x13000000: "markers",
    0x11000000: "timers"}
# Scan-information entry codes that introduce a single structured sub-block.
CZ_LSM_SCAN_INFO_STRUCTS = {
    0x40000000: "tracks",
    0x50000000: "lasers",
    0x70000000: "detectionchannels",
    0x90000000: "illuminationchannels",
    0xb0000000: "beamsplitters",
    0xd0000000: "datachannels",
    0x14000000: "markers",
    0x12000000: "timers"}
# Map CZ LSM scan-information attribute codes to attribute names.  The high
# byte of the code selects the sub-block the attribute belongs to
# (0x10 recording, 0x50 lasers, 0x40 tracks, 0x70 detection channels,
# 0x90 illumination channels, 0xb0 beam splitters, 0xd0 data channels,
# 0x14 markers, 0x12 timers).
CZ_LSM_SCAN_INFO_ATTRIBUTES = {
    0x10000001: "name",
    0x10000002: "description",
    0x10000003: "notes",
    0x10000004: "objective",
    0x10000005: "processing_summary",
    0x10000006: "special_scan_mode",
    0x10000007: "oledb_recording_scan_type",
    0x10000008: "oledb_recording_scan_mode",
    0x10000009: "number_of_stacks",
    0x1000000a: "lines_per_plane",
    0x1000000b: "samples_per_line",
    0x1000000c: "planes_per_volume",
    0x1000000d: "images_width",
    0x1000000e: "images_height",
    0x1000000f: "images_number_planes",
    0x10000010: "images_number_stacks",
    0x10000011: "images_number_channels",
    0x10000012: "linscan_xy_size",
    0x10000013: "scan_direction",
    0x10000014: "time_series",
    0x10000015: "original_scan_data",
    0x10000016: "zoom_x",
    0x10000017: "zoom_y",
    0x10000018: "zoom_z",
    0x10000019: "sample_0x",
    0x1000001a: "sample_0y",
    0x1000001b: "sample_0z",
    0x1000001c: "sample_spacing",
    0x1000001d: "line_spacing",
    0x1000001e: "plane_spacing",
    0x1000001f: "plane_width",
    0x10000020: "plane_height",
    0x10000021: "volume_depth",
    0x10000023: "nutation",
    0x10000034: "rotation",
    0x10000035: "precession",
    0x10000036: "sample_0time",
    0x10000037: "start_scan_trigger_in",
    0x10000038: "start_scan_trigger_out",
    0x10000039: "start_scan_event",
    0x10000040: "start_scan_time",
    0x10000041: "stop_scan_trigger_in",
    0x10000042: "stop_scan_trigger_out",
    0x10000043: "stop_scan_event",
    0x10000044: "stop_scan_time",
    0x10000045: "use_rois",
    0x10000046: "use_reduced_memory_rois",
    0x10000047: "user",
    0x10000048: "use_bccorrection",
    0x10000049: "position_bccorrection1",
    0x10000050: "position_bccorrection2",
    0x10000051: "interpolation_y",
    0x10000052: "camera_binning",
    0x10000053: "camera_supersampling",
    0x10000054: "camera_frame_width",
    0x10000055: "camera_frame_height",
    0x10000056: "camera_offset_x",
    0x10000057: "camera_offset_y",
    # lasers
    0x50000001: "name",
    0x50000002: "acquire",
    0x50000003: "power",
    # tracks
    0x40000001: "multiplex_type",
    0x40000002: "multiplex_order",
    0x40000003: "sampling_mode",
    0x40000004: "sampling_method",
    0x40000005: "sampling_number",
    0x40000006: "acquire",
    0x40000007: "sample_observation_time",
    0x4000000b: "time_between_stacks",
    0x4000000c: "name",
    0x4000000d: "collimator1_name",
    0x4000000e: "collimator1_position",
    0x4000000f: "collimator2_name",
    0x40000010: "collimator2_position",
    0x40000011: "is_bleach_track",
    0x40000012: "is_bleach_after_scan_number",
    0x40000013: "bleach_scan_number",
    0x40000014: "trigger_in",
    0x40000015: "trigger_out",
    0x40000016: "is_ratio_track",
    0x40000017: "bleach_count",
    0x40000018: "spi_center_wavelength",
    0x40000019: "pixel_time",
    0x40000021: "condensor_frontlens",
    0x40000023: "field_stop_value",
    0x40000024: "id_condensor_aperture",
    0x40000025: "condensor_aperture",
    0x40000026: "id_condensor_revolver",
    0x40000027: "condensor_filter",
    0x40000028: "id_transmission_filter1",
    0x40000029: "id_transmission1",
    0x40000030: "id_transmission_filter2",
    0x40000031: "id_transmission2",
    0x40000032: "repeat_bleach",
    0x40000033: "enable_spot_bleach_pos",
    0x40000034: "spot_bleach_posx",
    0x40000035: "spot_bleach_posy",
    0x40000036: "spot_bleach_posz",
    0x40000037: "id_tubelens",
    0x40000038: "id_tubelens_position",
    0x40000039: "transmitted_light",
    0x4000003a: "reflected_light",
    0x4000003b: "simultan_grab_and_bleach",
    0x4000003c: "bleach_pixel_time",
    # detection_channels
    0x70000001: "integration_mode",
    0x70000002: "special_mode",
    0x70000003: "detector_gain_first",
    0x70000004: "detector_gain_last",
    0x70000005: "amplifier_gain_first",
    0x70000006: "amplifier_gain_last",
    0x70000007: "amplifier_offs_first",
    0x70000008: "amplifier_offs_last",
    0x70000009: "pinhole_diameter",
    0x7000000a: "counting_trigger",
    0x7000000b: "acquire",
    0x7000000c: "point_detector_name",
    0x7000000d: "amplifier_name",
    0x7000000e: "pinhole_name",
    0x7000000f: "filter_set_name",
    0x70000010: "filter_name",
    0x70000013: "integrator_name",
    0x70000014: "detection_channel_name",
    0x70000015: "detection_detector_gain_bc1",
    0x70000016: "detection_detector_gain_bc2",
    0x70000017: "detection_amplifier_gain_bc1",
    0x70000018: "detection_amplifier_gain_bc2",
    0x70000019: "detection_amplifier_offset_bc1",
    0x70000020: "detection_amplifier_offset_bc2",
    0x70000021: "detection_spectral_scan_channels",
    0x70000022: "detection_spi_wavelength_start",
    0x70000023: "detection_spi_wavelength_stop",
    0x70000026: "detection_dye_name",
    0x70000027: "detection_dye_folder",
    # illumination_channels
    0x90000001: "name",
    0x90000002: "power",
    0x90000003: "wavelength",
    0x90000004: "aquire",
    0x90000005: "detchannel_name",
    0x90000006: "power_bc1",
    0x90000007: "power_bc2",
    # beam_splitters
    0xb0000001: "filter_set",
    0xb0000002: "filter",
    0xb0000003: "name",
    # data_channels
    0xd0000001: "name",
    0xd0000003: "acquire",
    0xd0000004: "color",
    0xd0000005: "sample_type",
    0xd0000006: "bits_per_sample",
    0xd0000007: "ratio_type",
    0xd0000008: "ratio_track1",
    0xd0000009: "ratio_track2",
    0xd000000a: "ratio_channel1",
    0xd000000b: "ratio_channel2",
    0xd000000c: "ratio_const1",
    0xd000000d: "ratio_const2",
    0xd000000e: "ratio_const3",
    0xd000000f: "ratio_const4",
    0xd0000010: "ratio_const5",
    0xd0000011: "ratio_const6",
    0xd0000012: "ratio_first_images1",
    0xd0000013: "ratio_first_images2",
    0xd0000014: "dye_name",
    0xd0000015: "dye_folder",
    0xd0000016: "spectrum",
    0xd0000017: "acquire",
    # markers
    0x14000001: "name",
    0x14000002: "description",
    0x14000003: "trigger_in",
    0x14000004: "trigger_out",
    # timers
    0x12000001: "name",
    0x12000002: "description",
    0x12000003: "interval",
    0x12000004: "trigger_in",
    0x12000005: "trigger_out",
    0x12000006: "activation_time",
    0x12000007: "activation_number"}
# Map TIFF tag codes to attribute names, default value, type, count, validator
# (validator is a dict of allowed values, or None for unconstrained tags).
TIFF_TAGS = {
    254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()),
    255: ('subfile_type', None, 3, 1, TIFF_OSUBFILE_TYPES),
    256: ('image_width', None, 4, 1, None),
    257: ('image_length', None, 4, 1, None),
    258: ('bits_per_sample', 1, 3, 1, None),
    259: ('compression', 1, 3, 1, TIFF_COMPESSIONS),
    262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS),
    266: ('fill_order', 1, 3, 1, TIFF_FILLORDERS),
    269: ('document_name', None, 2, None, None),
    270: ('image_description', None, 2, None, None),
    271: ('make', None, 2, None, None),
    272: ('model', None, 2, None, None),
    273: ('strip_offsets', None, 4, None, None),
    274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS),
    277: ('samples_per_pixel', 1, 3, 1, None),
    278: ('rows_per_strip', 2**32-1, 4, 1, None),
    279: ('strip_byte_counts', None, 4, None, None),  # required
    #280: ('min_sample_value', 0, 3, None, None),
    #281: ('max_sample_value', None, 3, None, None),  # 2**bits_per_sample
    282: ('x_resolution', None, 5, 1, None),
    283: ('y_resolution', None, 5, 1, None),
    284: ('planar_configuration', 1, 3, 1, TIFF_PLANARCONFIGS),
    285: ('page_name', None, 2, None, None),
    296: ('resolution_unit', 2, 4, 1, TIFF_RESUNITS),
    305: ('software', None, 2, None, None),
    306: ('datetime', None, 2, None, None),
    315: ('artist', None, 2, None, None),
    316: ('host_computer', None, 2, None, None),
    317: ('predictor', 1, 3, 1, TIFF_PREDICTORS),
    320: ('color_map', None, 3, None, None),
    338: ('extra_samples', None, 3, None, TIFF_EXTRA_SAMPLES),
    339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS),
    33432: ('copyright', None, 2, None, None),
    32997: ('image_depth', None, 4, 1, None),
    32998: ('tile_depth', None, 4, 1, None)}
# Map custom TIFF tag codes to attribute names and import functions
# (None means the tag is recognized but its payload is not parsed).
CUSTOM_TAGS = {
    33628: ('mm_uic1', read_mm_uic1),
    33629: ('mm_uic2', read_mm_uic2),
    33630: ('mm_uic3', read_mm_uic3),
    33631: ('mm_uic4', read_mm_uic4),
    34361: ('mm_header', read_mm_header),
    34362: ('mm_stamp', read_mm_stamp),
    34386: ('mm_user_block', None),
    34412: ('cz_lsm_info', read_cz_lsm_info),
    43314: ('nih_image_header', read_nih_image_header)}
# Max line length of printed output
PRINT_LINE_LEN = 79
def imshow(data, title=None, isrgb=True, vmin=0, vmax=None,
           cmap=None, photometric='rgb', interpolation='bilinear',
           dpi=96, figure=None, subplot=111, maxdim=4096, **kwargs):
    """Plot n-dimensional images using matplotlib.pyplot.

    Return figure, subplot and plot axis.
    Requires pyplot already imported ``from matplotlib import pyplot``.

    Arguments
    ---------
    isrgb : bool
        If True, data will be displayed as RGB(A) images if possible.
    photometric : str
        'miniswhite', 'minisblack', 'rgb', or 'palette'
    title : str
        Window and subplot title.
    figure : a matplotlib.figure.Figure instance (optional).
    subplot : int
        A matplotlib.pyplot.subplot axis.
    maxdim : int
        maximum image size in any dimension.

    Other arguments are same as for matplotlib.pyplot.imshow.
    """
    if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'):
        raise ValueError("Can't handle %s photometrics" % photometric)
    data = data.squeeze()
    # Clip every axis to maxdim to keep rendering tractable.
    data = data[(slice(0, maxdim), ) * len(data.shape)]
    dims = len(data.shape)
    if dims < 2:
        raise ValueError("not an image")
    if dims == 2:
        # A plain 2-D array: nothing to browse, cannot be RGB.
        dims = 0
        isrgb = False
    else:
        # Move a length-3/4 axis to the end so it can serve as RGB(A)
        # samples; afterwards `dims` counts only the browsable leading axes.
        if (isrgb and data.shape[-3] in (3, 4)):
            data = numpy.swapaxes(data, -3, -2)
            data = numpy.swapaxes(data, -2, -1)
        elif (not isrgb and data.shape[-1] in (3, 4)):
            data = numpy.swapaxes(data, -3, -1)
            data = numpy.swapaxes(data, -2, -1)
        isrgb = isrgb and data.shape[-1] in (3, 4)
        dims -= 3 if isrgb else 2
    datamax = data.max()
    if data.dtype in (numpy.int8, numpy.int16, numpy.int32,
                      numpy.uint8, numpy.uint16, numpy.uint32):
        # Round the observed maximum up to the next common bit depth.
        for bits in (1, 2, 4, 6, 8, 10, 12, 14, 16, 24, 32):
            if datamax <= 2**bits:
                datamax = 2**bits
                break
        if isrgb:
            data *= (255.0 / datamax)  # better use digitize()
            data = data.astype('B')
    elif isrgb:
        data /= datamax
    if not isrgb and vmax is None:
        vmax = datamax
    pyplot = sys.modules['matplotlib.pyplot']
    if figure is None:
        pyplot.rc('font', family='sans-serif', weight='normal', size=8)
        figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True,
                               facecolor='1.0', edgecolor='w')
        try:
            # Not every matplotlib backend exposes a Tk window here.
            figure.canvas.manager.window.title(title)
        except Exception:
            pass
        # Reserve space at the bottom for one slider per browsable axis.
        pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.925,
                               left=0.1, right=0.95, hspace=0.05, wspace=0.0)
    subplot = pyplot.subplot(subplot)
    if title:
        pyplot.title(title, size=11)
    if cmap is None:
        if photometric == 'miniswhite':
            cmap = pyplot.cm.binary
        else:
            cmap = pyplot.cm.gray
    # Show the first plane; sliders below update it in place.
    image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax,
                          cmap=cmap, interpolation=interpolation, **kwargs)
    if not isrgb:
        pyplot.colorbar()  # panchor=(0.55, 0.5), fraction=0.05)

    def format_coord(x, y):
        """Callback to format coordinate display in toolbar."""
        x = int(x + 0.5)
        y = int(y + 0.5)
        try:
            if dims:
                # `cur_ax_dat`/`current` are only bound when dims > 0 below.
                return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x],
                                               current, x, y)
            else:
                return "%s @ [%4i, %4i]" % (data[y, x], x, y)
        except IndexError:
            return ""

    pyplot.gca().format_coord = format_coord
    if dims:
        # Shared mutable state for the slider/keyboard callbacks:
        # `current` is the index along each browsable axis, `cur_ax_dat`
        # holds [active axis, currently displayed plane].
        current = list((0, ) * dims)
        cur_ax_dat = [0, data[tuple(current)].squeeze()]
        sliders = [pyplot.Slider(
            pyplot.axes([0.125, 0.03*(axis+1), 0.725, 0.025]),
            'Dimension %i' % axis, 0, data.shape[axis]-1, 0, facecolor='0.5',
            valfmt='%%.0f of %i' % data.shape[axis]) for axis in range(dims)]
        for slider in sliders:
            slider.drawon = False

        def set_image(current, sliders=sliders, data=data):
            """Change image and redraw canvas."""
            cur_ax_dat[1] = data[tuple(current)].squeeze()
            image.set_data(cur_ax_dat[1])
            for ctrl, index in zip(sliders, current):
                # Suppress slider events while syncing positions to avoid
                # re-entering on_changed.
                ctrl.eventson = False
                ctrl.set_val(index)
                ctrl.eventson = True
            figure.canvas.draw()

        def on_changed(index, axis, data=data, image=image, figure=figure,
                       current=current):
            """Callback for slider change event."""
            index = int(round(index))
            cur_ax_dat[0] = axis
            if index == current[axis]:
                return
            # Wrap around at either end of the axis.
            if index >= data.shape[axis]:
                index = 0
            elif index < 0:
                index = data.shape[axis] - 1
            current[axis] = index
            set_image(current)

        def on_keypressed(event, data=data, current=current):
            """Callback for key press event."""
            key = event.key
            axis = cur_ax_dat[0]
            if str(key) in '0123456789':
                on_changed(key, axis)
            elif key == 'right':
                on_changed(current[axis] + 1, axis)
            elif key == 'left':
                on_changed(current[axis] - 1, axis)
            elif key == 'up':
                cur_ax_dat[0] = 0 if axis == len(data.shape)-1 else axis + 1
            elif key == 'down':
                cur_ax_dat[0] = len(data.shape)-1 if axis == 0 else axis - 1
            elif key == 'end':
                on_changed(data.shape[axis] - 1, axis)
            elif key == 'home':
                on_changed(0, axis)

        figure.canvas.mpl_connect('key_press_event', on_keypressed)
        for axis, ctrl in enumerate(sliders):
            # Bind `axis` via default argument so each slider keeps its own.
            ctrl.on_changed(lambda k, a=axis: on_changed(k, a))
    return figure, subplot, image
def main(argv=None):
    """Command line usage main function.

    Parses options, reads the TIFF file given on the command line, prints
    its structure, and optionally displays the image with matplotlib.
    Returns a process exit status (0) or falls off the end (None) after
    showing the plot window.
    """
    # NOTE(review): float(sys.version[0:3]) misparses two-digit minor
    # versions (e.g. "3.10" -> 3.1) — harmless for this >= 2.6 check,
    # but worth confirming if the threshold ever changes.
    if float(sys.version[0:3]) < 2.6:
        print("This script requires Python version 2.6 or better.")
        print("This is Python version %s" % sys.version)
        return 0
    if argv is None:
        argv = sys.argv
    import re
    import optparse

    # Pull usage/version strings out of the module docstring when present.
    search_doc = lambda r, d: re.search(r, __doc__).group(1) if __doc__ else d
    parser = optparse.OptionParser(
        usage="usage: %prog [options] path",
        description=search_doc("\n\n([^|]*?)\n\n", ''),
        version="%%prog %s" % search_doc(":Version: (.*)", "Unknown"))
    opt = parser.add_option
    opt('-p', '--page', dest='page', type='int', default=-1,
        help="display single page")
    opt('--noplot', dest='noplot', action='store_true', default=False,
        help="don't display images")
    opt('--norgb', dest='norgb', action='store_true', default=False,
        help="don't try display as RGB(A) color images")
    opt('--nocolmap', dest='nocolmap', action='store_true', default=False,
        help="don't apply color mapping to paletted images")
    opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear',
        help="image interpolation method")
    opt('--dpi', dest='dpi', type='int', default=96,
        help="set plot resolution")
    opt('--test', dest='test', action='store_true', default=False,
        help="try read all images in path")
    opt('--doctest', dest='doctest', action='store_true', default=False,
        help="runs the internal tests")
    opt('-v', '--verbose', dest='verbose', action='store_true', default=True)
    opt('-q', '--quiet', dest='verbose', action='store_false')
    settings, path = parser.parse_args()
    # Re-join positional args so paths containing spaces work unquoted.
    path = ' '.join(path)
    if settings.doctest:
        import doctest
        doctest.testmod()
        return 0
    if not path:
        parser.error("No file specified")
    if settings.test:
        test_tifffile(path, settings.verbose)
        return 0
    print("Reading file structure...", end=' ')
    start = time.time()
    tif = TIFFfile(path)
    print("%.3f ms" % ((time.time()-start) * 1e3))
    img = None
    if not settings.noplot:
        print("Reading image data... ", end=' ')
        start = time.time()
        try:
            if settings.page < 0:
                # All pages as one array.
                img = tif.asarray(colormapped=not settings.nocolmap,
                                  rgbonly=not settings.norgb)
            else:
                img = tif[settings.page].asarray(rgbonly=not settings.norgb,
                                                 colormapped=not settings.nocolmap)
            print("%.3f ms" % ((time.time()-start) * 1e3))
        except ValueError as e:
            # Continue without an image so the structure is still printed.
            print(e)
            # raise
    tif.close()
    print("\nTIFF file:", tif)
    page = 0 if settings.page < 0 else settings.page
    print("\nPAGE %i:" % page, tif[page])
    page = tif[page]
    print(page.tags)
    if page.is_palette:
        print("\nColor Map:", page.color_map.shape, page.color_map.dtype)
    # Dump any vendor-specific metadata records the page carries.
    for attr in ('cz_lsm_info', 'cz_lsm_scan_information',
                 'mm_uic_tags', 'mm_header', 'nih_image_header'):
        if hasattr(page, attr):
            print("", attr.upper(), "", Record(getattr(page, attr)), sep="\n")
    if img is not None and not settings.noplot:
        try:
            import matplotlib
            matplotlib.use('TkAgg')
            from matplotlib import pyplot
        except ImportError as e:
            warnings.warn("Failed to import matplotlib.\n%s" % e)
        else:
            imshow(img, title=', '.join((str(tif), str(tif[0]))),
                   photometric=page.photometric,
                   interpolation=settings.interpol,
                   dpi=settings.dpi, isrgb=not settings.norgb)
            pyplot.show()
# Documentation in HTML format can be generated with Epydoc
__docformat__ = "restructuredtext en"

# Script entry point: exit with main()'s return value.
if __name__ == "__main__":
    sys.exit(main())
|
AVBelyy/concrete-java
|
services/src/main/java/edu/jhu/hlt/concrete/services/store/StoreServiceWrapper.java
|
<gh_stars>10-100
/*
*
*/
package edu.jhu.hlt.concrete.services.store;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.server.TNonblockingServer;
import org.apache.thrift.server.TServer;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TNonblockingServerSocket;
import org.apache.thrift.transport.TNonblockingServerTransport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.jhu.hlt.concrete.access.StoreCommunicationService;
import edu.jhu.hlt.concrete.access.StoreCommunicationService.Iface;
/**
 * Wraps a Thrift {@link StoreCommunicationService} implementation in a
 * non-blocking Thrift server using the compact protocol over framed
 * transport.  {@link #run()} blocks until {@link #close()} is invoked,
 * so it is typically executed on a dedicated thread.
 */
public class StoreServiceWrapper implements AutoCloseable, Runnable {

  private static final Logger LOGGER = LoggerFactory.getLogger(StoreServiceWrapper.class);

  private final TNonblockingServerTransport serverXport;
  private final TServer server;
  private final TNonblockingServer.Args servArgs;

  /**
   * Creates (but does not start) a server exposing {@code impl} on {@code port}.
   *
   * @param impl the service implementation to expose
   * @param port the TCP port to listen on
   * @throws TException if the server socket cannot be opened
   */
  public StoreServiceWrapper(StoreCommunicationService.Iface impl, int port) throws TException {
    this.serverXport = new TNonblockingServerSocket(port);
    final TNonblockingServer.Args args = new TNonblockingServer.Args(this.serverXport);
    args.protocolFactory(new TCompactProtocol.Factory());
    final TFramedTransport.Factory transFactory = new TFramedTransport.Factory(Integer.MAX_VALUE);
    args.transportFactory(transFactory);
    StoreCommunicationService.Processor<Iface> proc = new StoreCommunicationService.Processor<>(impl);
    args.processorFactory(new TProcessorFactory(proc));
    // Accept arbitrarily large framed requests.
    args.maxReadBufferBytes = Long.MAX_VALUE;
    this.servArgs = args;
    this.server = new TNonblockingServer(this.servArgs);
  }

  @Override
  public void run() {
    // Fix: log before serve() — serve() blocks until stop() is called, so
    // the original post-serve() log line only ever appeared at shutdown.
    LOGGER.debug("Server is serving.");
    this.server.serve();
  }

  @Override
  public void close() {
    LOGGER.debug("Preparing to stop.");
    this.server.stop();
    LOGGER.debug("Preparing to close.");
    this.serverXport.close();
  }
}
|
jonggyup/RequestOrganizer
|
Code/drivers/gpu/drm/virtio/virtgpu_ioctl.c
|
/*
* Copyright (C) 2015 Red Hat, Inc.
* All Rights Reserved.
*
* Authors:
* <NAME>
* <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#include <drm/drmP.h>
#include <drm/virtgpu_drm.h>
#include <drm/ttm/ttm_execbuf_util.h>
#include "virtgpu_drv.h"
/*
 * Translate a userspace drm_virtgpu_3d_box into the little-endian
 * virtio_gpu_box wire format expected by the device.
 */
static void convert_to_hw_box(struct virtio_gpu_box *dst,
			      const struct drm_virtgpu_3d_box *src)
{
	/* box origin */
	dst->x = cpu_to_le32(src->x);
	dst->y = cpu_to_le32(src->y);
	dst->z = cpu_to_le32(src->z);
	/* box extent */
	dst->w = cpu_to_le32(src->w);
	dst->h = cpu_to_le32(src->h);
	dst->d = cpu_to_le32(src->d);
}
/* DRM_IOCTL_VIRTGPU_MAP: report the fake mmap offset for a dumb buffer. */
static int virtio_gpu_map_ioctl(struct drm_device *dev, void *data,
				struct drm_file *file_priv)
{
	struct drm_virtgpu_map *args = data;
	struct virtio_gpu_device *vgdev = dev->dev_private;

	return virtio_gpu_mode_dumb_mmap(file_priv, vgdev->ddev,
					 args->handle, &args->offset);
}
/*
 * Reserve every buffer on @head and validate each against its placement.
 * On failure all reservations are backed off and the error is returned;
 * on success every buffer remains reserved for the caller.
 */
static int virtio_gpu_object_list_validate(struct ww_acquire_ctx *ticket,
					   struct list_head *head)
{
	struct ttm_operation_ctx ctx = { false, false };
	struct ttm_validate_buffer *entry;
	int ret;

	ret = ttm_eu_reserve_buffers(ticket, head, true, NULL);
	if (ret != 0)
		return ret;

	list_for_each_entry(entry, head, head) {
		struct virtio_gpu_object *obj =
			container_of(entry->bo, struct virtio_gpu_object, tbo);

		ret = ttm_bo_validate(entry->bo, &obj->placement, &ctx);
		if (ret) {
			ttm_eu_backoff_reservation(ticket, head);
			return ret;
		}
	}

	return 0;
}
/* Drop the GEM reference held on every buffer in @head. */
static void virtio_gpu_unref_list(struct list_head *head)
{
	struct ttm_validate_buffer *entry;

	list_for_each_entry(entry, head, head) {
		struct virtio_gpu_object *obj =
			container_of(entry->bo, struct virtio_gpu_object, tbo);

		drm_gem_object_put_unlocked(&obj->gem_base);
	}
}
/*
* Usage of execbuffer:
* Relocations need to take into account the full VIRTIO_GPUDrawable size.
* However, the command as passed from user space must *not* contain the initial
* VIRTIO_GPUReleaseInfo struct (first XXX bytes)
*/
/*
 * DRM_IOCTL_VIRTGPU_EXECBUFFER: look up the referenced BOs, validate and
 * reserve them, copy the command stream from userspace, and submit it to
 * the host with a fence attached to every buffer.
 */
static int virtio_gpu_execbuffer_ioctl(struct drm_device *dev, void *data,
				       struct drm_file *drm_file)
{
	struct drm_virtgpu_execbuffer *exbuf = data;
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_fpriv *vfpriv = drm_file->driver_priv;
	struct drm_gem_object *gobj;
	struct virtio_gpu_fence *fence;
	struct virtio_gpu_object *qobj;
	int ret;
	uint32_t *bo_handles = NULL;
	void __user *user_bo_handles = NULL;
	struct list_head validate_list;
	struct ttm_validate_buffer *buflist = NULL;
	int i;
	struct ww_acquire_ctx ticket;
	void *buf;

	if (vgdev->has_virgl_3d == false)
		return -ENOSYS;

	INIT_LIST_HEAD(&validate_list);
	if (exbuf->num_bo_handles) {
		bo_handles = kvmalloc_array(exbuf->num_bo_handles,
					    sizeof(uint32_t), GFP_KERNEL);
		buflist = kvmalloc_array(exbuf->num_bo_handles,
					 sizeof(struct ttm_validate_buffer),
					 GFP_KERNEL | __GFP_ZERO);
		if (!bo_handles || !buflist) {
			kvfree(bo_handles);
			kvfree(buflist);
			return -ENOMEM;
		}

		user_bo_handles = (void __user *)(uintptr_t)exbuf->bo_handles;
		if (copy_from_user(bo_handles, user_bo_handles,
				   exbuf->num_bo_handles * sizeof(uint32_t))) {
			ret = -EFAULT;
			kvfree(bo_handles);
			kvfree(buflist);
			return ret;
		}

		for (i = 0; i < exbuf->num_bo_handles; i++) {
			gobj = drm_gem_object_lookup(drm_file, bo_handles[i]);
			if (!gobj) {
				/*
				 * Fix: drop the GEM references already taken
				 * for earlier handles (they are on
				 * validate_list); returning without this
				 * leaked one reference per prior iteration.
				 */
				virtio_gpu_unref_list(&validate_list);
				kvfree(bo_handles);
				kvfree(buflist);
				return -ENOENT;
			}
			qobj = gem_to_virtio_gpu_obj(gobj);
			buflist[i].bo = &qobj->tbo;
			list_add(&buflist[i].head, &validate_list);
		}
		kvfree(bo_handles);
	}

	ret = virtio_gpu_object_list_validate(&ticket, &validate_list);
	if (ret)
		goto out_free;

	buf = memdup_user((void __user *)(uintptr_t)exbuf->command,
			  exbuf->size);
	if (IS_ERR(buf)) {
		ret = PTR_ERR(buf);
		goto out_unresv;
	}
	/*
	 * NOTE(review): the submitted buffer and the fence are assumed to be
	 * consumed/allocated by virtio_gpu_cmd_submit — confirm against its
	 * definition (not visible in this file chunk).
	 */
	virtio_gpu_cmd_submit(vgdev, buf, exbuf->size,
			      vfpriv->ctx_id, &fence);

	ttm_eu_fence_buffer_objects(&ticket, &validate_list, &fence->f);

	/* fence the command bo */
	virtio_gpu_unref_list(&validate_list);
	kvfree(buflist);
	dma_fence_put(&fence->f);
	return 0;

out_unresv:
	ttm_eu_backoff_reservation(&ticket, &validate_list);
out_free:
	virtio_gpu_unref_list(&validate_list);
	kvfree(buflist);
	return ret;
}
static int virtio_gpu_getparam_ioctl(struct drm_device *dev, void *data,
struct drm_file *file_priv)
{
struct virtio_gpu_device *vgdev = dev->dev_private;
struct drm_virtgpu_getparam *param = data;
int value;
switch (param->param) {
case VIRTGPU_PARAM_3D_FEATURES:
value = vgdev->has_virgl_3d == true ? 1 : 0;
break;
case VIRTGPU_PARAM_CAPSET_QUERY_FIX:
value = 1;
break;
default:
return -EINVAL;
}
if (copy_to_user((void __user *)(unsigned long)param->value,
&value, sizeof(int))) {
return -EFAULT;
}
return 0;
}
/*
 * DRM_IOCTL_VIRTGPU_RESOURCE_CREATE: allocate a GEM/TTM-backed object,
 * create the matching host resource (2D or 3D path), attach backing pages,
 * and hand a GEM handle back to userspace.
 */
static int virtio_gpu_resource_create_ioctl(struct drm_device *dev, void *data,
					    struct drm_file *file_priv)
{
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct drm_virtgpu_resource_create *rc = data;
	int ret;
	uint32_t res_id;
	struct virtio_gpu_object *qobj;
	struct drm_gem_object *obj;
	uint32_t handle = 0;
	uint32_t size;
	struct list_head validate_list;
	struct ttm_validate_buffer mainbuf;
	/* NOTE(review): fence stays NULL on the 2D path and is only ever
	 * dereferenced inside has_virgl_3d branches — keep it that way. */
	struct virtio_gpu_fence *fence = NULL;
	struct ww_acquire_ctx ticket;
	struct virtio_gpu_resource_create_3d rc_3d;

	/* Without virgl the host only supports plain 2D resources. */
	if (vgdev->has_virgl_3d == false) {
		if (rc->depth > 1)
			return -EINVAL;
		if (rc->nr_samples > 1)
			return -EINVAL;
		if (rc->last_level > 1)
			return -EINVAL;
		if (rc->target != 2)
			return -EINVAL;
		if (rc->array_size > 1)
			return -EINVAL;
	}

	INIT_LIST_HEAD(&validate_list);
	memset(&mainbuf, 0, sizeof(struct ttm_validate_buffer));

	virtio_gpu_resource_id_get(vgdev, &res_id);

	size = rc->size;

	/* allocate a single page size object */
	if (size == 0)
		size = PAGE_SIZE;

	qobj = virtio_gpu_alloc_object(dev, size, false, false);
	if (IS_ERR(qobj)) {
		ret = PTR_ERR(qobj);
		goto fail_id;
	}
	obj = &qobj->gem_base;

	if (!vgdev->has_virgl_3d) {
		/* 2D path: create the host resource and attach backing. */
		virtio_gpu_cmd_create_resource(vgdev, res_id, rc->format,
					       rc->width, rc->height);

		ret = virtio_gpu_object_attach(vgdev, qobj, res_id, NULL);
	} else {
		/* use a gem reference since unref list undoes them */
		drm_gem_object_get(&qobj->gem_base);
		mainbuf.bo = &qobj->tbo;
		list_add(&mainbuf.head, &validate_list);

		ret = virtio_gpu_object_list_validate(&ticket, &validate_list);
		if (ret) {
			DRM_DEBUG("failed to validate\n");
			goto fail_unref;
		}

		/* Marshal the request into the little-endian wire struct. */
		rc_3d.resource_id = cpu_to_le32(res_id);
		rc_3d.target = cpu_to_le32(rc->target);
		rc_3d.format = cpu_to_le32(rc->format);
		rc_3d.bind = cpu_to_le32(rc->bind);
		rc_3d.width = cpu_to_le32(rc->width);
		rc_3d.height = cpu_to_le32(rc->height);
		rc_3d.depth = cpu_to_le32(rc->depth);
		rc_3d.array_size = cpu_to_le32(rc->array_size);
		rc_3d.last_level = cpu_to_le32(rc->last_level);
		rc_3d.nr_samples = cpu_to_le32(rc->nr_samples);
		rc_3d.flags = cpu_to_le32(rc->flags);

		virtio_gpu_cmd_resource_create_3d(vgdev, &rc_3d, NULL);
		ret = virtio_gpu_object_attach(vgdev, qobj, res_id, &fence);
		if (ret) {
			ttm_eu_backoff_reservation(&ticket, &validate_list);
			goto fail_unref;
		}
		ttm_eu_fence_buffer_objects(&ticket, &validate_list, &fence->f);
	}

	qobj->hw_res_handle = res_id;

	ret = drm_gem_handle_create(file_priv, obj, &handle);
	if (ret) {
		drm_gem_object_release(obj);
		if (vgdev->has_virgl_3d) {
			virtio_gpu_unref_list(&validate_list);
			dma_fence_put(&fence->f);
		}
		return ret;
	}
	/* Handle now owns a reference; drop ours. */
	drm_gem_object_put_unlocked(obj);

	rc->res_handle = res_id; /* similar to a VM address */
	rc->bo_handle = handle;

	if (vgdev->has_virgl_3d) {
		virtio_gpu_unref_list(&validate_list);
		dma_fence_put(&fence->f);
	}
	return 0;
fail_unref:
	if (vgdev->has_virgl_3d) {
		virtio_gpu_unref_list(&validate_list);
		dma_fence_put(&fence->f);
	}
//fail_obj:
//	drm_gem_object_handle_unreference_unlocked(obj);
fail_id:
	virtio_gpu_resource_id_put(vgdev, res_id);
	return ret;
}
/*
 * DRM_IOCTL_VIRTGPU_RESOURCE_INFO: report the size and host resource
 * handle of the BO identified by ri->bo_handle.
 */
static int virtio_gpu_resource_info_ioctl(struct drm_device *dev, void *data,
					  struct drm_file *file_priv)
{
	struct drm_virtgpu_resource_info *ri = data;
	struct drm_gem_object *gobj;
	struct virtio_gpu_object *qobj;

	gobj = drm_gem_object_lookup(file_priv, ri->bo_handle);
	if (gobj == NULL)
		return -ENOENT;

	qobj = gem_to_virtio_gpu_obj(gobj);
	ri->size = qobj->gem_base.size;
	ri->res_handle = qobj->hw_res_handle;

	drm_gem_object_put_unlocked(gobj);
	return 0;
}
/*
 * DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST: copy a region of a host 3D
 * resource back into the guest buffer object. Only available when the
 * host supports virgl 3D.
 */
static int virtio_gpu_transfer_from_host_ioctl(struct drm_device *dev,
					       void *data,
					       struct drm_file *file)
{
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_fpriv *vfpriv = file->driver_priv;
	struct drm_virtgpu_3d_transfer_from_host *args = data;
	struct ttm_operation_ctx ctx = { true, false };
	struct drm_gem_object *gobj = NULL;
	struct virtio_gpu_object *qobj = NULL;
	struct virtio_gpu_fence *fence;
	int ret;
	u32 offset = args->offset;
	struct virtio_gpu_box box;

	/* 3D transfers make no sense without virgl support on the host. */
	if (vgdev->has_virgl_3d == false)
		return -ENOSYS;

	gobj = drm_gem_object_lookup(file, args->bo_handle);
	if (gobj == NULL)
		return -ENOENT;

	qobj = gem_to_virtio_gpu_obj(gobj);

	/* Reserve the BO so the transfer cannot race other users. */
	ret = virtio_gpu_object_reserve(qobj, false);
	if (ret)
		goto out;

	ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx);
	if (unlikely(ret))
		goto out_unres;

	convert_to_hw_box(&box, &args->box);
	/* Queues the command and allocates @fence via the &fence out-param. */
	virtio_gpu_cmd_transfer_from_host_3d
		(vgdev, qobj->hw_res_handle,
		 vfpriv->ctx_id, offset, args->level,
		 &box, &fence);
	/* Attach the fence exclusively so readers wait for completion. */
	reservation_object_add_excl_fence(qobj->tbo.resv,
					  &fence->f);

	dma_fence_put(&fence->f);
out_unres:
	virtio_gpu_object_unreserve(qobj);
out:
	/* drop the reference taken by the lookup */
	drm_gem_object_put_unlocked(gobj);
	return ret;
}
/*
 * DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST: push a region of a guest buffer
 * object to the host resource. Works in both 2D (no virgl) and 3D modes;
 * only the 3D path produces a fence.
 */
static int virtio_gpu_transfer_to_host_ioctl(struct drm_device *dev, void *data,
					     struct drm_file *file)
{
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct virtio_gpu_fpriv *vfpriv = file->driver_priv;
	struct drm_virtgpu_3d_transfer_to_host *args = data;
	struct ttm_operation_ctx ctx = { true, false };
	struct drm_gem_object *gobj = NULL;
	struct virtio_gpu_object *qobj = NULL;
	struct virtio_gpu_fence *fence;
	struct virtio_gpu_box box;
	int ret;
	u32 offset = args->offset;

	gobj = drm_gem_object_lookup(file, args->bo_handle);
	if (gobj == NULL)
		return -ENOENT;

	qobj = gem_to_virtio_gpu_obj(gobj);

	/* Reserve the BO for the duration of the transfer submission. */
	ret = virtio_gpu_object_reserve(qobj, false);
	if (ret)
		goto out;

	ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx);
	if (unlikely(ret))
		goto out_unres;

	convert_to_hw_box(&box, &args->box);
	if (!vgdev->has_virgl_3d) {
		/* 2D path: fire-and-forget, no fence is created. */
		virtio_gpu_cmd_transfer_to_host_2d
			(vgdev, qobj->hw_res_handle, offset,
			 box.w, box.h, box.x, box.y, NULL);
	} else {
		/*
		 * 3D path: vfpriv may be NULL here (no per-file context),
		 * in which case context id 0 is used. @fence is allocated
		 * by the command helper via the out-param.
		 */
		virtio_gpu_cmd_transfer_to_host_3d
			(vgdev, qobj->hw_res_handle,
			 vfpriv ? vfpriv->ctx_id : 0, offset,
			 args->level, &box, &fence);
		reservation_object_add_excl_fence(qobj->tbo.resv,
						  &fence->f);
		dma_fence_put(&fence->f);
	}

out_unres:
	virtio_gpu_object_unreserve(qobj);
out:
	/* drop the reference taken by the lookup */
	drm_gem_object_put_unlocked(gobj);
	return ret;
}
/*
 * DRM_IOCTL_VIRTGPU_WAIT: wait for (or, with VIRTGPU_WAIT_NOWAIT, just
 * poll) completion of outstanding work on a buffer object.
 */
static int virtio_gpu_wait_ioctl(struct drm_device *dev, void *data,
				 struct drm_file *file)
{
	struct drm_virtgpu_3d_wait *args = data;
	struct drm_gem_object *gem_obj;
	struct virtio_gpu_object *bo;
	bool nowait;
	int ret;

	gem_obj = drm_gem_object_lookup(file, args->handle);
	if (!gem_obj)
		return -ENOENT;

	bo = gem_to_virtio_gpu_obj(gem_obj);
	nowait = (args->flags & VIRTGPU_WAIT_NOWAIT) != 0;
	ret = virtio_gpu_object_wait(bo, nowait);
	/* drop the reference taken by the lookup */
	drm_gem_object_put_unlocked(gem_obj);
	return ret;
}
/*
 * DRM_IOCTL_VIRTGPU_GET_CAPS: copy a capability set to userspace, serving
 * from the driver-side cache when possible and otherwise querying the host.
 */
static int virtio_gpu_get_caps_ioctl(struct drm_device *dev,
				     void *data, struct drm_file *file)
{
	struct virtio_gpu_device *vgdev = dev->dev_private;
	struct drm_virtgpu_get_caps *args = data;
	unsigned size, host_caps_size;
	int i;
	int found_valid = -1;
	int ret;
	struct virtio_gpu_drv_cap_cache *cache_ent;
	void *ptr;

	if (vgdev->num_capsets == 0)
		return -ENOSYS;
	/* don't allow userspace to pass 0 */
	if (args->size == 0)
		return -EINVAL;

	/* display_info_lock protects both capsets[] and the cap cache list. */
	spin_lock(&vgdev->display_info_lock);
	for (i = 0; i < vgdev->num_capsets; i++) {
		if (vgdev->capsets[i].id == args->cap_set_id) {
			if (vgdev->capsets[i].max_version >= args->cap_set_ver) {
				found_valid = i;
				break;
			}
		}
	}

	if (found_valid == -1) {
		spin_unlock(&vgdev->display_info_lock);
		return -EINVAL;
	}

	host_caps_size = vgdev->capsets[found_valid].max_size;
	/* only copy to user the minimum of the host caps size or the guest caps size */
	size = min(args->size, host_caps_size);

	/* Cache hit: reuse the previously fetched caps buffer. */
	list_for_each_entry(cache_ent, &vgdev->cap_cache, head) {
		if (cache_ent->id == args->cap_set_id &&
		    cache_ent->version == args->cap_set_ver) {
			ptr = cache_ent->caps_cache;
			spin_unlock(&vgdev->display_info_lock);
			goto copy_exit;
		}
	}
	spin_unlock(&vgdev->display_info_lock);

	/* not in cache - need to talk to hw */
	virtio_gpu_cmd_get_capset(vgdev, found_valid, args->cap_set_ver,
				  &cache_ent);

	/*
	 * Wait for the host response; 0 from wait_event_timeout means the
	 * 5s timeout elapsed without is_valid becoming true.
	 * NOTE(review): on timeout cache_ent remains queued — presumably it
	 * is completed/filled later by the response handler; verify no leak.
	 */
	ret = wait_event_timeout(vgdev->resp_wq,
				 atomic_read(&cache_ent->is_valid), 5 * HZ);
	if (!ret)
		return -EBUSY;

	/* is_valid check must precede the read of the cache entry contents. */
	smp_rmb();
	ptr = cache_ent->caps_cache;

copy_exit:
	if (copy_to_user((void __user *)(unsigned long)args->addr, ptr, size))
		return -EFAULT;

	return 0;
}
/*
 * ioctl dispatch table for the virtio-gpu DRM driver. Every entry requires
 * authentication and is available to render nodes.
 */
struct drm_ioctl_desc virtio_gpu_ioctls[DRM_VIRTIO_NUM_IOCTLS] = {
	DRM_IOCTL_DEF_DRV(VIRTGPU_MAP, virtio_gpu_map_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_EXECBUFFER, virtio_gpu_execbuffer_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_GETPARAM, virtio_gpu_getparam_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_RESOURCE_CREATE,
			  virtio_gpu_resource_create_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_RESOURCE_INFO, virtio_gpu_resource_info_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	/* make transfer async to the main ring? - no sure, can we
	 * thread these in the underlying GL
	 */
	DRM_IOCTL_DEF_DRV(VIRTGPU_TRANSFER_FROM_HOST,
			  virtio_gpu_transfer_from_host_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(VIRTGPU_TRANSFER_TO_HOST,
			  virtio_gpu_transfer_to_host_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_WAIT, virtio_gpu_wait_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),

	DRM_IOCTL_DEF_DRV(VIRTGPU_GET_CAPS, virtio_gpu_get_caps_ioctl,
			  DRM_AUTH | DRM_UNLOCKED | DRM_RENDER_ALLOW),
};
|
dmontoya1/cajas
|
cajas/movement/models/movement_don_juan.py
|
from django.db import models
from cajas.boxes.models.box_don_juan import BoxDonJuan
from .movement_mixin import MovementMixin
class MovementDonJuan(MovementMixin):
    """Movement record for the "Presidente" (Don Juan) boxes.

    Extends ``MovementMixin`` with an optional link to the box the
    movement belongs to, plus the integer PKs of related movements in
    other boxes (Caja Colombia, office, Don Juan USD).
    """

    # Nullable FK: on_delete=SET_NULL means box_don_juan becomes None
    # when the related box is deleted.
    box_don_juan = models.ForeignKey(
        BoxDonJuan,
        verbose_name='Caja Presidente Oficina',
        on_delete=models.SET_NULL,
        blank=True, null=True,
        related_name='movements'
    )
    # PK of the related movement in the Caja Colombia box, if any.
    movement_box_colombia = models.IntegerField(
        "Movimiento Caja Colombia",
        blank=True,
        null=True
    )
    # PK of the related office movement, if any.
    movement_office = models.IntegerField(
        "Movimiento Oficina PK",
        blank=True,
        null=True
    )
    # PK of the related Don Juan USD movement, if any.
    movement_don_juan_usd = models.IntegerField(
        "Movimiento Presidente Dólares PK",
        null=True,
        blank=True
    )

    def __str__(self):
        # Bug fix: box_don_juan is a nullable FK (SET_NULL), so it may be
        # None; the original dereferenced .office unconditionally and
        # raised AttributeError for orphaned movements.
        if self.box_don_juan is not None and self.box_don_juan.office is not None:
            return "Movimiento de la caja de {} de Presidente".format(self.box_don_juan.office)
        return "Movimiento de la caja de Presidente"

    class Meta:
        verbose_name = 'Movimiento de Presidente'
        verbose_name_plural = 'Movimientos de Presidente'
        # Newest first, ties broken by descending PK.
        ordering = ['-date', '-pk']
|
folio-org/mod-marccat
|
src/main/java/org/folio/marccat/business/common/group/RegExpGroup.java
|
package org.folio.marccat.business.common.group;
import org.apache.regexp.RE;
import org.folio.marccat.business.cataloguing.common.Tag;
/**
 * A MARC tag group whose membership test is a regular expression matched
 * against a tag's MARC encoding string.
 */
public class RegExpGroup extends MarcTagGroup {

  /** Compiled pattern used to test MARC tag strings; never reassigned. */
  private final RE tagPattern;

  public RegExpGroup(boolean canSort, boolean singleSort, String pattern) {
    super(canSort, singleSort);
    tagPattern = new RE(pattern);
  }

  /** Returns true when the tag's MARC tag string matches the pattern. */
  public boolean contains(Tag t) {
    return tagPattern.match(t.getMarcEncoding().getMarcTag());
  }
}
|
moissinac/csvfix
|
csvfix/src/csved_util.cpp
|
//---------------------------------------------------------------------------
// csved_util.cpp
//
// utilities for CSVED
//
// Copyright (C) 2009 <NAME>
//---------------------------------------------------------------------------
#include "a_base.h"
#include "a_env.h"
#include "a_str.h"
#include "a_collect.h"
#include "csved_util.h"
#include "csved_except.h"
using std::string;
using std::vector;
namespace CSVED {
//---------------------------------------------------------------------------
// Convert comma list to vector of ints to be used as col index. We now allow
// ranges in the form n1:n2.
//---------------------------------------------------------------------------
// Append zero-based indexes n1-1 .. n2-1 to fl in increasing order.
static void MakeAscending( int n1, int n2, FieldList & fl ) {
	for ( int i = n1; i <= n2; ++i ) {
		fl.push_back( i - 1 );
	}
}
// Append zero-based indexes n1-1 .. n2-1 to fl in decreasing order.
static void MakeDescending( int n1, int n2, FieldList & fl ) {
	for ( int i = n1; i >= n2; --i ) {
		fl.push_back( i - 1 );
	}
}
// Convert a comma list of 1-based field specs into a vector of 0-based
// indexes. Each entry is either a single integer "n" or a range "n1:n2";
// descending ranges (n1 > n2) are expanded in descending order.
// Throws via CSVTHROW on any malformed or non-positive entry.
void CommaListToIndex( const ALib::CommaList & cl,
						FieldList & idx ) {

	idx.clear();

	for ( unsigned int i = 0; i < cl.Size(); i++ ) {
		string cle = cl.At(i);
		vector <string> fl;
		// Split on ':' — 1 part means single index, 2 parts means range.
		ALib::Split( cle, ':', fl );
		if ( fl.size() > 2 ) {
			CSVTHROW( "Invalid field: " << cle );
		}
		else if ( fl.size() == 2 ) {
			// Range form n1:n2 — both ends must be positive integers.
			if ( ! (ALib::IsInteger(fl[0]) && ALib::IsInteger(fl[1])) ) {
				CSVTHROW( "Invalid range: " << cle );
			}
			int n1 = ALib::ToInteger( fl[0] );
			int n2 = ALib::ToInteger( fl[1] );
			if ( n1 < 1 || n2 < 1 ) {
				CSVTHROW( "Invalid range: " << cle );
			}
			// Direction of expansion preserves the user's ordering.
			if ( n1 < n2 ) {
				MakeAscending( n1, n2, idx );
			}
			else {
				MakeDescending( n1, n2, idx );
			}
		}
		else {
			// Single index form.
			if ( ! ALib::IsInteger( cle ) ) {
				CSVTHROW( "Need integer, not '" << cle << "'" );
			}
			int n = ALib::ToInteger( cle );
			if ( n < 1 ) {
				CSVTHROW( "Index must be greater than zero, not '" << cle << "'" );
			}
			idx.push_back( n - 1 );	// convert to zero-based
		}
	}
}
//---------------------------------------------------------------------------
// Compare two CSV rows - return as for strcmp. If field list is provided,
// compare only fields in list.
//----------------------------------------------------------------------------
// Lexicographically compare two CSV rows field by field, returning
// strcmp-style -1/0/1. When f is non-empty only the listed field
// indexes participate; missing fields compare as empty strings.
int CmpRow( const CSVRow & a, const CSVRow & b, const FieldList & f ) {
	unsigned int n = std::max( a.size(), b.size() );
	for ( unsigned int i = 0; i < n; i++ ) {
		if ( f.size() && ! ALib::Contains( f, i ) ) {
			continue;
		}
		string fa = GetField( a, i );
		string fb = GetField( b, i );
		if ( fa < fb ) {
			return -1;
		}
		if ( fb < fa ) {
			return 1;
		}
		// equal — move on to the next field
	}
	return 0;
}
//----------------------------------------------------------------------------
// Get field or empty string if field does not exist
//----------------------------------------------------------------------------
// Return field i of row, or the empty string when i is out of range.
std::string GetField( const CSVRow & row, unsigned int i ) {
	if ( i < row.size() ) {
		return row[i];
	}
	return "";
}
//----------------------------------------------------------------------------
// Check that not both of two flags are specified, and throw if they are.
//----------------------------------------------------------------------------
// Throw if flags a and b are both present; when r is Required, also
// throw if neither is present.
void NotBoth( const ALib::CommandLine & cmd, const std::string & a,
				const std::string & b, ReqOp r ) {
	bool hasA = cmd.HasFlag( a );
	bool hasB = cmd.HasFlag( b );
	if ( hasA && hasB ) {
		CSVTHROW( "Cannot specify both " << a << " and " << b << " options");
	}
	if ( r == ReqOp::Required && ! hasA && ! hasB ) {
		CSVTHROW( "Need one of " << a << " or " << b << " options" );
	}
}
//---------------------------------------------------------------------------
} // end namespace
// end
|
kevcadieux/Sprout
|
sprout/compost/formats/as_imag.hpp
|
/*=============================================================================
Copyright (c) 2011-2019 <NAME>
https://github.com/bolero-MURAKAMI/Sprout
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#ifndef SPROUT_COMPOST_FORMATS_AS_IMAG_HPP
#define SPROUT_COMPOST_FORMATS_AS_IMAG_HPP
#include <utility>
#include <sprout/config.hpp>
#include <sprout/complex.hpp>
#include <sprout/utility/forward.hpp>
#include <sprout/range/adaptor/transformed.hpp>
namespace sprout {
namespace compost {
//
// to_imag_value
//
// Function object extracting the imaginary part of a complex-like value.
// imag() is called unqualified so ADL picks up user-defined overloads as
// well as sprout::imag.
struct to_imag_value {
public:
	template<typename Complex>
	SPROUT_CONSTEXPR decltype(imag(std::declval<Complex const&>())) operator()(Complex const& x) const {
		return imag(x);
	}
};
namespace formats {
//
// as_imag_forwarder
//
class as_imag_forwarder {};
//
// as_imag
//
namespace {
SPROUT_STATIC_CONSTEXPR sprout::compost::formats::as_imag_forwarder as_imag = {};
} // anonymous-namespace
//
// operator|
//
// Range | as_imag: lazily transform each element of the range to its
// imaginary part via the transformed adaptor.
template<typename Range>
inline SPROUT_CONSTEXPR auto
operator|(Range&& lhs, sprout::compost::formats::as_imag_forwarder const&)
-> decltype(
	SPROUT_FORWARD(Range, lhs)
		| sprout::adaptors::transformed(sprout::compost::to_imag_value())
	)
{
	return SPROUT_FORWARD(Range, lhs)
		| sprout::adaptors::transformed(sprout::compost::to_imag_value())
		;
}
} // namespace formats
using sprout::compost::formats::as_imag;
} // namespace compost
} // namespace sprout
#endif // #ifndef SPROUT_COMPOST_FORMATS_AS_IMAG_HPP
|
lizeidsness/minimalist
|
app/@esri/calcite-ui-icons/js/caretUp24F.js
|
<gh_stars>1-10
export const caretUp24F = "M1.5 16l11-11 11 11z";
|
mikatammi/georap
|
client/js/components/Location/Events/Model.js
|
<filename>client/js/components/Location/Events/Model.js
// This module is responsible for manipulating raw events array of location.
var emitter = require('component-emitter');
module.exports = function (rawEvents, location) {
// Parameters:
// events array of the raw location.
var self = this;
emitter(self);
// Bind
location.on('location_event_created', function (ev) {
// Most recent is topmost
rawEvents.unshift(ev);
// For view
self.emit('location_event_created');
});
// Public methods
self.getLocation = function () {
// Return location model
return location;
};
self.toRawArray = function () {
// Get raw events as array.
return rawEvents;
};
};
|
vnaveen0/nachos
|
mem-axc-64/apps/macsim.r.d.b.s/macsim-mem-axc-64/gems-lib-ooo/ruby_clean/network/garnet-fixed-pipeline/Router_d.h
|
<gh_stars>1-10
/*
Copyright (C) 1999-2008 by <NAME> and <NAME> for the
Wisconsin Multifacet Project. Contact: <EMAIL>
http://www.cs.wisc.edu/gems/
--------------------------------------------------------------------
This file is part of Garnet (Princeton's interconnect model),
a component of the Multifacet GEMS (General Execution-driven
Multiprocessor Simulator) software toolset originally developed at
the University of Wisconsin-Madison.
Garnet was developed by Niket Agarwal at Princeton University. Orion was
developed by Princeton University.
Substantial further development of Multifacet GEMS at the
University of Wisconsin was performed by <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, and <NAME>.
--------------------------------------------------------------------
If your use of this software contributes to a published paper, we
request that you (1) cite our summary paper that appears on our
website (http://www.cs.wisc.edu/gems/) and (2) e-mail a citation
for your published paper to <EMAIL>.
If you redistribute derivatives of this software, we request that
you notify us and either (1) ask people to register with us at our
website (http://www.cs.wisc.edu/gems/) or (2) collect registration
information and periodically send it to us.
--------------------------------------------------------------------
Multifacet GEMS is free software; you can redistribute it and/or
modify it under the terms of version 2 of the GNU General Public
License as published by the Free Software Foundation.
Multifacet GEMS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with the Multifacet GEMS; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307, USA
The GNU General Public License is contained in the file LICENSE.
### END HEADER ###
*/
/*
* Router_d.h
*
* <NAME>, Princeton University
*
* */
#ifndef ROUTER_D_H
#define ROUTER_D_H
#include "NetworkHeader.h"
#include "Vector.h"
#include "flit_d.h"
#include "NetDest.h"
#include "power_router_init.h"
class GarnetNetwork_d;
class NetworkLink_d;
class CreditLink_d;
class InputUnit_d;
class OutputUnit_d;
class RoutingUnit_d;
class VCallocator_d;
class SWallocator_d;
class Switch_d;
// A single router in the Garnet fixed-pipeline network model: owns its
// input/output units, routing unit, VC and switch allocators, and the
// crossbar switch, and accumulates activity counts for power estimation.
class Router_d {
public:
	Router_d(int id, GarnetNetwork_d *network_ptr);
	~Router_d();

	// Wire up the internal units; must be called before simulation starts.
	void init();
	// Attach an incoming link (plus its credit return channel).
	void addInPort(NetworkLink_d *link, CreditLink_d *credit_link);
	// Attach an outgoing link with its routing-table entry and weight.
	void addOutPort(NetworkLink_d *link, const NetDest& routing_table_entry, int link_weight, CreditLink_d *credit_link);

	// Simple configuration accessors.
	int get_num_vcs() { return m_num_vcs; }
	int get_vc_per_vnet() {return m_vc_per_vnet; }
	int get_num_inports() { return m_input_unit.size(); }
	int get_num_outports() { return m_output_unit.size(); }
	void printConfig(ostream& out);
	int get_id() { return m_id; }
	GarnetNetwork_d* get_net_ptr() { return m_network_ptr; }
	Vector<InputUnit_d *>& get_inputUnit_ref() { return m_input_unit; }
	Vector<OutputUnit_d *>& get_outputUnit_ref() { return m_output_unit; }

	// Pipeline hooks invoked by the units each cycle.
	void update_sw_winner(int inport, flit_d *t_flit);
	void update_incredit(int in_port, int in_vc, int credit);
	void route_req(flit_d *t_flit, InputUnit_d* in_unit, int invc);
	void vcarb_req();
	void swarb_req();

	// Power/performance accounting (Orion-based model).
	void power_router_initialize(power_router *router, power_router_info *info);
	double calculate_power();
	double calculate_offline_power(power_router*, power_router_info*);
	void calculate_performance_numbers();

private:
	int m_id;                                    // router id within the network
	int m_virtual_networks, m_num_vcs, m_vc_per_vnet;
	GarnetNetwork_d *m_network_ptr;              // owning network (not owned here)
	int m_flit_width;

	// Activity counters feeding the power model.
	double buf_read_count, buf_write_count, crossbar_count, vc_local_arbit_count, vc_global_arbit_count, sw_local_arbit_count, sw_global_arbit_count;

	// Per-port units and shared pipeline stages.
	Vector<InputUnit_d *> m_input_unit;
	Vector<OutputUnit_d *> m_output_unit;
	RoutingUnit_d *m_routing_unit;
	VCallocator_d *m_vc_alloc;
	SWallocator_d *m_sw_alloc;
	Switch_d *m_switch;
};
#endif
|
vyscond/android-experiments
|
GridLayoutTest/src/fire/bolt/gridlayouttest/SourceCode.java
|
<reponame>vyscond/android-experiments<filename>GridLayoutTest/src/fire/bolt/gridlayouttest/SourceCode.java
package fire.bolt.gridlayouttest;
import android.app.Activity;
import android.graphics.Color;
import android.os.Bundle;
import android.view.Menu;
import android.widget.Button;
import android.widget.EditText;
import android.widget.GridLayout;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.LinearLayout.LayoutParams;
/**
 * Activity that displays a hard-coded Java source snippet inside an
 * EditText added to the GridLayout of its layout resource.
 */
public class SourceCode extends Activity {

	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_source_code);
		GridLayout gl = (GridLayout) findViewById(R.id.GridLayout1);
		EditText source = new EditText(this);
		// The displayed text is a literal code listing; it is data, not
		// executed code.
		source.setText("LinearLayout ll = new LinearLayout(this);"+"\n"+
				"ll.setOrientation(LinearLayout.VERTICAL);"+"\n"+
				"Button b = new Button(this);"+"\n"+
				"b.setText(textView_value);"+"\n"+
				"TextView tv = new TextView(this);"+"\n"+
				"tv.setText(button_value);"+"\n"+
				"ll.addView(tv);"+"\n"+
				"ll.addView(b);"+"\n"+
				//LinearLayout.LayoutParams params = (LayoutParams) LinearLayout.setLayoutParams(LayoutParams.MATCH_PARENT,LayoutParams.MATCH_PARENT);
				"LinearLayout.LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);"+"\n"+
				"ll.setLayoutParams(params);"+"\n"+
				"if( swp_color == Color.BLACK )"+"\n"+
				"{"+"\n"+
				"swp_color = Color.DKGRAY;"+"\n"+
				"}"+"\n"+
				"else"+"\n"+
				"{"+"\n"+
				"swp_color = Color.BLACK;"+"\n"+
				"}"+"\n"+
				"ll.setBackgroundColor(swp_color);"+"\n"+
				"return ll;"+"\n");
		gl.addView(source);
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Standard options-menu inflation for this activity.
		getMenuInflater().inflate(R.menu.activity_source_code, menu);
		return true;
	}
}
|
yffd/easy-parent
|
easy-uumc/easy-uumc-service/src/main/java/com/yffd/easy/uumc/service/UumcSysApplicationService.java
|
package com.yffd.easy.uumc.service;
import org.springframework.stereotype.Service;
import com.yffd.easy.uumc.pojo.entity.UumcSysApplicationEntity;
/**
 * Service layer for {@link UumcSysApplicationEntity} (system application
 * records). All CRUD behaviour is inherited from {@code UupmBaseService};
 * this class only binds the entity type and registers the Spring bean.
 *
 * @Date 2018年06月11日 15时19分27秒 <br/>
 * @author ZhangST <br/>
 * @version 1.0
 * @since JDK 1.7+
 * @see
 */
@Service
public class UumcSysApplicationService extends UupmBaseService<UumcSysApplicationEntity> {
}
|
chenqwwq/_leetcode
|
src/basis/math/Gcd.java
|
package top.chenqwwq.basis.math;
import javax.xml.crypto.dsig.spec.XSLTTransformParameterSpec;
/**
* 求最大公约数
*
* @author chen
* @date 2022-02-10
**/
public class Gcd {
    /**
     * Exhaustive search for the greatest common divisor.
     *
     * <p>Bug fix: the original scanned candidates upward from 2 and returned
     * the first common divisor found — i.e. the <em>smallest</em> non-trivial
     * common divisor (gcd(12, 18) returned 2 instead of 6) — and its loop
     * bound {@code i < Math.min(a, b)} excluded min(a, b) itself (so
     * gcd(4, 8) could never return 4). Scanning downward from min(a, b)
     * makes the first hit the greatest common divisor.
     *
     * @param a first operand (non-negative)
     * @param b second operand (non-negative)
     * @return the greatest common divisor, or 1 when no divisor &gt;= 2 exists
     */
    public static int gcd_exhaustively(int a, int b) {
        for (int i = Math.min(a, b); i >= 2; i--) {
            if (a % i == 0 && b % i == 0) {
                return i;
            }
        }
        return 1;
    }

    /**
     * Euclidean algorithm (division-based), recursive form:
     * gcd(a, b) = gcd(b, a mod b) with gcd(a, 0) = a.
     */
    public static int gcd_division_recursive(int a, int b) {
        return b == 0 ? a : gcd_division_recursive(b, a % b);
    }
}
|
hao-wang/Montage
|
js-test-suite/testsuite/7eba994776dc4e051c7f119db3f89cd8.js
|
<reponame>hao-wang/Montage
// Harness preamble: load shared helper scripts (hashed filenames) that
// define the test environment, including $ERROR.
load("fcfbc86708bc3a4062c2091a062e13b6.js");
load("faa81dc98fc13338aca921d45eebae79.js");
load("68b329da9893e34099c7d8ad5cb9c940.js");
// Copyright 2013 Mozilla Corporation. All rights reserved.
// This code is governed by the license found in the LICENSE file.
/**
 * @description Tests that Intl has Object.prototype as its prototype.
 * @author <NAME>
 */
// $ERROR (provided by the harness) records a test failure.
if (Object.getPrototypeOf(Intl) !== Object.prototype) {
    $ERROR("Intl doesn't have Object.prototype as its prototype.");
}
|
scscgit/XchangeCrypt-AndroidClient
|
app/src/main/java/bit/xchangecrypt/client/datamodel/ContentCacheType.java
|
package bit.xchangecrypt.client.datamodel;
/**
 * Categories of locally cached content, used to look up and invalidate
 * cache entries per kind of data.
 * <p>
 * Created by Peter on 05.05.2018.
 */
public enum ContentCacheType {
    INSTRUMENTS,                 // tradable instrument definitions
    ACCOUNT_ORDERS,              // currently open orders
    ACCOUNT_ORDER_HISTORY,       // closed/filled orders
    ACCOUNT_TRANSACTION_HISTORY, // deposits, withdrawals, transfers
    COINS_BALANCE,               // per-coin wallet balances
}
|
Timtendo12/FarCryDiscordBot
|
src/Commands/sps.js
|
const Command = require("../Structures/command.js");
const Discord = require("discord.js");
const config = require("../data/config.json");
const {MessageEmbed} = require("discord.js");
// Rock/paper/scissors betting command. The three symbols (stone, paper,
// scissors) come from config; the bot's pick is random and the result is
// reported via embeds.
// NOTE(review): the command is registered as "bet" although the file is
// named sps.js — confirm the intended command name.
module.exports = new Command({
    name: "bet",
    description: "funny bet command",
    async run(message, args, client) {
        // Usage embed shown when no bet argument was supplied.
        const noBetMessage = new MessageEmbed()
        .setTitle(`Command usage:`)
        .setColor('RED')
        .setTimestamp()
        .setDescription(`fc!bet ${config.stone}/${config.paper}/${config.scissors}`);
        // playerBet = whatever he bets.
        const playerBet = args.slice(1).join(" ")
        if(!playerBet){
            message.channel.send({embeds: [noBetMessage]});
        }else{
            //possible bets. Defined in config.
            let options = [`${config.stone}`,`${config.paper}`,`${config.scissors}`];
            let betStone = options[0]
            let betPaper = options[1]
            let betScissors = options[2]
            // Result embeds for the three outcomes.
            const winMessage = new MessageEmbed()
            .setTitle(`you win you sussy baka`)
            .setColor('GREEN')
            .setTimestamp()
            .setDescription(`Congrats!`);
            const loseMessage = new MessageEmbed()
            .setTitle(`YOU LOST I WON`)
            .setColor('YELLOW')
            .setTimestamp()
            .setDescription(`Better luck next time!`);
            const evenMessage = new MessageEmbed()
            .setTitle(`OH NO! EVEN!`)
            .setColor('YELLOW')
            .setTimestamp()
            .setDescription(`I'll destroy you next time!`);
            //bot bet is randomized
            const botBet = options[Math.floor(Math.random()*options.length)];
            //checks if args[1] (format: prefixArgs[0] [args1]) contains something from the array.
            if (options.some(word => args[1].includes(word))){
                // Reveal the bot's pick first.
                message.channel.send(`${botBet}`)
                console.log(args[1]);
                //-----------------------------------------------------------------
                // botBet 1 = stone | botBet 2 = paper | botBet 3 = scissors
                //-----------------------------------------------------------------
                //what to do if the player bets stone
                if (playerBet === betStone) {
                    console.log(playerBet)
                    //WTD if bot bets scissors
                    console.log(botBet);
                    if (botBet === betScissors){
                        console.log(botBet)
                        message.channel.send({ embeds: [winMessage]})
                    //WTD if bot bets paper
                    } else if (botBet === betPaper){
                        console.log(botBet)
                        message.channel.send({ embeds: [loseMessage]})
                    }
                }
                //what to do if the player bets paper
                if (playerBet === betPaper){
                    if (botBet === betStone){
                        message.channel.send({ embeds: [winMessage]})
                    } else if (botBet === betScissors){
                        message.channel.send({ embeds: [loseMessage]})
                    }
                }
                //what to do if the player bets scissors
                if (playerBet === betScissors){
                    if (botBet === betStone){
                        message.channel.send({ embeds: [loseMessage]})
                    } else if (botBet === betPaper){
                        message.channel.send({ embeds: [winMessage]})
                    }
                }
                //Playerbet is equal to botbet
                if (playerBet === botBet){
                    message.channel.send({ embeds: [evenMessage]})
                }
            }else{
                // Bet did not match any configured symbol.
                message.channel.send(`Usage command fc!bet ${config.stone}/${config.paper}/${config.scissors}`);
                console.log(`${message.author.tag} is dumb and used ${args[1]} instead of ${config.stone}, ${config.paper} or ${config.scissors}`)
            }
        }
    }
});
|
specs-feup/matisse
|
MatlabProcessorTests/src/org/specs/MatlabProcessor/cell/CellArrayTest.java
|
/**
* Copyright 2015 SPeCS.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License. under the License.
*/
package org.specs.MatlabProcessor.cell;
import org.junit.Assert;
import org.junit.Test;
import org.specs.MatlabIR.Exceptions.CodeParsingException;
import org.specs.MatlabIR.MatlabNode.MatlabNode;
import org.specs.MatlabIR.MatlabNode.StatementNode;
import org.specs.MatlabIR.MatlabNode.nodes.core.CellNode;
import org.specs.MatlabIR.MatlabNode.nodes.root.FileNode;
import org.specs.MatlabIR.MatlabNode.nodes.root.ScriptNode;
import org.specs.MatlabIR.MatlabNode.nodes.statements.AssignmentSt;
import org.specs.MatlabProcessor.MatlabParser.MatlabParser;
import junit.framework.TestCase;
import pt.up.fe.specs.util.SpecsIo;
import pt.up.fe.specs.util.SpecsSystem;
import pt.up.fe.specs.util.properties.SpecsProperty;
/**
 * Parser tests for MATLAB cell-array syntax, driven by the
 * CellArrayResource / CellArrayErrorResource / CellArrayValidParseResource
 * enumerations of test inputs and expected outputs.
 */
public class CellArrayTest extends TestCase {

    @Override
    protected void setUp() {
        SpecsSystem.programStandardInit();
        SpecsProperty.ShowStackTrace.applyProperty("true");
    }

    /** Each resource must parse to exactly its expected result text. */
    @Test
    public void testResults() {
        for (CellArrayResource resource : CellArrayResource.values()) {
            testResult(resource.name(), resource.getResource(), resource.getResultResource());
        }
    }

    /** Each error resource must fail parsing with CodeParsingException. */
    @Test
    public void testErrors() {
        for (CellArrayErrorResource resource : CellArrayErrorResource.values()) {
            System.out.println(resource.name());
            // Silence expected parse-error logging while provoking the failure.
            SpecsProperty.LoggingLevel.applyProperty("901");
            try {
                new MatlabParser().parse(resource);
                // MatlabProcessorUtils.fromMFile(IoUtils.getResource(resource),
                // "script");
                Assert.fail("Expected CodeParsingException");
            } catch (CodeParsingException e) {
                // Good
            }
            SpecsProperty.LoggingLevel.applyProperty("700");
        }
    }

    /**
     * Valid-but-tricky inputs: the second statement's first child must
     * stringify to the expected resource text (line endings normalized).
     */
    @Test
    public void testValid() {
        for (CellArrayValidParseResource resource : CellArrayValidParseResource.values()) {
            System.out.println(resource.name());
            String expectedResult = SpecsIo.getResource(resource.getResultResource());
            FileNode testToken = new MatlabParser().parse(resource);
            ScriptNode script = testToken.getScript();
            StatementNode accessCall = script.getStatements().get(1);
            MatlabNode child = accessCall.getChild(0);
            String resourceName = resource.name();
            Assert.assertEquals(resourceName + ": " + expectedResult.replace("\r\n", "\n").trim(),
                    resourceName + ": " + child.toString().replace("\r\n", "\n").trim());
        }
    }

    /**
     * Parses the test snippet, takes the first assignment's normalized
     * right-hand side as a CellNode and compares its generated code with
     * the expected result resource (wrapped in braces).
     */
    private static void testResult(String resourceName, String test, String resultResource) {
        System.out.println(resourceName);
        FileNode testToken = new MatlabParser().parse(() -> test);
        // FileNode testToken = new MatlabParser().parse(() -> test);
        String result = "{" + SpecsIo.getResource(resultResource) + "}";
        ScriptNode script = testToken.getScript();
        AssignmentSt assignment = script.getStatements(AssignmentSt.class).get(0);
        MatlabNode right = assignment.getRightHand().normalize();
        CellNode cellArray = (CellNode) right;
        Assert.assertEquals(resourceName + ": " + result,
                resourceName + ": " + cellArray.getCode());
    }
}
|
shutterSound/livewire1
|
public/app-assets/js/scripts/tables/material-datatable.js
|
/*=========================================================================================
File Name: material-datatables.js
Description: Material Datatable
----------------------------------------------------------------------------------------
Item Name: Modern Admin - Clean Bootstrap 4 Dashboard HTML Template
Author: PIXINVENT
Author URL: http://www.themeforest.net/user/pixinvent
==========================================================================================*/
// On DOM ready, initialise DataTables on every .material-table element.
// $(fn) is jQuery's shorthand for $(document).ready(fn).
$(function () {
    $('.material-table').DataTable();
});
|
everyside/aura
|
aura-impl/src/main/java/org/auraframework/impl/validation/ValidationFileSourceLoader.java
|
<filename>aura-impl/src/main/java/org/auraframework/impl/validation/ValidationFileSourceLoader.java
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.impl.validation;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.auraframework.def.DefDescriptor;
import org.auraframework.impl.source.file.FileSourceLoader;
import org.auraframework.throwable.AuraRuntimeException;
/**
* Specialized file source loader used for validation, it loads all types of definitions.
*/
/**
 * Specialized file source loader used for validation, it loads all types of definitions.
 */
public final class ValidationFileSourceLoader extends FileSourceLoader {

    private static final Logger LOG = Logger.getLogger(ValidationFileSourceLoader.class);

    public ValidationFileSourceLoader(File base) {
        super(base);
    }

    /**
     * Recursively searches for definitions starting at path and returns the set of descriptors for all definitions
     * found. Iterates every known namespace under {@code base} and filters by path prefix.
     */
    public Set<DefDescriptor<?>> findIn(File path) {
        Set<DefDescriptor<?>> ret = new HashSet<>();
        AnyTypeFilter af = new FindInFilter(ret, path);
        for (String ns : getNamespaces()) {
            af.setNamespace(ns);
            findFiles(new File(base, ns), null, af);
        }
        return ret;
    }

    /**
     * File filter that collects descriptors for every file whose canonical
     * path lies under the given root. Directories are accepted (descended
     * into) but never collected.
     */
    private static final class FindInFilter extends AnyTypeFilter {
        private final String rootCanonicalPath;

        public FindInFilter(Set<DefDescriptor<?>> dset, File root) {
            super(dset, null);
            try {
                // Canonicalize once so per-file prefix checks are cheap.
                rootCanonicalPath = root.getCanonicalPath();
            } catch (IOException e) {
                throw new AuraRuntimeException(e);
            }
        }

        @Override
        public boolean accept(File file) {
            // TODO: optimize?
            String canonicalPath;
            try {
                canonicalPath = file.getCanonicalPath();
            } catch (IOException e) {
                // Unresolvable path: log and skip rather than abort the walk.
                LOG.warn(file.getAbsolutePath() + ": " + e);
                return false;
            }
            if (file.isDirectory()) {
                return true;
            }
            if (canonicalPath.startsWith(rootCanonicalPath)) {
                DefDescriptor<?> dd = getDescriptor(canonicalPath);
                this.dset.add(dd);
            }
            // Always false for files: collection happens via dset, and
            // returning false keeps findFiles from treating it as a match.
            return false;
        }
    }
}
|
qlcchain/WinQ-Android-code
|
app/src/main/java/com/stratagile/qlink/ui/activity/vpn/module/RankModule.java
|
package com.stratagile.qlink.ui.activity.vpn.module;
import com.stratagile.qlink.data.api.HttpAPIWrapper;
import com.stratagile.qlink.ui.activity.base.ActivityScope;
import com.stratagile.qlink.ui.activity.vpn.RankActivity;
import com.stratagile.qlink.ui.activity.vpn.contract.RankContract;
import com.stratagile.qlink.ui.activity.vpn.presenter.RankPresenter;
import dagger.Module;
import dagger.Provides;
/**
* @author hzp
* @Package com.stratagile.qlink.ui.activity.vpn
* @Description: The moduele of RankActivity, provide field for RankActivity
* @date 2018/07/31 17:14:45
*/
@Module
public class RankModule {
    /** View interface that this module binds into RankActivity's object graph. */
    private final RankContract.View view;

    public RankModule(RankContract.View view) {
        this.view = view;
    }

    /** Supplies the activity-scoped presenter, wired to the HTTP API and the view. */
    @Provides
    @ActivityScope
    public RankPresenter provideRankPresenter(HttpAPIWrapper httpAPIWrapper, RankActivity mActivity) {
        return new RankPresenter(httpAPIWrapper, view, mActivity);
    }

    /** Exposes the view as its concrete activity type for injection elsewhere. */
    @Provides
    @ActivityScope
    public RankActivity provideRankActivity() {
        return (RankActivity) view;
    }
}
|
Caspar12/graphql-braid
|
src/main/java/com/atlassian/braid/source/VariableNamespacingGraphQLQueryVisitor.java
|
<filename>src/main/java/com/atlassian/braid/source/VariableNamespacingGraphQLQueryVisitor.java<gh_stars>0
package com.atlassian.braid.source;
import graphql.language.Argument;
import graphql.language.ArrayValue;
import graphql.language.Directive;
import graphql.language.Field;
import graphql.language.Node;
import graphql.language.NodeVisitorStub;
import graphql.language.ObjectField;
import graphql.language.ObjectValue;
import graphql.language.OperationDefinition;
import graphql.language.Type;
import graphql.language.Value;
import graphql.language.VariableDefinition;
import graphql.language.VariableReference;
import graphql.schema.DataFetchingEnvironment;
import graphql.util.TraversalControl;
import graphql.util.TraverserContext;
import java.util.Map;
import static com.atlassian.braid.source.NamespacedVariableReference.namespacedVariableReference;
import static java.util.stream.Collectors.toList;
/**
 * AST visitor that rewrites variable references in a braided GraphQL query so
 * they carry a numeric suffix, preventing collisions between variables of
 * different source queries. Visiting a field mutates its arguments and
 * directives in place and records new variable values/definitions as side
 * effects on {@code variables} and {@code queryOp}.
 */
public class VariableNamespacingGraphQLQueryVisitor extends NodeVisitorStub {
    // Suffix appended to every renamed variable.
    private final int counter;
    // Operation whose variable definitions are consulted for variable types.
    private final OperationDefinition queryType;
    // Receives the namespaced variable name -> value entries.
    private final Map<String, Object> variables;
    // Source of the original variable values (via its execution context).
    private final DataFetchingEnvironment environment;
    // Receives the new (namespaced) variable definitions.
    private final OperationDefinition queryOp;
    public VariableNamespacingGraphQLQueryVisitor(int counter,
                                                  OperationDefinition operationDefinition,
                                                  Map<String, Object> variables,
                                                  DataFetchingEnvironment environment,
                                                  OperationDefinition queryOp) {
        this.counter = counter;
        this.queryType = operationDefinition;
        this.variables = variables;
        this.environment = environment;
        this.queryOp = queryOp;
    }
    @Override
    public TraversalControl visitField(Field node, TraverserContext<Node> traverserContext) {
        // Rewrite arguments and directives in place, then continue traversal.
        node.setArguments(node.getArguments().stream().map(this::namespaceReferences).collect(toList()));
        node.setDirectives(node.getDirectives().stream().map(this::namespaceReferences).collect(toList()));
        return TraversalControl.CONTINUE;
    }
    // Returns a copy of the argument with any variable references namespaced.
    private Argument namespaceReferences(Argument arg) {
        return new Argument(arg.getName(), namespaceReferences(arg.getValue()));
    }
    // Returns a copy of the directive with namespaced arguments.
    private Directive namespaceReferences(Directive original) {
        return new Directive(original.getName(), original.getArguments().stream().map(this::namespaceReferences).collect(toList()));
    }
    // Recursively namespaces variable references inside a value; scalar values
    // other than variable refs, objects and arrays are returned unchanged.
    private Value namespaceReferences(Value value) {
        final Value transformedValue;
        if (value instanceof VariableReference) {
            transformedValue = maybeNamespaceReference((VariableReference) value);
        } else if (value instanceof ObjectValue) {
            transformedValue = namespaceReferencesForObjectValue((ObjectValue) value);
        } else if (value instanceof ArrayValue) {
            transformedValue = namespaceReferencesForArrayValue((ArrayValue) value);
        } else {
            transformedValue = value;
        }
        return transformedValue;
    }
    private ObjectValue namespaceReferencesForObjectValue(ObjectValue value) {
        return new ObjectValue(
                value.getChildren().stream()
                        .map(ObjectField.class::cast)
                        .map(o -> new ObjectField(o.getName(), namespaceReferences(o.getValue())))
                        .collect(toList()));
    }
    private ArrayValue namespaceReferencesForArrayValue(ArrayValue value) {
        return new ArrayValue(
                value.getChildren().stream()
                        .map(Value.class::cast)
                        .map(this::namespaceReferences)
                        .collect(toList()));
    }
    // Idempotence guard: already-namespaced references are left untouched.
    private VariableReference maybeNamespaceReference(VariableReference value) {
        return isVariableAlreadyNamespaced(value) ? value : namespaceVariable(value);
    }
    // Renames one variable and, as side effects, copies its value into
    // 'variables' and appends a matching definition to 'queryOp'.
    private VariableReference namespaceVariable(VariableReference varRef) {
        final String newName = varRef.getName() + counter;
        final VariableReference value = namespacedVariableReference(newName);
        final Type type = findVariableType(varRef, queryType);
        variables.put(newName, environment.getExecutionContext().getVariables().get(varRef.getName()));
        queryOp.getVariableDefinitions().add(new VariableDefinition(newName, type));
        return value;
    }
    private boolean isVariableAlreadyNamespaced(VariableReference varRef) {
        return varRef instanceof NamespacedVariableReference;
    }
    // Looks up the declared type of a variable; throws IllegalArgumentException
    // if the operation does not define it.
    private static Type findVariableType(VariableReference varRef, OperationDefinition queryType) {
        return queryType.getVariableDefinitions()
                .stream()
                .filter(d -> d.getName().equals(varRef.getName()))
                .map(VariableDefinition::getType)
                .findFirst()
                .orElseThrow(IllegalArgumentException::new);
    }
}
|
panc-test/python-study
|
test_scripts/test02.py
|
"""
列表 [1, 2, 3, 8, 9],要求你把列表里面的每个值加1
"""
list = [1, 2, 3, 8, 9]
list_new = [i + 1 for i in list] # 列表推导式
print(list_new)
|
xBlazeTECH/xBlazeCore
|
src/net/xblaze/xBlazeCore/api/nms/FancyMessage.java
|
<filename>src/net/xblaze/xBlazeCore/api/nms/FancyMessage.java<gh_stars>0
package net.xblaze.xBlazeCore.api.nms;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.server.v1_7_R3.ChatSerializer;
import net.minecraft.server.v1_7_R3.NBTTagCompound;
import net.minecraft.server.v1_7_R3.PacketPlayOutChat;
import org.bukkit.ChatColor;
import org.bukkit.craftbukkit.v1_7_R3.entity.CraftPlayer;
import org.bukkit.craftbukkit.v1_7_R3.inventory.CraftItemStack;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.json.JSONException;
import org.json.JSONStringer;
/**
 * Fluent builder for Minecraft JSON chat messages (tellraw-style). Each call
 * appends to or decorates the most recent {@link MessagePart}; terminal
 * {@link #send(Player)} serializes the parts and ships them as a chat packet.
 * NOTE(review): bound to NMS v1_7_R3 — breaks on other server versions.
 */
public class FancyMessage implements GenericFancyMessage {
    private final List<MessagePart> messageParts;
    public FancyMessage(final String firstPartText) {
        messageParts = new ArrayList<MessagePart>();
        messageParts.add(new MessagePart(firstPartText));
    }
    /** Sets the color of the latest part; rejects format codes. */
    public FancyMessage color(final ChatColor color) {
        if (!color.isColor()) {
            throw new IllegalArgumentException(color.name() + " is not a color");
        }
        latest().color = color;
        return this;
    }
    /** Sets the style codes of the latest part; rejects color codes. */
    public FancyMessage style(final ChatColor... styles) {
        for (final ChatColor style : styles) {
            if (!style.isFormat()) {
                throw new IllegalArgumentException(style.name() + " is not a style");
            }
        }
        latest().styles = styles;
        return this;
    }
    /** Click action: open a file on the client at the given path. */
    public FancyMessage file(final String path) {
        onClick("open_file", path);
        return this;
    }
    /** Click action: open a URL. */
    public FancyMessage link(final String url) {
        onClick("open_url", url);
        return this;
    }
    /** Click action: put a command into the player's chat input without running it. */
    public FancyMessage suggest(final String command) {
        onClick("suggest_command", command);
        return this;
    }
    /** Click action: run a command as the player. */
    public FancyMessage command(final String command) {
        onClick("run_command", command);
        return this;
    }
    /** Hover action: show the achievement with the given (unprefixed) name. */
    public FancyMessage achievementTooltip(final String name) {
        onHover("show_achievement", "achievement." + name);
        return this;
    }
    /** Hover action: show an item tooltip from raw item JSON/NBT text. */
    public FancyMessage itemTooltip(final String itemJSON) {
        onHover("show_item", itemJSON);
        return this;
    }
    /** Hover action: show an item tooltip for a Bukkit ItemStack (serialized via NMS). */
    public FancyMessage itemTooltip(final ItemStack itemStack) {
        return itemTooltip(CraftItemStack.asNMSCopy(itemStack).save(new NBTTagCompound()).toString());
    }
    /**
     * Hover action: show plain text. Multi-line text is emulated through a
     * fake item tooltip because show_text has no line breaks in this version.
     */
    public FancyMessage tooltip(final String text) {
        final String[] lines = text.split("\\n");
        if (lines.length <= 1) {
            onHover("show_text", text);
        } else {
            itemTooltip(makeMultilineTooltip(lines));
        }
        return this;
    }
    /** Starts a new message part from the object's string form. */
    public FancyMessage then(final Object obj) {
        messageParts.add(new MessagePart(obj.toString()));
        return this;
    }
    /**
     * Serializes all parts to chat-component JSON. A single part is emitted
     * bare; multiple parts are wrapped in {"text":"","extra":[...]}.
     */
    public String toJSONString() {
        final JSONStringer json = new JSONStringer();
        try {
            if (messageParts.size() == 1) {
                latest().writeJson(json);
            } else {
                json.object().key("text").value("").key("extra").array();
                for (final MessagePart part : messageParts) {
                    part.writeJson(json);
                }
                json.endArray().endObject();
            }
        } catch (final JSONException e) {
            throw new RuntimeException("invalid message");
        }
        return json.toString();
    }
    /** Sends the built message to the player via a raw NMS chat packet. */
    public void send(Player player){
        ((CraftPlayer) player).getHandle().playerConnection.sendPacket(new PacketPlayOutChat(ChatSerializer.a(toJSONString())));
    }
    // Most recent part; all decorating calls apply to it.
    private MessagePart latest() {
        return messageParts.get(messageParts.size() - 1);
    }
    // Builds the fake-item NBT JSON used for multi-line tooltips: first line
    // becomes the item Name (forced white via \u00A7f), the rest become Lore.
    private String makeMultilineTooltip(final String[] lines) {
        final JSONStringer json = new JSONStringer();
        try {
            json.object().key("id").value(1);
            json.key("tag").object().key("display").object();
            json.key("Name").value("\\u00A7f" + lines[0].replace("\"", "\\\""));
            json.key("Lore").array();
            for (int i = 1; i < lines.length; i++) {
                final String line = lines[i];
                // Empty lore lines must be a space, or the client collapses them.
                json.value(line.isEmpty() ? " " : line.replace("\"", "\\\""));
            }
            json.endArray().endObject().endObject().endObject();
        } catch (final JSONException e) {
            throw new RuntimeException("invalid tooltip");
        }
        return json.toString();
    }
    private void onClick(final String name, final String data) {
        final MessagePart latest = latest();
        latest.clickActionName = name;
        latest.clickActionData = data;
    }
    private void onHover(final String name, final String data) {
        final MessagePart latest = latest();
        latest.hoverActionName = name;
        latest.hoverActionData = data;
    }
}
|
microsoft/wifi-ztp
|
src/utils/event_loop.h
|
#ifndef __EVENT_LOOP_H__
#define __EVENT_LOOP_H__
#include <stdbool.h>
#include <stdint.h>
#include <time.h>
#include <userspace/linux/list.h>
struct scheduled_task;
/**
* @brief Event loop read-event handler prototype.
*/
typedef void (*event_handler_fn)(int fd, void *context);
/**
* @brief Generic file-descriptor based event.
*/
struct event {
int fd;
event_handler_fn handler;
void *handler_arg;
};
/**
* @brief Event dispatcher, holding a table of events.
*/
struct event_dispatch {
size_t num_events;
struct event *events;
};
/**
* @brief Event loop control structure.
*/
struct event_loop {
struct list_head scheduled_tasks;
clockid_t clock;
int epoll_fd;
bool terminate_pending;
size_t events_max;
struct epoll_event *events;
struct event_dispatch dispatch;
int exit_code;
};
/**
* @brief Registers a handler for events that signal data is available to be read from a file descriptor.
*
* @param loop The event loop instance.
* @param events The event types to monitor. Must be one of the EPOLL* macros.
* @param fd The file descriptor to monitor for changes to read from.
* @param handler The handler function to invoke when data is available to be read from 'fd'.
* @param handler_arg The argument that should be passed to the handler function.
* @return int 0 if the handler was successfully registered, non-zero otherwise.
*/
int
event_loop_register_event(struct event_loop *loop, uint32_t events, int fd, event_handler_fn handler, void *handler_arg);
/**
 * @brief Unregisters a read event handler.
*
* @param loop The event loop instance.
* @param fd The file descriptor associated with the read event to unregister.
*/
void
event_loop_unregister_event(struct event_loop *loop, int fd);
/**
* @brief Type of scheduled task.
*/
enum scheduled_task_type {
TASK_ONESHOT,
TASK_PERIODIC
};
/**
* @brief Handler function to be invoked for a scheduled task.
*/
typedef void (*scheduled_task_handler)(void *task_context);
struct scheduled_task_timeout {
uint32_t seconds;
uint32_t useconds;
};
/**
* @brief Context for scheduled task.
*/
struct scheduled_task {
struct list_head list;
struct timespec expiry;
struct event_loop *loop;
struct scheduled_task_timeout timeout;
enum scheduled_task_type type;
scheduled_task_handler handler;
void *context;
};
/**
* @brief Schedules a task for execution at a later time.
*
* The expiry time is specified as the total number of seconds plus the
* total number of microseconds from the current time.
*
* The event loop does not need to be rescheduled following the addition of new
* or removal of existing timers. Each time the event loop blocks to wait for
* events, it schedules its wait timeout to be the expiry time of the next
* scheduled task. This ensures the event loop will be unblocked precisely when
 * the next (in time) scheduled task needs to run.
*
* Since the event loop and all code interacting with it is single-threaded, a
* new task cannot be scheduled until the event loop is unblocked. This
* guarantees that newly added and removed tasks will be accounted for.
*
* @param loop The event loop control structure.
* @param seconds The number of seconds from now the task should execute.
* @param useconds The number of microseconds, relative to the number of seconds, the task should execute.
* @param type The type of scheduled task; oneshot or periodic.
* @param handler The handler function to invoke when the task expiry time occurs.
* @param task_context The contextual data that will be passed to the handler function.
* @return int
*/
int
event_loop_task_schedule(struct event_loop *loop, uint32_t seconds, uint32_t useconds, enum scheduled_task_type type, scheduled_task_handler handler, void *task_context);
/**
* @brief Helper function to schedule a one-shot scheduled task to run immediately.
*
* @param loop The event loop control structure.
* @param handler The handler function to invoke when the task expiry time occurs.
* @param task_context The contextual data that will be passed to the handler function.
* @return int
*/
int
event_loop_task_schedule_now(struct event_loop *loop, scheduled_task_handler handler, void *task_context);
/**
* @brief Cancels a scheduled task.
*
* @param loop The event loop control structure.
* @param handler The task event handler.
 * @param task_context The context for the event handler.
* @return uint32_t The number of tasks that were canceled.
*/
uint32_t
event_loop_task_cancel(struct event_loop *loop, scheduled_task_handler handler, void *task_context);
/**
* @brief Initialize the event loop.
*
* @param loop The event loop control structure to initialize.
* @return int 0 if initialization was successful, non-zero otherwise.
*/
int
event_loop_initialize(struct event_loop *loop);
/**
* @brief Process the scheduled task queue.
*
* @param loop The event loop control structure.
*/
void
event_loop_process_scheduled_tasks(struct event_loop *loop);
/**
* @brief Calculates the event loop timeout.
*
* The event loop timeout is the relative time when the event loops needs to
* unblock to check for events. Currently the only events that need to be
* checked after such a timeout is the expiry of a scheduled task.
*
* If there are no configured scheduled tasks, the returned value represents an
* infinite timeout for epoll_wait().
*
* @param loop The event loop control structure.
* @return int The timeout to be supplied to epoll_wait().
*/
int
event_loop_get_timeout(struct event_loop *loop);
/**
* @brief Uninitializes the event loop.
*
* This will cancel all existing timers. The event loop must not be active.
*
* @param loop The event loop control structure to uninitialize.
*/
void
event_loop_uninitialize(struct event_loop *loop);
/**
* @brief Requests that the event loop stop running. The event loop will not
* terminate immediately, however, it will terminate at the end of the current
* loop iteration.
*
* @param loop The event loop instance.
* @param exit_code The return value, 0 if success, an error code otherwise
* @return 0 if it exited cleanly, an error code otherwise
*/
int
event_loop_stop(struct event_loop *loop, int exit_code);
/**
* @brief Runs the event loop. This uses the calling thread to wait for changes
* to the event loop's configured file descriptors. The event loop will run
* until the event_loop_stop() function is called.
*
* @param loop The event loop instance.
* @return 0 if it exited cleanly, an error code otherwise
*/
int
event_loop_run(struct event_loop *loop);
#endif //__EVENT_LOOP_H__
|
VEckardt/IntegrityDocs
|
src/com/ptc/services/utilities/docgen/type/StateTransitions.java
|
<reponame>VEckardt/IntegrityDocs
/*
* Copyright: Copyright 2018 (c) Parametric Technology GmbH
* Product: PTC Integrity Lifecycle Manager
* Author: <NAME>, Principal Consultant ALM
* Purpose: Custom Developed Code
* ************** File Version Details **************
* Revision: $Revision: 1.3 $
* Last changed: $Date: 2018/05/18 02:18:19CET $
*/
package com.ptc.services.utilities.docgen.type;
import java.util.ArrayList;
import java.util.List;
import java.util.Iterator;
import com.mks.api.response.APIException;
import com.mks.api.response.Field;
import com.mks.api.response.Item;
import com.mks.api.response.ItemNotFoundException;
import static com.ptc.services.utilities.docgen.Constants.GROUP_XML_PREFIX;
import static com.ptc.services.utilities.docgen.Constants.nl;
import com.ptc.services.utilities.docgen.utils.ExceptionHandler;
import com.ptc.services.utilities.docgen.Integrity;
import static com.ptc.services.utilities.docgen.Integrity.getXMLParamFieldValue;
import com.ptc.services.utilities.docgen.IntegrityState;
import static com.ptc.services.utilities.docgen.IntegrityUtils.getFieldValue;
import com.ptc.services.utilities.docgen.XMLWriter;
import static com.ptc.services.utilities.docgen.utils.Logger.log;
import java.util.LinkedHashMap;
import java.util.NoSuchElementException;
/**
 * Parses a type's "stateTransitions" field into three views: the list of
 * unique state names, an XML-parameterized string form of the transitions,
 * and an HTML report table.
 */
public class StateTransitions {

    /** Raw "stateTransitions" API field; may be null when the type has none. */
    private final Field transitions;
    /** Unique state names seen in the transitions ("Unspecified" excluded). */
    private List<String> statesList;
    /** XML-parameterized string form of all transitions. */
    private String strTransitions;
    /** State name -> IntegrityState, resolved through the Integrity API. */
    private LinkedHashMap<String, IntegrityState> statesHash;

    public StateTransitions(String typeName, Field stateTransitions) {
        transitions = stateTransitions;
        statesList = new ArrayList<>();
        strTransitions = "";
        statesHash = new LinkedHashMap<>();
        setUniqueStatesAndTransitions();
        try {
            statesHash = Integrity.getStates(typeName, statesList);
        } catch (APIException aex) {
            ExceptionHandler eh = new ExceptionHandler(aex);
            log(eh.getMessage());
            log(eh.getCommand());
            aex.printStackTrace();
        }
    }

    /** Records a state name once, skipping the pseudo-state "Unspecified". */
    private void addUniqueState(String name) {
        if (!name.equalsIgnoreCase("Unspecified") && !statesList.contains(name)) {
            statesList.add(name);
        }
    }

    /**
     * Walks every from-state/to-state pair, collecting unique state names and
     * building the string representation:
     * state:state:group|dynamic group,group|dynamic group,...[;
     */
    @SuppressWarnings("unchecked")
    private void setUniqueStatesAndTransitions() {
        if (null != transitions && null != transitions.getList()) {
            List<Item> stateTransitionsList = transitions.getList();
            StringBuilder sb = new StringBuilder();
            // Loop thru all the state transitions
            for (Iterator<Item> lit = stateTransitionsList.iterator(); lit.hasNext();) {
                // Get the "From State" value
                Item stateTransition = lit.next();
                // Add the 'Unspecified' state to the list of resources
                if (stateTransition.getId().equalsIgnoreCase("unspecified")) {
                    XMLWriter.paramsHash.put(IntegrityState.XML_PREFIX
                            + XMLWriter.getXMLParamName(stateTransition.getId()), stateTransition.getId());
                }
                // Add the from state to our list of unique states
                addUniqueState(stateTransition.getId());
                // Get the list of "To State" values
                Field targetStates = stateTransition.getField("targetStates");
                List<Item> targetStatesList = targetStates.getList();
                // Loop thru the target list of the transitions
                for (Iterator<Item> tlit = targetStatesList.iterator(); tlit.hasNext();) {
                    // Get the value for "To State"
                    Item targetState = tlit.next();
                    // Add the to state to our list of unique states.
                    // BUG FIX: previously re-added stateTransition.getId() (the
                    // from-state), so target-only states never reached statesList.
                    addUniqueState(targetState.getId());
                    // Add the from state to our string builder representation of the state transitions
                    String xmlFromState = IntegrityState.XML_PREFIX + XMLWriter.getXMLParamName(stateTransition.getId());
                    sb.append(XMLWriter.padXMLParamName(xmlFromState)).append(":");
                    // Add the target state to our string builder representation of the state transitions
                    String xmlToState = IntegrityState.XML_PREFIX + XMLWriter.getXMLParamName(targetState.getId());
                    sb.append(XMLWriter.padXMLParamName(xmlToState)).append(":");
                    // Add the permitted groups for this state transition
                    try {
                        sb.append(getXMLParamFieldValue(targetState.getField("permittedGroups"), GROUP_XML_PREFIX, ","));
                    } catch (NoSuchElementException e) {
                        sb.append("");
                    }
                    // Add the delimiter for the next state transition in the list
                    sb.append(tlit.hasNext() ? ";" + nl + "\t\t\t" : "");
                }
                // Add the delimiter for the next set of state transitions in the list
                sb.append(lit.hasNext() ? ";" + nl + "\t\t\t" : "");
            }
            // Set the strTransitions variable
            strTransitions = sb.toString();
        }
    }

    /** @return state name -> IntegrityState map resolved in the constructor */
    public LinkedHashMap<String, IntegrityState> getList() {
        return statesHash;
    }

    /** @return the XML-parameterized string form of the transitions */
    public String getStringTransitions() {
        return strTransitions;
    }

    /** @return unique state names in encounter order */
    public List<String> getStateList() {
        return statesList;
    }

    /**
     * Renders the transitions as an HTML table with From State / To State /
     * Permitted Groups columns.
     */
    @SuppressWarnings("unchecked")
    public String getFormattedReport() throws ItemNotFoundException {
        StringBuilder report = new StringBuilder();
        // Construct the open table and heading line
        report.append("<table class='list'>").append(nl);
        report.append("   <tr>").append(nl);
        report.append("       <th>From State</th>").append(nl);
        report.append("       <th>To State</th>").append(nl);
        report.append("       <th>Permitted Groups</th>").append(nl);
        report.append("   </tr>").append(nl);
        // Ensure we're dealing with some valid data
        if (null != transitions && null != transitions.getList()) {
            List<Item> stateTransitionsList = transitions.getList();
            // Loop thru all the state transitions
            for (Item stateTransition : stateTransitionsList) {
                // Get the list of "To State" values
                Field targetStates = stateTransition.getField("targetStates");
                List<Item> targetStatesList = targetStates.getList();
                for (Iterator<Item> tlit = targetStatesList.iterator(); tlit.hasNext();) {
                    // Write out the new table row
                    report.append("   <tr>" + nl);
                    // Get the value for "To State"
                    Item targetState = tlit.next();
                    // Write out the "From State" value
                    report.append("       <td>" + stateTransition.getId() + "</td>" + nl);
                    // Write out the "To State" value
                    report.append("       <td>" + targetState.getId() + "</td>" + nl);
                    // Finally write out the "Permitted Groups" value
                    report.append("       <td>" + getFieldValue(targetState.getField("permittedGroups"), "<br/>") + "</td>" + nl);
                    // Close out the table row
                    report.append("   </tr>" + nl);
                }
            }
        }
        // Close the table tag
        report.append("</table>" + nl);
        return report.toString();
    }
}
|
AdrianZw/csapex_core_plugins
|
csapex_vision/src/features/lsd.cpp
|
/*----------------------------------------------------------------------------
LSD - Line Segment Detector on digital images
Copyright (c) 2007-2011 <NAME> <<EMAIL>>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------------------------------------------*/
/// HEADER
#include "lsd.h"
/// PROJECT
#include <csapex/model/node_modifier.h>
#include <csapex/msg/generic_vector_message.hpp>
#include <csapex/msg/io.h>
#include <csapex/param/parameter_factory.h>
#include <csapex/utility/register_apex_plugin.h>
#include <csapex_opencv/cv_mat_message.h>
#include <ctype.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
using namespace csapex;
using namespace csapex::connection_types;
using namespace csapex;
CSAPEX_REGISTER_CLASS(csapex::LineSegmentDetector, csapex::Node)
// Default constructor; all wiring happens in setup()/process().
LineSegmentDetector::LineSegmentDetector()
{
}
// NOTE(review): this node is a stub — it only consumes the input message; the
// actual LSD conversion below has never been enabled. The commented-out code
// is kept as the author's work-in-progress sketch; confirm before deleting.
void LineSegmentDetector::process()
{
    connection_types::CvMatMessage::ConstPtr a = msg::getMessage<connection_types::CvMatMessage>(input_);
    //    cv::Mat image_cv = a->value;
    //    X = image_cv.rows;
    //    Y = image.cv.cols;
    //    /* get memory */
    //    image = new double [X*Y];
    //    if( image == NULL ) throw std::runtime_error("unsupported norm type");
    //    if(Image.type() == CV_8UC3) {
    //        Reading_image_impl<cv::Vec1b, Norm>(Image, Distance, Direction,
    //        Interpolation_coeff);
    //    } else if (Image.type() == CV_32FC3) {
    //        Reading_image_impl<cv::Vec1f, Norm>(Image, Distance, Direction,
    //        Interpolation_coeff);
    //    } else if (Image.type() == CV_64FC3) {
    //        Reading_image_impl<cv::Vec1d, Norm>(Image, Distance, Direction,
    //        Interpolation_coeff);
    //    } else {
    //        throw std::runtime_error("unsupported image type");
    //    }
    ////    template <typename PixelType, template<typename> class Norm>
    ////    void color_edge_detection::RCMG_computation_impl(cv::Mat Image, cv::Mat
    /// Distance, cv::Mat Direction, cv::Mat Interpolation_coeff) /    {
    ///* read data */
    //    for(int y=0;y<Y;y++)
    //        for(int x=0;x<X;x++)
    //            image[ x + y * X ] = (double) image_cv.at<image_cv.type()>(x,y);
}
// Declares one image input and one image output on the csapex node.
void LineSegmentDetector::setup(csapex::NodeModifier& node_modifier)
{
    input_ = node_modifier.addInput<connection_types::CvMatMessage>("Image");
    output_ = node_modifier.addOutput<connection_types::CvMatMessage>("Image");
}
|
yaozd/com.yzd.hazelcast-lean
|
com.yzd.hazelcast-lean/02-hazelcast-in-vertx/src/main/java/com/yzd/schedule/ResponseJob.java
|
<gh_stars>0
package com.yzd.schedule;
import com.yzd.internal.Container;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.core.http.HttpServerRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
/**
* @Author: yaozh
* @Description:
*/
@Slf4j
@Component
public class ResponseJob {

    /**
     * Scheduled sweep: drains the container's pending-request map, answering
     * each parked HTTP request with the current timestamp, then removes it.
     */
    @Scheduled(initialDelay = 3000, fixedDelay = 1000 * 5)
    public void doWork() {
        Iterator<Map.Entry<String, HttpServerRequest>> pending =
                Container.getInstance().getRequestMap().entrySet().iterator();
        while (pending.hasNext()) {
            String body = new Date().toString();
            Map.Entry<String, HttpServerRequest> entry = pending.next();
            entry.getValue().response().end(body, (AsyncResult<Void> result) -> {
                if (result.failed()) {
                    log.warn("Failed", result.cause());
                }
            });
            // Remove via the iterator so the map is not mutated mid-iteration.
            pending.remove();
        }
    }
}
|
nicai20098/sofa-jraft
|
jraft-core/src/main/java/com/alipay/sofa/jraft/util/concurrent/DefaultSingleThreadExecutor.java
|
<reponame>nicai20098/sofa-jraft
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.jraft.util.concurrent;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import com.alipay.sofa.jraft.util.ExecutorServiceHelper;
import com.alipay.sofa.jraft.util.NamedThreadFactory;
import com.alipay.sofa.jraft.util.ThreadPoolUtil;
/**
*
* @author jiachun.fjc
*/
public final class DefaultSingleThreadExecutor implements SingleThreadExecutor {
    // All calls delegate to this wrapped executor.
    private final SingleThreadExecutor singleThreadExecutor;
    /**
     * Wraps a caller-supplied executor. The caller is trusted to provide an
     * executor that really is single-threaded; no check is performed here.
     *
     * @param singleThreadExecutorService a {@link ExecutorService} instance
     */
    public DefaultSingleThreadExecutor(ExecutorService singleThreadExecutorService) {
        this.singleThreadExecutor = wrapSingleThreadExecutor(singleThreadExecutorService);
    }
    /**
     * Creates its own single-thread pool with a bounded pending-task queue.
     *
     * @param poolName        name used for the pool and its daemon threads
     * @param maxPendingTasks capacity of the pending-task queue
     */
    public DefaultSingleThreadExecutor(String poolName, int maxPendingTasks) {
        this.singleThreadExecutor = createSingleThreadExecutor(poolName, maxPendingTasks);
    }
    @Override
    public void execute(final Runnable task) {
        this.singleThreadExecutor.execute(task);
    }
    @Override
    public boolean shutdownGracefully() {
        return this.singleThreadExecutor.shutdownGracefully();
    }
    @Override
    public boolean shutdownGracefully(final long timeout, final TimeUnit unit) {
        return this.singleThreadExecutor.shutdownGracefully(timeout, unit);
    }
    // Adapts any ExecutorService to SingleThreadExecutor; if it already
    // implements the interface it is returned unchanged.
    private static SingleThreadExecutor wrapSingleThreadExecutor(final ExecutorService executor) {
        if (executor instanceof SingleThreadExecutor) {
            return (SingleThreadExecutor) executor;
        } else {
            return new SingleThreadExecutor() {
                @Override
                public boolean shutdownGracefully() {
                    return ExecutorServiceHelper.shutdownAndAwaitTermination(executor);
                }
                @Override
                public boolean shutdownGracefully(final long timeout, final TimeUnit unit) {
                    return ExecutorServiceHelper.shutdownAndAwaitTermination(executor, unit.toMillis(timeout));
                }
                @Override
                public void execute(final Runnable command) {
                    executor.execute(command);
                }
            };
        }
    }
    // Builds a metric-enabled 1x1 thread pool with daemon threads and wraps it.
    private static SingleThreadExecutor createSingleThreadExecutor(final String poolName, final int maxPendingTasks) {
        final ExecutorService singleThreadPool = ThreadPoolUtil.newBuilder() //
            .poolName(poolName) //
            .enableMetric(true) //
            .coreThreads(1) //
            .maximumThreads(1) //
            .keepAliveSeconds(60L) //
            .workQueue(new LinkedBlockingQueue<>(maxPendingTasks)) //
            .threadFactory(new NamedThreadFactory(poolName, true)) //
            .build();
        return new SingleThreadExecutor() {
            @Override
            public boolean shutdownGracefully() {
                return ExecutorServiceHelper.shutdownAndAwaitTermination(singleThreadPool);
            }
            @Override
            public boolean shutdownGracefully(final long timeout, final TimeUnit unit) {
                return ExecutorServiceHelper.shutdownAndAwaitTermination(singleThreadPool, unit.toMillis(timeout));
            }
            @Override
            public void execute(final Runnable command) {
                singleThreadPool.execute(command);
            }
        };
    }
}
|
ppdaicorp/atlas
|
src/main/java/com/ppdai/atlas/dao/UserRepository.java
|
<filename>src/main/java/com/ppdai/atlas/dao/UserRepository.java
package com.ppdai.atlas.dao;
import com.ppdai.atlas.entity.UserEntity;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/**
 * JPA repository for {@code UserEntity}. Most queries filter on
 * {@code isActive=true} (soft-delete convention); deletion is implemented as
 * deactivation in {@link #removeOneEntityById(Long)}.
 */
public interface UserRepository extends BaseJpaRepository<UserEntity, Long> {
    /** All active users, ordered by id. */
    @Override
    @Query("select a from UserEntity a where a.isActive=true order by a.id")
    List<UserEntity> findAll();
    /** Active user by id, or null if absent/inactive. */
    @Query("select a from UserEntity a where a.isActive=true and a.id=?1")
    UserEntity findOneById(Long id);
    /** Active users with the given work number. */
    @Query("select a from UserEntity a where a.isActive=true and a.workNumber=?1")
    List<UserEntity> findByWorkNumber(String workNumber);
    /** Page of active users, ordered by id. */
    @Query("select a from UserEntity a where a.isActive=true order by a.id")
    @Override
    Page<UserEntity> findAll(Pageable pageable);
    // NOTE(review): unlike the other finders, this one does NOT filter on
    // isActive — confirm whether inactive users are intentionally included.
    @Query("select a from UserEntity a where a.userName=?1")
    UserEntity findByUserName(String userName);
    /** Substring match on user name, restricted to active users with a non-empty work number. */
    @Query("select a from UserEntity a where a.isActive=true and a.workNumber <> '' and a.userName LIKE CONCAT('%',:userName,'%') order by a.id")
    Page<UserEntity> fuzzyFindByUserName(@Param("userName") String userName, Pageable pageable);
    /** Active users from a source, newest LDAP update first. */
    @Query("select a from UserEntity a where a.isActive=true and a.source=?1 order by a.ldapUpdateTime desc")
    Page<UserEntity> findLdapLastUpdateFromSource(String source, Pageable pageable);
    /** Inactive (deleted) users from a source, newest LDAP update first. */
    @Query("select a from UserEntity a where a.isActive=false and a.source=?1 order by a.ldapUpdateTime desc")
    Page<UserEntity> findLdapLastDeleteFromSource(String source, Pageable pageable);
    /** Soft delete: marks the row inactive rather than removing it. */
    @Modifying(clearAutomatically = true)
    @Query("update UserEntity a set a.isActive=false where a.id=?1")
    void removeOneEntityById(Long id);
    Page<UserEntity> findAll(Specification<UserEntity> specification, Pageable pageable);
}
|
truthiswill/intellij-community
|
java/java-tests/testData/codeInsight/parameterInfo/SelectionWithGenerics.java
|
class Test {
{
refresh(<caret>false, false, null, "");
}
public final void refresh(boolean async, boolean recursive, Runnable finishRunnable, String... files) {
}
public final void refresh(boolean async, boolean recursive, Runnable finishRunnable, Integer files) {
}
}
|
indigoabstract/appplex
|
src/mod/pub/tst/mouse-input/mod-mouse-input.hxx
|
<reponame>indigoabstract/appplex
#pragma once
#include "mws-mod.hxx"
// Mod exercising mouse input. Instances are obtained only through the nwi()
// factory; the constructor is private.
class mod_mouse_input : public mws_mod
{
public:
// Factory returning a smart-pointer-managed instance.
static mws_sp<mod_mouse_input> nwi();
// Override of the base-class hook that builds this mod's UI.
virtual void build_sws() override;
private:
mod_mouse_input();
};
|
uk-gov-dft/bluebadge-citizen-webapp
|
acceptance-tests/src/test/java/uk/gov/service/bluebadge/test/acceptance/pages/site/BenifitsPage.java
|
<gh_stars>0
package uk.gov.service.bluebadge.test.acceptance.pages.site;
/**
 * Page-object constants for the benefits step of the citizen web app
 * acceptance tests: expected page titles, validation message, the benefit
 * field name and the page URL.
 * <p>
 * NOTE(review): the class name misspells "Benefits"; renaming would touch
 * every referencing test, so it is only flagged here.
 */
public class BenifitsPage {
public static final String PAGE_TITLE_YOURSELF = "Do you receive any of these benefits?";
public static final String PAGE_TITLE_SOMEONE_ELSE = "Do they receive any of these benefits?";
public static final String VALIDATION_MESSAGE_FOR_NO_OPTION = "Select a benefit option";
public static final String BENEFIT_RECEIVED_LIST = "benefitType";
public static final String PAGE_URL = "/benefits";
}
|
udongo/udongo
|
spec/lib/udongo/breadcrumb_spec.rb
|
<reponame>udongo/udongo
require 'rails_helper'
# Specs for Udongo::Breadcrumb -- an ordered collection of
# { name:, link: } crumb hashes.
describe Udongo::Breadcrumb do
let(:instance) { Udongo::Breadcrumb.new }
# A freshly built breadcrumb holds no crumbs.
it '#all' do
expect(instance.all).to eq []
end
# #add appends a crumb as a name/link hash.
it '#add' do
instance.add :foo, :bar
expect(instance.all).to eq [{ name: :foo, link: :bar }]
end
# any? reflects whether at least one crumb was added.
describe '#any?' do
it :false do
expect(instance).not_to be_any
end
it :true do
instance.add :foo, :bar
expect(instance).to be_any
end
end
# Public interface smoke check.
it '#respond_to?' do
expect(instance).to respond_to(:all, :add, :any?, :each)
end
end
|
sxweetlollipop2912/MaCode
|
.LHP/He10/T.Hung/XINGAU/XINGAU/XINGAU.cpp
|
#include "pch.h"
#include <iostream>
#include <cstdio>
#include <algorithm>
#include <vector>
// Upper bound on grid dimensions.
#define maxN 51
// Upper bound on the number of connected components.
#define maxX 2502
// Character marking background (non-component) cells.
#define bground '.'
//#define xn '*'
// Character counted per component ('X' pixels).
#define pix 'X'
typedef int maxn;
// Grid read in Prepare() as m rows by n columns. xid holds the component id
// assigned to each cell (0 = unlabeled); cntp counts 'X' pixels per id.
maxn n, m, xid[maxN][maxN], cntp[maxX];
char map[maxN][maxN];
// 4-neighborhood offsets: up, down, left, right.
int sp[4][2] = { {-1,0},{1,0},{0,-1},{0,1} };
// Per-component 'X' counts, sorted before printing in Process().
std::vector <maxn> res;
void Prepare() {
std::cin >> n >> m;
for (maxn i = 0; i < m; i++) {
for (maxn j = 0; j < n; j++) {
std::cin >> map[i][j];
}
}
}
// True when x is a valid index in [0, n) -- i.e. a valid *column* index.
// NOTE(review): callers apply this to row indices as well, but rows are
// bounded by m, not n; that is only correct for square grids -- confirm.
bool check(const maxn x) {
return x >= 0 && x < n;
}
void DFS1(const maxn x, const maxn y, const maxn fill) {
xid[x][y] = fill;
for (int ii = 0; ii < 4; ii++) {
maxn i = x + sp[ii][0], j = y + sp[ii][1];
if (!check(i) || !check(j) || map[i][j] == bground || xid[i][j]) continue;
DFS1(i, j, fill);
}
}
void DFS2(const maxn x, const maxn y) {
xid[x][y] = 0;
for (int ii = 0; ii < 4; ii++) {
maxn i = x + sp[ii][0], j = y + sp[ii][1];
if (!check(i) || !check(j) || !xid[i][j] || map[i][j] != pix) continue;
DFS2(i, j);
}
}
void Process() {
maxn fill = 0;
for (maxn i = 0; i < m; i++) {
for (maxn j = 0; j < n; j++) {
if (xid[i][j] || map[i][j] == bground) continue;
DFS1(i, j, ++fill);
}
}
for (maxn i = 0; i < m; i++) {
for (maxn j = 0; j < n; j++) {
if (!xid[i][j] || map[i][j] != pix) continue;
++cntp[xid[i][j]];
//std::cout << i << ' ' << j << ' ' << xid[i][j] << ' ' << cntp[xid[i][j]] << '\n';
DFS2(i, j);
}
}
res.clear();
for (maxn i = 0; i < maxX; i++)
if (cntp[i]) res.push_back(cntp[i]);
std::sort(res.begin(), res.end());
for (maxn i = 0; i < res.size(); i++)
std::cout << res[i] << ' ';
}
// Entry point: detach iostreams from stdio for speed, then read and solve.
int main() {
    //freopen("xingau.inp", "r", stdin);
    //freopen("xingau.out", "w", stdout);
    std::ios_base::sync_with_stdio(false);
    std::cin.tie(nullptr);
    Prepare();
    Process();
}
|
ukiras123/DataStructure-Algorithm
|
src/com/datastructure/javacollection/CollectionsUtils.java
|
package com.datastructure.javacollection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Created by Kiran on 9/20/18.
*/
/**
 * Demonstrates {@link java.util.Collections} helpers:
 * {@code addAll} for bulk insertion (varargs and arrays) and {@code max}
 * for finding the largest element.
 * <p>
 * Created by Kiran on 9/20/18.
 */
public class CollectionsUtils {
    public static void main(String[] args) {
        List<String> courses = new ArrayList<String>();
        courses.add("C");
        courses.add("Core Java");
        courses.add("Advance Java");
        System.out.println("Initial collection value:" + courses);
        Collections.addAll(courses, "Servlet", "JSP");
        System.out.println("After adding elements collection value:" + courses);
        String[] extras = {"C#", ".Net"};
        Collections.addAll(courses, extras);
        System.out.println("After adding array collection value:" + courses);
        System.out.println("-----------Collection max()-----------");
        List<Integer> numbers = new ArrayList<Integer>();
        Collections.addAll(numbers, 46, 67, 24, 16, 8, 12);
        System.out.println("Value of maximum element from the collection: "+Collections.max(numbers));
    }
}
|
li624120638/EasySa
|
framework/core/src/easysa_eventbus.cpp
|
<reponame>li624120638/EasySa
/*************************************************************************
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*************************************************************************/
#include "easysa_eventbus.hpp"
#include <list>
#include <memory>
#include <thread>
#include <utility>
#define GLOG_NO_ABBREVIATED_SEVERITIES
#include <glog/logging.h>
#include "easysa_pipeline.hpp"
namespace easysa {
// Stops the event loop thread before the bus is destroyed.
EventBus::~EventBus() {
Stop();
}
// True while the event loop is (or should be) running.
bool EventBus::IsRunning() {
return running_.load();
}
// Starts the background thread that drains the event queue.
// NOTE(review): calling Start() twice would overwrite the previous thread
// handle without joining it -- confirm callers start the bus only once.
bool EventBus::Start() {
running_.store(true);
event_thread_ = std::thread(&EventBus::EventLoop, this);
return true;
}
// Signals the loop to stop and joins the worker thread; no-op when the bus
// is not running.
void EventBus::Stop() {
if (IsRunning()) {
running_.store(false);
if (event_thread_.joinable()) {
event_thread_.join();
}
}
}
// @return The number of bus watchers that has been added to this event bus.
// Watchers are prepended, so the most recently added one is notified first.
uint32_t EventBus::AddBusWatch(BusWatcher func) {
std::lock_guard<std::mutex> lk(watcher_mtx_);
bus_watchers_.push_front(func);
return bus_watchers_.size(); // disable warning
}
// Removes every registered watcher.
void EventBus::ClearAllWatchers() {
std::lock_guard<std::mutex> lk(watcher_mtx_);
bus_watchers_.clear();
}
// Returns the watcher list without taking watcher_mtx_; callers that iterate
// while watchers may be mutated must hold the lock themselves (EventLoop does).
const std::list<BusWatcher>& EventBus::GetBusWatchers() const {
return bus_watchers_;
}
// Enqueues an event; rejected (with a warning) when the bus is not running.
bool EventBus::PostEvent(Event event) {
if (!running_.load()) {
LOG(WARNING) << "[core]:" << "Post event failed, pipeline not running";
return false;
}
// LOG(INFO) << "[core]:" << "Recieve Event from [" << event.module->GetName() << "] :" << event.message;
queue_.Push(event);
#ifdef UNIT_TEST
// Mirror the next posted event into the test queue (one-shot flag).
if (unit_test) {
test_eventq_.Push(event);
unit_test = false;
}
#endif
return true;
}
// Blocks until an event arrives, polling in 100 ms slices so shutdown is
// noticed; returns an EVENT_STOP event once the bus stops.
// NOTE(review): if Stop() races with a successful pop, the popped event's
// type is overwritten with EVENT_STOP and the event is lost -- confirm
// that is acceptable during shutdown.
Event EventBus::PollEvent() {
Event event;
event.type = EVENT_INVALID;
while (running_.load()) {
if (queue_.WaitAndTryPop(event, std::chrono::milliseconds(100))) {
break;
}
}
if (!running_.load()) event.type = EVENT_STOP;
return event;
}
// Worker loop: pops events and feeds each one to the registered watchers
// (most recently added first; see AddBusWatch) until a watcher intercepts
// or requests stop, or a stop/invalid event arrives.
void EventBus::EventLoop() {
const std::list<BusWatcher>& kWatchers = GetBusWatchers();
EventHandleFlag flag = EVENT_HANDLE_NULL;
// SetThreadName("cn-EventLoop", pthread_self());
// start loop
while (IsRunning()) {
Event event = PollEvent();
if (event.type == EVENT_INVALID) {
LOG(INFO) << "[core]:" << "[EventLoop] event type is invalid";
break;
}
else if (event.type == EVENT_STOP) {
LOG(INFO) << "[core]:" << "[EventLoop] Get stop event";
break;
}
// Hold the watcher lock while dispatching so the list cannot be mutated
// mid-iteration; released at the end of each loop pass.
std::unique_lock<std::mutex> lk(watcher_mtx_);
for (auto& watcher : kWatchers) {
flag = watcher(event);
if (flag == EVENT_HANDLE_INTERCEPTION || flag == EVENT_HANDLE_STOP) {
break;
}
}
if (flag == EVENT_HANDLE_STOP) {
break;
}
}
LOG(INFO) << "[core]:" << "Event bus exit.";
}
#ifdef UNIT_TEST
// Test-only: drains the mirrored test queue with the same polling and stop
// semantics as PollEvent().
Event EventBus::PollEventToTest() {
Event event;
event.type = EVENT_INVALID;
while (running_.load()) {
if (test_eventq_.WaitAndTryPop(event, std::chrono::milliseconds(100))) {
break;
}
}
if (!running_.load()) event.type = EVENT_STOP;
return event;
}
#endif
} // namespace easysa
|
anshulverma/ghost
|
web/src/main/java/com/mystique/ghost/web/GhostResponseBuilder.java
|
package com.mystique.ghost.web;
import com.mystique.ghost.core.model.CharacterContext;
/**
* @author mystique
*/
/**
 * Fluent builder assembling a {@link GhostResponse} from the current prefix
 * and the {@link CharacterContext} computed for it.
 *
 * @author mystique
 */
public class GhostResponseBuilder {
  private CharacterContext characterContext;
  private String prefix;

  public GhostResponseBuilder setCharacterContext(CharacterContext characterContext) {
    this.characterContext = characterContext;
    return this;
  }

  public GhostResponseBuilder setPrefix(String prefix) {
    this.prefix = prefix;
    return this;
  }

  /** Maps the character context onto a response status and builds the response. */
  public GhostResponse build() {
    final GhostResponseStatus status;
    if (characterContext == CharacterContext.INVALID) {
      status = GhostResponseStatus.INVALID;
    } else if (characterContext == CharacterContext.NULL) {
      status = GhostResponseStatus.PREFIX_COMPLETE;
    } else if (characterContext.isLeaf()) {
      status = GhostResponseStatus.COMPLETE;
    } else {
      status = GhostResponseStatus.SUCCESS;
    }
    return new GhostResponse(prefix, characterContext.getValue(), status);
  }
}
|
alexey-anufriev/intellij-community
|
platform/platform-api/src/com/intellij/openapi/options/colors/ColorDescriptor.java
|
<reponame>alexey-anufriev/intellij-community
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.options.colors;
import com.intellij.openapi.editor.colors.ColorKey;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import java.util.function.Supplier;
/**
* Describes a color which can be configured in a custom colors and fonts page.
*
* @see ColorSettingsPage#getColorDescriptors()
*/
public final class ColorDescriptor extends AbstractKeyDescriptor<ColorKey> {
/** Shared empty array, to avoid per-call allocations. */
public static final ColorDescriptor[] EMPTY_ARRAY = new ColorDescriptor[0];
/** The role the described color plays and whether it may carry transparency. */
public enum Kind {
BACKGROUND,
FOREGROUND,
BACKGROUND_WITH_TRANSPARENCY,
FOREGROUND_WITH_TRANSPARENCY;
/** True for both background kinds, with or without transparency. */
public boolean isBackground() {
return this == BACKGROUND || this == BACKGROUND_WITH_TRANSPARENCY;
}
/** True for both foreground kinds, with or without transparency. */
public boolean isForeground() {
return this == FOREGROUND || this == FOREGROUND_WITH_TRANSPARENCY;
}
/** True when an alpha channel is allowed for this color. */
public boolean isWithTransparency() {
return this == FOREGROUND_WITH_TRANSPARENCY || this == BACKGROUND_WITH_TRANSPARENCY;
}
}
private final Kind myKind;
/** Please use {@link #ColorDescriptor(Supplier, ColorKey, Kind)} instead. */
public ColorDescriptor(@NotNull @Nls(capitalization = Nls.Capitalization.Sentence) String displayName,
@NotNull ColorKey key,
@NotNull Kind kind) {
this(new StaticSupplier(displayName), key, kind);
}
/**
 * Creates a color descriptor with the specified name and color key.
 *
 * @param displayName the name of the color shown in the colors list.
 * @param key the color key for which the color is specified.
 * @param kind the type of color corresponding to the color key (foreground or background).
 */
public ColorDescriptor(@NotNull Supplier<@Nls(capitalization = Nls.Capitalization.Sentence) String> displayName,
@NotNull ColorKey key,
@NotNull Kind kind) {
super(displayName, key);
myKind = kind;
}
/**
 * Returns the type of color corresponding to the color key (foreground or background).
 */
public @NotNull Kind getKind() {
return myKind;
}
@SuppressWarnings("RedundantMethodOverride") // binary compatibility
@Override
public @NotNull ColorKey getKey() {
return super.getKey();
}
}
|
matthewfaw/kanye_game_308
|
src/views/scenes/UltralightBeamScene.java
|
package views.scenes;
import utils.PictureNames;
import views.elements.background.BackgroundImage;
/**
 * Game scene that shows only the "Bound" background image.
 */
public class UltralightBeamScene extends GameScene {
    private static final String BACKGROUND_IMAGE_NAME = PictureNames.Bound;

    /**
     * Creates an empty scene with Kanye and Kim.
     *
     * @param aWidth  scene width
     * @param aHeight scene height
     */
    public UltralightBeamScene(int aWidth, int aHeight)
    {
        super();
        fRoot.getChildren().add(
                new BackgroundImage(aWidth, aHeight, BACKGROUND_IMAGE_NAME).getRoot());
    }
}
|
angry-tony/syndesis-integration
|
app/runtime/model/src/main/java/io/syndesis/integration/model/YamlHelpers.java
|
<filename>app/runtime/model/src/main/java/io/syndesis/integration/model/YamlHelpers.java
/*
* Copyright (C) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.syndesis.integration.model;
import java.io.IOException;
import java.io.InputStream;
import java.util.ServiceLoader;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import io.syndesis.integration.model.steps.Step;
public final class YamlHelpers {

    private YamlHelpers() {
        // Utility class: no instances.
    }

    /**
     * Creates the ObjectMapper used for (de)serializing Syndesis YAML models:
     * no native YAML type ids, minimal quoting, numbers-as-strings always
     * quoted, empty values omitted, indented output, and every {@link Step}
     * implementation found via {@link ServiceLoader} registered as a named
     * subtype under its kind.
     */
    public static ObjectMapper createObjectMapper() {
        // Fix: the original configured USE_NATIVE_TYPE_ID twice with the same
        // value; the redundant second call was removed.
        final YAMLFactory yamlFactory = new YAMLFactory()
            .configure(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID, false)
            .configure(YAMLGenerator.Feature.MINIMIZE_QUOTES, true)
            .configure(YAMLGenerator.Feature.ALWAYS_QUOTE_NUMBERS_AS_STRINGS, true);
        ObjectMapper mapper = new ObjectMapper(yamlFactory)
            .registerModule(new Jdk8Module())
            .setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
            .enable(SerializationFeature.INDENT_OUTPUT)
            .disable(SerializationFeature.WRITE_NULL_MAP_VALUES);
        // Register each Step subtype under its kind so YAML discriminators resolve.
        for (Step step : ServiceLoader.load(Step.class, YamlHelpers.class.getClassLoader())) {
            mapper.registerSubtypes(new NamedType(step.getClass(), step.getKind()));
        }
        return mapper;
    }

    /** Reads a {@link SyndesisModel} from a YAML stream. */
    public static SyndesisModel load(InputStream source) throws IOException {
        return YamlHelpers.createObjectMapper().readValue(source, SyndesisModel.class);
    }
}
|
waikato-datamining/bynning
|
src/wai/bynning/_BinItem.py
|
from typing import Generic, Iterator, Iterable, TypeVar
from ._typing import KeyType
from ._Binnable import Binnable
from .extraction import Extractor
# The type of the payload of the bin-item (generally non-binnable)
PayloadType = TypeVar("PayloadType")
class BinItem(Binnable[KeyType], Generic[KeyType, PayloadType]):
    """
    Adapter that makes an arbitrary object binnable by pairing it with an
    explicit bin-key. Wrapping an already-binnable object (via extract_from
    with a suitable extractor) effectively re-keys it.
    """
    def __init__(self, bin_key: KeyType, item: PayloadType):
        # Key under which this item is binned.
        self._key: KeyType = bin_key
        # The wrapped object.
        self._item: PayloadType = item

    @property
    def bin_key(self) -> KeyType:
        return self._key

    @property
    def payload(self) -> PayloadType:
        """
        Gets the item that is the payload for this bin item.
        """
        return self._item

    @property
    def payload_is_binnable(self) -> bool:
        """
        Checks if the payload of this bin-item is binnable (i.e. this
        bin-item is being used to modify the bin-key of the payload).
        """
        return isinstance(self._item, Binnable)

    def __str__(self) -> str:
        return f"({self._key}): {self._item}"

    @staticmethod
    def extract_from(extractor: Extractor[PayloadType, KeyType],
                     items: Iterable[PayloadType]) -> Iterator['BinItem[KeyType, PayloadType]']:
        """
        Lazily wraps each given item in a bin-item keyed by the extractor.

        :param extractor: The extractor to use to extract the bin-key,
                          or a constant bin-key to use for all items.
        :param items: The items to create bin-items for.
        :return: The bin-items.
        """
        for item in items:
            yield BinItem(extractor.extract(item), item)

    @staticmethod
    def unwrapping_iterator(wrapped_iterator: Iterator['BinItem[KeyType, PayloadType]']) \
            -> Iterator[PayloadType]:
        """
        Returns an iterator over the payloads of the bin-items in the given iterator.

        :param wrapped_iterator: An iterator over bin-items.
        """
        return map(lambda bin_item: bin_item.payload, wrapped_iterator)
|
vladaspasic/fireant
|
fireant/__init__.py
|
<reponame>vladaspasic/fireant
# noinspection PyUnresolvedReferences
from .database import *
# noinspection PyUnresolvedReferences
from .slicer import *
# noinspection PyUnresolvedReferences
from .slicer.widgets import *
__version__ = '2.0.1'
|
AlexanderKosianchuk/luche-front
|
src/components/supervision/realtime-chart/RealtimeChart.js
|
import './realtime-chart.sass';
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { Line, defaults } from 'react-chartjs-2';
import transmit from 'actions/transmit';
const LOCKED_SCALE_TIMEOUT = 200;
// Line chart that redraws as realtime samples arrive for one supervision
// parameter. Y-axis bounds are widened on the fly (calcAxesVal) with a short
// lockout (lockScale) so the scale does not thrash on every sample.
class RealtimeChart extends Component {
constructor(props) {
super(props);
// Chart.js global switch: no animation for realtime redraws.
defaults.global.animation = false;
// Per-bound flags gating how often each axis end may be rescaled.
this.isScaleAllowed = { min: true, max: true };
// Current y-axis bounds, seeded from the parameter's declared range.
this.yAxesMin = this.props.param.minValue;
this.yAxesMax = this.props.param.maxValue;
}
// Builds the chart.js data object from the current line/timeline props.
getData() {
return {
labels: this.props.timeline.length > 0 ? this.props.timeline : [0],
datasets: [{
fill: false,
label: this.props.param.code,
backgroundColor: '#' + this.props.param.color,
borderColor: '#' + this.props.param.color,
data: this.props.line
}]
};
}
// Disallows rescaling of the given bound ('min'/'max') for LOCKED_SCALE_TIMEOUT ms.
lockScale(val) {
if (this.isScaleAllowed[val] === false) {
return;
}
this.isScaleAllowed[val] = false;
setTimeout(() => {
this.isScaleAllowed[val] = true;
}, LOCKED_SCALE_TIMEOUT);
}
// Proposes a new bound for `side` ('min'/'max'), padding 20% beyond both the
// declared parameter range and the observed data extremes; keeps `current`
// while the bound is locked or unchanged.
// NOTE(review): `region` is minValue + maxValue, not (max - min); if a range
// width was intended, padding is skewed for ranges not anchored at 0 -- confirm.
calcAxesVal(side, current) {
let region = this.props.param.minValue + this.props.param.maxValue;
let minOnLine = (this.props.line.length > 0)
? Math.min(...this.props.line) : this.props.param.minValue;
let maxOnLine = (this.props.line.length > 0)
? Math.max(...this.props.line) : this.props.param.maxValue;
let axisValue = 0;
if (side === 'min') {
axisValue = Math[side](
this.props.param[side + 'Value'] - region * 0.2,
minOnLine - Math.abs((maxOnLine - minOnLine) * 0.2)
);
} else if (side === 'max') {
axisValue = Math[side](
this.props.param[side + 'Value'] + region * 0.2,
maxOnLine + Math.abs((maxOnLine - minOnLine) * 0.2)
);
}
if ((axisValue !== current)
&& this.isScaleAllowed[side]
) {
this.lockScale(side);
return axisValue;
}
return current;
}
// Chart.js options: time-based x axis; fixed y bounds are applied unless the
// declared range is exactly 0..1 (presumably binary signals -- confirm).
getOptions() {
let options = {
scales: {
xAxes: [{
type: 'time',
time: {
unit: 'second'
}
}]
},
legend: {
onClick: (e) => e.stopPropagation()
}
};
if ((this.props.param.minValue !== 0) && (this.props.param.maxValue !== 1)) {
this.yAxesMin = this.calcAxesVal('min', this.yAxesMin);
this.yAxesMax = this.calcAxesVal('max', this.yAxesMax);
options.scales.yAxes = [{
display: true,
stacked: true,
ticks: {
min: this.yAxesMin,
max: this.yAxesMax
}
}]
}
return options;
}
// Unchecks this chart via the supervision checkstate action (removes it).
handleClick() {
this.props.transmit('CHANGE_SUPERVISION_CHARTS_CHECKSTATE', {
...this.props.param,
...{ state: false }
});
}
render() {
return (
<div className='supervision-realtime-chart'>
<Line
height={ this.props.isBinary ? 100 : 280 }
data={ this.getData() }
options={ this.getOptions() }
redraw={ true }
/>
<div
className='supervision-realtime-chart__checkbox'
onClick={ this.handleClick.bind(this) }
>
<span className='glyphicon glyphicon-remove'></span>
</div>
</div>
);
}
}
RealtimeChart.propTypes = {
param: PropTypes.object.isRequired,
line: PropTypes.array.isRequired,
timeline: PropTypes.array.isRequired,
transmit: PropTypes.func.isRequired
};
// No store state is consumed directly by this component.
function mapStateToProps(state) {
return {};
}
// Exposes the transmit action creator, bound to the store's dispatch.
function mapDispatchToProps(dispatch) {
return {
transmit: bindActionCreators(transmit, dispatch),
}
}
export default connect(mapStateToProps, mapDispatchToProps)(RealtimeChart);
|
ericoporto/ags-old
|
libsrc/allegro/include/allegro.h
|
<filename>libsrc/allegro/include/allegro.h
/* ______ ___ ___
* /\ _ \ /\_ \ /\_ \
* \ \ \L\ \\//\ \ \//\ \ __ __ _ __ ___
* \ \ __ \ \ \ \ \ \ \ /'__`\ /'_ `\/\`'__\/ __`\
* \ \ \/\ \ \_\ \_ \_\ \_/\ __//\ \L\ \ \ \//\ \L\ \
* \ \_\ \_\/\____\/\____\ \____\ \____ \ \_\\ \____/
* \/_/\/_/\/____/\/____/\/____/\/___L\ \/_/ \/___/
* /\____/
* \_/__/
*
* Main header file for the entire Allegro library.
* (separate modules can be included from the allegro/ directory)
*
* By <NAME>.
*
* <NAME> split the original allegro.h into separate headers.
*
* See readme.txt for copyright information.
*/
#ifndef ALLEGRO_H
#define ALLEGRO_H
#include "allegro/base.h"
#include "allegro/system.h"
#include "allegro/debug.h"
#include "allegro/unicode.h"
#include "allegro/palette.h"
#include "allegro/gfx.h"
#include "allegro/color.h"
#include "allegro/draw.h"
#include "allegro/fli.h"
#include "allegro/file.h"
#include "allegro/datafile.h"
#include "allegro/fixed.h"
#include "allegro/fmaths.h"
#ifdef ALLEGRO_EXTRA_HEADER
#include ALLEGRO_EXTRA_HEADER
#endif
#endif /* ifndef ALLEGRO_H */
|
cadentic/store_front
|
email-builder/src/store/mutations.js
|
// https://vuex.vuejs.org/en/mutations.html
export default {
SET_SECTIONS(state, data) {
state.sections = data;
},
SET_COLORS(state, data) {
state.colors = data;
}
}
|
lecho/conference-app
|
app/src/main/java/com/github/lecho/mobilization/ui/loader/EventViewDataLoader.java
|
package com.github.lecho.mobilization.ui.loader;
import android.content.Context;
import android.util.Log;
import com.github.lecho.mobilization.BuildConfig;
import com.github.lecho.mobilization.util.Optional;
import com.github.lecho.mobilization.viewmodel.EventViewModel;
/**
* Created by Leszek on 2015-09-03.
*/
/**
 * Loader that reads the event view model from Realm on a background thread.
 * <p>
 * Created by Leszek on 2015-09-03.
 */
public class EventViewDataLoader extends BaseRealmLoader<Optional<EventViewModel>> {

    private static final String TAG = EventViewDataLoader.class.getSimpleName();

    /** Factory used by callers; the constructor is private. */
    public static EventViewDataLoader getLoader(Context context) {
        return new EventViewDataLoader(context);
    }

    private EventViewDataLoader(Context context) {
        super(context);
    }

    /** Loads the event view model from the realm facade; logs in debug builds. */
    @Override
    public Optional<EventViewModel> loadInBackground() {
        if (BuildConfig.DEBUG) {
            Log.d(TAG, "Loading event view data");
        }
        return realmFacade.loadEvent();
    }
}
|
joshrose/audacity
|
src/Journal.cpp
|
/**********************************************************************
Audacity: A Digital Audio Editor
Journal.cpp
<NAME>
*******************************************************************//*!
\namespace Journal
\brief Facilities for recording and playback of sequences of user interaction
*//*******************************************************************/
#include "Journal.h"
#include "JournalOutput.h"
#include "JournalRegistry.h"
#include <algorithm>
#include <wx/app.h>
#include <wx/filename.h>
#include "MemoryX.h"
#include "Prefs.h"
namespace Journal {
namespace {
// Name of the journal file to replay, set via SetInputFileName().
wxString sFileNameIn;
wxTextFile sFileIn;
// Most recently read (not yet consumed) line of the input journal.
wxString sLine;
// Invariant: the input file has not been opened, or else sLineNumber counts
// the number of lines consumed by the tokenizer
int sLineNumber = -1;
// Preference flag controlling whether journal recording is on.
BoolSetting JournalEnabled{ L"/Journal/Enabled", false };
// Advances to the next input line, if any, bumping the consumed-line count.
inline void NextIn()
{
if ( !sFileIn.Eof() ) {
sLine = sFileIn.GetNextLine();
++sLineNumber;
}
}
// Returns the tokens of the next non-blank, non-comment line without
// consuming it; empty when not replaying or at end of input.
wxArrayStringEx PeekTokens()
{
wxArrayStringEx tokens;
if ( Journal::IsReplaying() )
for ( ; !sFileIn.Eof(); NextIn() ) {
if ( sLine.StartsWith( CommentCharacter ) )
continue;
tokens = wxSplit( sLine, SeparatorCharacter, EscapeCharacter );
if ( tokens.empty() )
// Ignore blank lines
continue;
break;
}
return tokens;
}
// Keyword of the journal's version header line.
constexpr auto VersionToken = wxT("Version");
// Numbers identifying the journal format version
int journalVersionNumbers[] = {
1
};
// Renders the version numbers as a dotted string, e.g. "1".
wxString VersionString()
{
wxString result;
for ( auto number : journalVersionNumbers ) {
auto str = wxString::Format( "%d", number );
result += ( result.empty() ? str : ( '.' + str ) );
}
return result;
}
//! True if value is an acceptable journal version number to be rerun
bool VersionCheck( const wxString &value )
{
   auto strings = wxSplit( value, '.' );
   std::vector<int> numbers;
   for ( auto &string : strings ) {
      // Renamed from `value`, which shadowed the function parameter.
      long component;
      if ( !string.ToCLong( &component ) )
         return false;
      numbers.push_back( component );
   }
   // OK if the static version number is not less than the given value
   // Maybe in the future there will be a compatibility break
   return !std::lexicographical_compare(
      std::begin( journalVersionNumbers ), std::end( journalVersionNumbers ),
      numbers.begin(), numbers.end() );
}
}
// Constructing a SyncException marks the whole journal run as failed.
SyncException::SyncException()
{
// If the exception is ever constructed, cause nonzero program exit code
SetError();
}
SyncException::~SyncException() {}
// Delayed handling: request an orderly application quit.
void SyncException::DelayedHandlerAction()
{
// Simulate the application Exit menu item
wxCommandEvent evt{ wxEVT_MENU, wxID_EXIT };
wxTheApp->AddPendingEvent( evt );
}
// Whether journal recording is enabled in preferences.
bool RecordEnabled()
{
return JournalEnabled.Read();
}
// Persists the recording preference; returns whether the write succeeded.
bool SetRecordEnabled(bool value)
{
auto result = JournalEnabled.Write(value);
gPrefs->Flush();
return result;
}
// Replaying iff an input journal file is currently open.
bool IsReplaying()
{
return sFileIn.IsOpened();
}
// Remembers the journal file to replay; it is opened later, in Begin().
void SetInputFileName(const wxString &path)
{
sFileNameIn = path;
}
// Opens and validates the input journal (when a name was set), opens the
// output journal and writes its header (when recording is enabled), then
// runs all registered initializers. Returns false if any step flagged an error.
bool Begin( const FilePath &dataDir )
{
if ( !GetError() && !sFileNameIn.empty() ) {
wxFileName fName{ sFileNameIn };
fName.MakeAbsolute( dataDir );
const auto path = fName.GetFullPath();
sFileIn.Open( path );
if ( !sFileIn.IsOpened() )
SetError();
else {
sLine = sFileIn.GetFirstLine();
sLineNumber = 0;
// The first journal line must be a valid "Version x[.y...]" header.
auto tokens = PeekTokens();
NextIn();
if( !(
tokens.size() == 2 &&
tokens[0] == VersionToken &&
VersionCheck( tokens[1] )
) )
SetError();
}
}
if ( !GetError() && RecordEnabled() ) {
wxFileName fName{ dataDir, "journal", "txt" };
const auto path = fName.GetFullPath();
if ( !OpenOut( path ) )
SetError();
else {
// Generate a header
Comment( wxString::Format(
wxT("Journal recorded by %s on %s")
, wxGetUserName()
, wxDateTime::Now().Format()
) );
Output({ VersionToken, VersionString() });
}
}
// Call other registered initialization steps
for (auto &initializer : GetInitializers()) {
if (initializer && !initializer()) {
SetError();
break;
}
}
return !GetError();
}
// Consumes and returns the next line's tokens; throws SyncException when
// no lines remain.
wxArrayStringEx GetTokens()
{
auto result = PeekTokens();
if ( !result.empty() ) {
NextIn();
return result;
}
throw SyncException{};
}
// Replays one journal line: looks up the handler registered under the line's
// first token and invokes it with all tokens. Returns false when not
// replaying (or already in error); throws SyncException on an unknown
// command or a failing handler.
bool Dispatch()
{
if ( GetError() )
// Don't repeatedly indicate error
// Do nothing
return false;
if ( !IsReplaying() )
return false;
// This will throw if no lines remain. A proper journal should exit the
// program before that happens.
auto words = GetTokens();
// Lookup dispatch function by the first field of the line
auto &table = GetDictionary();
auto &name = words[0];
auto iter = table.find( name );
if ( iter == table.end() )
throw SyncException{};
// Pass all the fields including the command name to the function
if ( !iter->second( words ) )
throw SyncException{};
return true;
}
// Records the string and/or checks it against the next replayed line;
// throws SyncException on mismatch or premature end of input.
void Sync( const wxString &string )
{
if ( IsRecording() || IsReplaying() ) {
if ( IsRecording() )
Output( string );
if ( IsReplaying() ) {
if ( sFileIn.Eof() || sLine != string )
throw SyncException{};
NextIn();
}
}
}
// As above, for a token list joined with the journal separator/escape.
void Sync( const wxArrayString &strings )
{
if ( IsRecording() || IsReplaying() ) {
auto string = ::wxJoin( strings, SeparatorCharacter, EscapeCharacter );
Sync( string );
}
}
// Convenience overload for brace-list token syntax.
void Sync( std::initializer_list< const wxString > strings )
{
return Sync( wxArrayStringEx( strings ) );
}
// Journal-synchronized wrapper for an interactive action returning an int.
// When replaying, the recorded result is read back (and re-recorded if also
// recording) instead of running `action`; otherwise the action runs and its
// result is recorded.
int IfNotPlaying(
const wxString &string, const InteractiveAction &action )
{
// Special journal word
Sync(string);
// Then read or write the return value on another journal line
if ( IsReplaying() ) {
auto tokens = GetTokens();
if ( tokens.size() == 1 ) {
try {
std::wstring str{ tokens[0].wc_str() };
size_t length = 0;
auto result = std::stoi(str, &length);
// Accept only when the whole token parsed as an integer.
if (length == str.length()) {
if (IsRecording())
Journal::Output( std::to_wstring(result) );
return result;
}
}
catch ( const std::exception& ) {}
}
throw SyncException{};
}
else {
auto result = action ? action() : 0;
if ( IsRecording() )
Output( std::to_wstring( result ) );
return result;
}
}
// Computes the process exit status for the journal feature: 0 on success,
// otherwise the 1-based input line number where replay failed (or -1 when
// no line number is available).
int GetExitCode()
{
// Unconsumed commands remaining in the input file is also an error condition.
if( !GetError() && !PeekTokens().empty() ) {
NextIn();
SetError();
}
if ( GetError() ) {
// Return nonzero
// Returning the (1-based) line number at which the script failed is a
// simple way to communicate that information to the test driver script.
return sLineNumber ? sLineNumber : -1;
}
// Return zero to mean all is well, the convention for command-line tools
return 0;
}
}
|
samcom12/anuga_core
|
anuga/utilities/tests/test_file_utils.py
|
<gh_stars>100-1000
import unittest
import tempfile
import os
import shutil
import sys
from anuga.utilities.file_utils import copy_code_files, get_all_swwfiles
from anuga.utilities.file_utils import del_dir
from anuga.utilities.sww_merge import sww_merge, _sww_merge
class Test_FileUtils(unittest.TestCase):
    def test_copy_code_files(self):
        """test that the copy_code_files() function is sane.
        """
        # Helper: create a small text file at path f.
        def create_file(f):
            fd = open(f, 'w')
            fd.write('%s\n' % f)
            fd.close()
        # create working directories and test files
        work_dir = tempfile.mkdtemp()
        dst_dir = tempfile.mkdtemp(dir=work_dir)
        src_dir = tempfile.mkdtemp(dir=work_dir)
        f1 = 'file1'
        filename1 = os.path.join(src_dir, f1)
        create_file(filename1)
        f2 = 'file2'
        filename2 = os.path.join(src_dir, f2)
        create_file(filename2)
        f3 = 'file3'
        filename3 = os.path.join(src_dir, f3)
        create_file(filename3)
        f4 = 'file4'
        filename4 = os.path.join(src_dir, f4)
        create_file(filename4)
        f5 = 'file5'
        filename5 = os.path.join(src_dir, f5)
        create_file(filename5)
        # exercise the copy function: single file, varargs, and a tuple of files
        copy_code_files(dst_dir, filename1)
        copy_code_files(dst_dir, filename1, filename2)
        copy_code_files(dst_dir, (filename4, filename5, filename3))
        # test that files were actually copied
        self.assertTrue(os.access(os.path.join(dst_dir, f1), os.F_OK))
        self.assertTrue(os.access(os.path.join(dst_dir, f2), os.F_OK))
        self.assertTrue(os.access(os.path.join(dst_dir, f3), os.F_OK))
        self.assertTrue(os.access(os.path.join(dst_dir, f4), os.F_OK))
        self.assertTrue(os.access(os.path.join(dst_dir, f5), os.F_OK))
        # clean up
        shutil.rmtree(work_dir)

    def test_get_all_swwfiles(self):
        # An invalid directory must raise IOError.
        try:
            swwfiles = get_all_swwfiles('', 'test.txt')  # Invalid
        except IOError:
            pass
        else:
            raise Exception('Should have raised exception')

    def test_get_all_swwfiles1(self):
        # Four .sww files in a temp dir must all be returned (basenames,
        # without the .sww extension).
        temp_dir = tempfile.mkdtemp('', 'sww_test')
        filename0 = tempfile.mktemp('.sww', 'test', temp_dir)
        filename1 = tempfile.mktemp('.sww', 'test', temp_dir)
        filename2 = tempfile.mktemp('.sww', 'test', temp_dir)
        filename3 = tempfile.mktemp('.sww', 'test', temp_dir)
        # print'filename', filename0,filename1,filename2,filename3
        fid0 = open(filename0, 'w')
        fid1 = open(filename1, 'w')
        fid2 = open(filename2, 'w')
        fid3 = open(filename3, 'w')
        fid0.write('hello')
        fid1.write('hello')
        fid2.write('hello')
        fid3.write('hello')
        fid0.close()
        fid1.close()
        fid2.close()
        fid3.close()
        dir, name0 = os.path.split(filename0)
        # print 'dir',dir,name0
        iterate = get_all_swwfiles(dir, 'test')
        del_dir(temp_dir)
        # removeall(temp_dir)
        _, name0 = os.path.split(filename0)
        # print'name0',name0[:-4],iterate[0]
        _, name1 = os.path.split(filename1)
        _, name2 = os.path.split(filename2)
        _, name3 = os.path.split(filename3)
        assert name0[:-4] in iterate
        assert name1[:-4] in iterate
        assert name2[:-4] in iterate
        assert name3[:-4] in iterate
        assert len(iterate) == 4

    def test_merge_swwfiles(self):
        # Evolve two small domains to produce two .sww files, merge them,
        # and check the merged output exists.
        from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular, \
            rectangular_cross
        from anuga.shallow_water.shallow_water_domain import Domain
        from anuga.file.sww import SWW_file
        from anuga.abstract_2d_finite_volumes.generic_boundary_conditions import \
            Dirichlet_boundary
        Bd = Dirichlet_boundary([0.5, 0., 0.])
        # Create shallow water domain
        domain = Domain(*rectangular_cross(2, 2))
        domain.set_name('test1')
        domain.set_quantity('elevation', 2)
        domain.set_quantity('stage', 5)
        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})
        for t in domain.evolve(yieldstep=0.5, finaltime=1):
            pass
        domain = Domain(*rectangular(3, 3))
        domain.set_name('test2')
        domain.set_quantity('elevation', 3)
        domain.set_quantity('stage', 50)
        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})
        for t in domain.evolve(yieldstep=0.5, finaltime=1):
            pass
        outfile = 'test_out.sww'
        _sww_merge(['test1.sww', 'test2.sww'], outfile)
        self.assertTrue(os.access(outfile, os.F_OK))
        # remove temp files
        if not sys.platform == 'win32':
            os.remove('test1.sww')
            os.remove('test2.sww')
            os.remove(outfile)
# -------------------------------------------------------------
# Allow running this test module directly.
if __name__ == "__main__":
    suite = unittest.makeSuite(Test_FileUtils, 'test')
    runner = unittest.TextTestRunner()  # verbosity=2)
    runner.run(suite)
|
sheedy/rizzo
|
app/assets/javascripts/lib/core/user_feed.js
|
// ------------------------------------------------------------------------------
//
// User Feed
//
// ------------------------------------------------------------------------------
define([ "jquery", "lib/utils/template", "lib/components/tabs", "lib/core/timeago" ], function($, Template, Tabs) {

  "use strict";

  // Default configuration; any key may be overridden via the constructor args.
  var defaults = {
    feedUrl: "https://www.lonelyplanet.com/thorntree/users/feed",
    feedSelector: ".js-user-feed",
    feedItemSelector: ".js-user-feed-item",
    targetLinkSelector: ".js-user-feed-item-target-link",
    activitiesSelector: "#js-user-feed-activities",
    messagesSelector: "#js-user-feed-messages",
    footerSelector: ".js-user-feed-footer",
    unreadFeedNumberSelector: ".js-unread-feed-number",
    unreadActivitiesNumberSelector: ".js-unread-activities-number",
    unreadMessagesNumberSelector: ".js-unread-messages-number",
    newFeedHighlightClass: "is-highlighted",
    initialHighlightedActivitiesNumber: 0,
    maxFeedActivities: 5,
    fetchInterval: 15000
  };

  // Polls the user feed endpoint and renders activities/messages into the
  // feed dropdown, keeping the unread counters in sync.
  function UserFeed(args) {
    this.config = $.extend({}, defaults, args);
    this.$activities = $(this.config.activitiesSelector);
    this.$messages = $(this.config.messagesSelector);
    this.$footer = $(this.config.footerSelector);
    this.$unreadActivitiesIndicator = $(this.config.unreadActivitiesNumberSelector);
    this.$unreadMessagesIndicator = $(this.config.unreadMessagesNumberSelector);
    this.$unreadFeedIndicator = $(this.config.unreadFeedNumberSelector);
    // Fix: `this.currentActivities;` was a no-op expression statement.
    // Initialise the field explicitly; it stays falsy until the first fetch.
    this.currentActivities = null;
    this.highlightedActivitiesNumber = this.config.initialHighlightedActivitiesNumber;
    this.init();
  }

  // ------------------------------------------------------------------------------
  // Initialise
  // ------------------------------------------------------------------------------

  UserFeed.prototype.init = function() {
    this._tabsInstance = new Tabs({ selector: this.config.feedSelector });
    this._fetchFeed();
  };

  // -------------------------------------------------------------------------
  // Private Functions
  // -------------------------------------------------------------------------

  // Makes each whole feed item clickable, delegating to its target link.
  UserFeed.prototype._bindLinks = function() {
    var _this = this;
    $(this.config.feedSelector + " " + this.config.feedItemSelector).off("click").on("click", function() {
      _this._goToUrl($(this).find(_this.config.targetLinkSelector).attr("href"));
    });
  };

  UserFeed.prototype._goToUrl = function(url) {
    window.location.href = url;
  };

  // Shows the combined unread badge, or hides it when there is nothing unread.
  UserFeed.prototype._updateUnreadFeedIndicator = function(newFeedItemsNumber) {
    if (newFeedItemsNumber > 0) {
      this.$unreadFeedIndicator.text(newFeedItemsNumber).removeClass("is-hidden");
    } else {
      this.$unreadFeedIndicator.addClass("is-hidden");
    }
  };

  // Renders up to maxFeedActivities items and highlights the new ones.
  UserFeed.prototype._createUserActivities = function(feedActivities) {
    var _this = this,
        activitiesHtml = "",
        i = 0;

    // Concatenate activities
    while ((i < feedActivities.length) && (i < this.config.maxFeedActivities)) {
      activitiesHtml += feedActivities[i].text;
      i++;
    }

    // Update activities list
    this.$activities.html(activitiesHtml);

    // Bind target links to whole item
    this._bindLinks();

    // Highlight new activities
    this.$activities
      .children()
      .slice(0, _this.highlightedActivitiesNumber)
      .addClass(_this.config.newFeedHighlightClass);

    // Update new activities number
    this.$unreadActivitiesIndicator.text(_this.highlightedActivitiesNumber);
  };

  // Renders up to maxFeedActivities messages, highlighting unread ones.
  UserFeed.prototype._createUserMessages = function(feedMessages, newMessagesNumber) {
    var messagesHtml = "",
        i = 0;

    // Concatenate messages
    while ((i < feedMessages.length) && (i < this.config.maxFeedActivities)) {
      if (!feedMessages[i]["read?"]) {
        // Add highlight class if message has unread flag
        messagesHtml += $(feedMessages[i].text).addClass(this.config.newFeedHighlightClass)[0].outerHTML;
      } else {
        messagesHtml += feedMessages[i].text;
      }
      i++;
    }

    // Update messages list
    this.$messages.html(messagesHtml).append(this.$footer);

    // Bind target links to whole item
    this._bindLinks();

    // Update new messages number
    this.$unreadMessagesIndicator.text(newMessagesNumber);
  };

  // Counts fetched activities whose timestamp is not in currentActivities.
  UserFeed.prototype._getActivityNumber = function(feed) {
    if (!feed.activities) { return; }

    var newActivitiesCount = 0,
        i = 0;

    for (i; i < feed.activities.length; i++) {
      this._isNewActivity(feed.activities[i].timestamp) && newActivitiesCount++;
    }

    return newActivitiesCount;
  };

  // An activity is "new" when its timestamp matches none we already hold.
  UserFeed.prototype._isNewActivity = function(timestamp) {
    for (var j = 0; j < this.currentActivities.length; j++) {
      if ( timestamp == this.currentActivities[j].timestamp ) {
        return false;
      }
    }
    return true;
  };

  UserFeed.prototype._updateActivities = function(feed) {
    if (this.currentActivities) {
      var newActivitiesNumber = this._getActivityNumber(feed);
      if (this.highlightedActivitiesNumber < newActivitiesNumber) {
        this.highlightedActivitiesNumber = newActivitiesNumber;
      }
      newActivitiesNumber && this._createUserActivities(feed.activities);
    } else {
      // First fetch: render everything and remember it for future diffs.
      // Fix: dropped a spurious second argument — _createUserActivities
      // takes only the activities array.
      feed.activities && feed.activities.length && this._createUserActivities(feed.activities);
      this.currentActivities = feed.activities;
    }
  };

  UserFeed.prototype._updateMessages = function(feed) {
    var newMessagesNumber = feed.unreadMessagesCount;
    feed.messages && feed.messages.length && this._createUserMessages(feed.messages, newMessagesNumber);
    this._updateUnreadFeedIndicator(this.highlightedActivitiesNumber + newMessagesNumber);
    // Update timeago for feed content only
    $(this.config.feedSelector + " time.timeago").timeago();
  };

  // Success/error callback for the ajax fetch; reschedules the next fetch.
  UserFeed.prototype._updateFeed = function(fetchedFeed) {
    if (!fetchedFeed) { return; }
    this._updateActivities(fetchedFeed);
    this._updateMessages(fetchedFeed);
    // Init fetch loop
    setTimeout(this._fetchFeed.bind(this), this.config.fetchInterval);
  };

  UserFeed.prototype._fetchFeed = function() {
    $.ajax({
      url: this.config.feedUrl,
      cache: false,
      dataType: "json",
      success: this._updateFeed.bind(this),
      error: this._updateFeed.bind(this)
    });
  };

  return UserFeed;

});
|
WalkerOfDarkness/IV-5
|
commands/rob.js
|
const balanceSchema = require('../models/balance-schema')
const boosterSchema = require('../models/boost-schema')
module.exports = {
name: 'rob',
description: 'Rob a user. Be carefull though it could come back to bite you',
category: 'Economy',
globalCooldown: '3m',
slash: true,
guildOnly: true,
options: [
{
name: 'user',
description: 'The user to rob',
type: 'USER',
required: true,
},
],
callback: async({interaction}) => {
const blacklistSchema = require('../models/blacklist-schema')
const blacklist = await blacklistSchema.findOne({userId: interaction.user.id})
if (blacklist) {
return
}
const maintenanceSchema = require('../models/mantenance-schema')
const maintenance = await maintenanceSchema.findOne({maintenance: true})
if (maintenance && interaction.user.id !== '804265795835265034') {
return
}
const target = interaction.options.getUser('user')
const theif = interaction.user
const theifBal = await balanceSchema.findOne({guildId: interaction.guild.id, userId: theif.id})
const targetBal = await balanceSchema.findOne({guildId: interaction.guild.id, userId: target.id})
const boosterTheif = await boosterSchema.findOne({guildId: interaction.guild.id, userId: theif.id, type: 'ar'})
const boosterTarget = await boosterSchema.findOne({guildId: interaction.guild.id, userId: target.id, type: 'ar'})
const robLuck = Math.round(Math.random())
const takeFromTheif = Math.round(Math.random() * Math.round((30 / 100) * theifBal.amount))
const takeFromTarget = Math.round(Math.random() * Math.round((40 / 100) * targetBal.amount))
if (boosterTarget) return `That user has an anti-rob booster active lol`
if (boosterTheif) return `Don't try and rob others when they can't rob you`
if (theif.id === target.id) return `You can't rob yourself`
if (theifBal.amount < 3000) return `You need at least φ\`3000\` to rob someone`
if (targetBal.amount < 3000) return `Its not nice to rob the poor`
if (robLuck === 0) {
targetBal.amount += takeFromTheif
targetBal.save()
theifBal.amount -= takeFromTheif
theifBal.save()
target.send({content: `${theif} tried to rob you but failed. They were made to pay φ\`${takeFromTheif}\` so yay for you.`})
return `You failed to rob ${target}. You lost φ\`${takeFromTheif}\``
} else {
targetBal.amount -= takeFromTarget
targetBal.save()
theifBal.amount += takeFromTarget
theifBal.save()
target.send({content: `${theif} robbed you. They managed to steal φ\`${takeFromTarget}\`.`})
return `You successfully robbed ${target}. You stole φ\`${takeFromTarget}\``
}
}
}
|
Minecodecraft/LeetCode-Minecode
|
Top Interview Questions/334. Increasing Triplet Subsequence/main.cpp
|
//
// main.cpp
// 334. Increasing Triplet Subsequence
//
// Created by 边俊林 on 2019/10/31.
// Copyright © 2019 Minecode.Link. All rights reserved.
//
#include <map>
#include <set>
#include <queue>
#include <string>
#include <stack>
#include <vector>
#include <cstdio>
#include <numeric>
#include <cstdlib>
#include <utility>
#include <iostream>
#include <algorithm>
#include <unordered_map>
#include <unordered_set>
using namespace std;
/// Solution:
//
// Solution 1: Best solution 😁
/*
class Solution {
public:
bool increasingTriplet(vector<int>& nums) {
int a = INT_MAX, b = INT_MAX;
for (int i = 0; i < nums.size(); ++i) {
if (a >= nums[i]) a = nums[i]; // must have `equal`
else if (b >= nums[i]) b = nums[i]; // must have `equal`
else return true;
}
return false;
}
};
*/
// Solution 2: Not an acceptable solution, but it's still clever
// The problem asks for O(1) space complexity, but this approach uses O(n)
// Increasing Triplet Subsequence — O(n) time, O(1) space.
//
// Fix: the previous version used two O(n) auxiliary arrays, which the
// problem statement (and the author's own comment) rules out. This version
// tracks two candidates instead:
//   first  — the smallest value seen so far
//   second — the smallest value that has a strictly smaller value before it
// Any element strictly greater than `second` completes an increasing triplet.
// Returns the same boolean for every input as the array-based version.
class Solution {
public:
    bool increasingTriplet(vector<int>& nums) {
        int first = INT_MAX;
        int second = INT_MAX;
        for (size_t i = 0; i < nums.size(); ++i) {
            if (nums[i] <= first) {
                first = nums[i];        // new running minimum
            } else if (nums[i] <= second) {
                second = nums[i];       // new middle candidate (> first)
            } else {
                return true;            // nums[i] > second > first
            }
        }
        return false;
    }
};
int main() {
Solution sol = Solution();
vector<int> nums = {
// 1, 2, 3, 4, 5
5, 4, 3, 2, 1
// 1, 2, 1, 4 ,5
// 5,1,5,5,2,5,4
};
bool res = sol.increasingTriplet(nums);
cout << (res ? "true" : "false") << endl;
return 0;
}
|
rsghotra/AbdulBari
|
CPPMCQ/60.cpp
|
#include <iostream>
using namespace std;
/*
Concept: Constructor Delegation and Private Constructor
- Not all constructors can be private
- there must be at least one public constructor
*/
// Demonstrates constructor delegation with a private constructor:
// the public default ctor delegates to the private Base(int), so
// constructing a Base prints "1" (delegate body) then "0" (delegating body).
class Base {
    int _x;
    // Private delegate target; its body runs first.
    Base(int value) : _x{value} { cout << 1; }
public:
    // Public ctor: delegates to Base(int) with 0, then prints 0.
    Base() : Base(0) { cout << 0; }
};
int main() {
    // Constructing Base runs the delegating ctor chain, printing "10".
    Base instance;
    return 0;
}
//O/P: 10
|
larshesel/mzbench
|
server/dashboard/js/components/TimelineElement.react.js
|
import React from 'react';
import moment from 'moment';
import RelativeDate from './RelativeDate.react';
import MZBenchActions from '../actions/MZBenchActions';
import Star from './Star.react';
import MZBenchRouter from '../utils/MZBenchRouter';
import GlobalStore from '../stores/GlobalStore';
import PropTypes from 'prop-types';
import {OverlayTrigger, Tooltip} from 'react-bootstrap';
// Renders a single timeline entry. In dashboard mode only id/name are shown;
// in bench mode the entry also shows status, tags, creation date, duration,
// author, and a "search similar benchmarks" shortcut.
class TimelineElement extends React.Component {
  // Dispatch on the global dashboard/bench mode flag.
  render() {
    if (GlobalStore.isDashboardModeOn()) {
      return this.renderDashboard();
    } else {
      return this.renderBench();
    }
  }
  // Compact card used when the dashboard view is active.
  renderDashboard() {
    let item = this.props.bench;
    let isSelected = this.props.isSelected;
    let cssClass = "bs bs-progress";
    if (isSelected) {
      cssClass += " bs-selected";
    }
    return (
      <a href={`#/dashboard/${item.id}/overview`} className="bs-link">
        <div className={cssClass}>
          <h6 className="no-overflow">
            #{item.id} {item.name}
          </h6>
        </div>
      </a>
    );
  }
  // Full card for a bench: status-coloured, with tags/date/duration/author.
  renderBench() {
    let { bench, isSelected, duration } = this.props;
    // Running benches reuse the "progress" styling; finished ones use status.
    let cssClass = "bs bs-" + (bench.isRunning() ? "progress" : bench.status);
    if (isSelected) {
      cssClass += " bs-selected";
    }
    // Tag chips; clicking one filters the timeline by that tag.
    var tags = bench.tags.length <= 0 ? [] :
      <div className="timeline-tags no-overflow">
        {bench.tags.map(
          (t, i) => {
            return <span key={i}>
              <span className={isSelected?"timeline-tag-link-selected":"timeline-tag-link"}
                  onClick={(e) => { return this._onTagClick(t, e) }} key={i}>{"#"+t}
              </span>
            </span>
          })}
      </div>;
    // Author line is omitted for anonymous benches; prefer the display name.
    let author = null;
    if (bench.author != "anonymous") {
      if (bench.author_name == "") {
        author = (<div>by {bench.author}</div>);
      } else {
        author = (<div>by {bench.author_name}</div>);
      }
    }
    const searchTooltip = <Tooltip id="search-tooltip">Search for similar benchmarks</Tooltip>;
    return (
      <a href={`#/bench/${bench.id}/overview`} className="bs-link">
        <div className={cssClass}>
          <h6 className="no-overflow">
            <Star selected={bench.tags.indexOf("favorites") > -1} onClick={(v) => {
              if (v == true) MZBenchActions.addBenchTag(bench.id, "favorites");
              else MZBenchActions.removeBenchTag(bench.id, "favorites");
            }}/>
            #{bench.id} {bench.name}
            {bench.isRunning() ? <span className="label">{bench.status}</span> : null}
            <OverlayTrigger delay={200} placement="top" overlay={searchTooltip}>
              <span className="search-bench-character glyphicon glyphicon-search" aria-hidden="true"
                  onClick={(e) => {
                    e.preventDefault();
                    MZBenchRouter.navigate("/timeline", {q: bench.name});
                  }}/>
            </OverlayTrigger>
          </h6>
          {tags}
          <div><i className="glyphicon glyphicon-calendar"></i> <RelativeDate date = {bench.create_time_client} /></div>
          <div><i className="glyphicon glyphicon-time"></i> {moment.duration(duration).humanize()}</div>
          {author}
        </div>
      </a>
    );
  }
  // Navigate the timeline to a #tag query instead of following the bench link.
  _onTagClick(tag, event) {
    event.preventDefault();
    MZBenchRouter.navigate("/timeline", {q: "#"+tag});
  }
  // NOTE(review): not referenced within this file chunk — presumably wired
  // up by a parent component; confirm before removing.
  _onClick() {
    MZBenchActions.selectBenchById(this.props.bench.id);
  }
}
// Runtime prop contract for TimelineElement.
TimelineElement.propTypes = {
  bench: PropTypes.object.isRequired,
  isSelected: PropTypes.bool
};

// Entries render as unselected unless the timeline says otherwise.
TimelineElement.defaultProps = {
  isSelected: false
};

export default TimelineElement;
|
npocmaka/Windows-Server-2003
|
drivers/wdm/audio/sysaudio/notify.cpp
|
//---------------------------------------------------------------------------
//
// Module: notify.cpp
//
// Description:
//
//
//@@BEGIN_MSINTERNAL
// Development Team:
// <NAME>
//
// History: Date Author Comment
//
// To Do: Date Author Comment
//
//@@END_MSINTERNAL
//
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
//
// Copyright (c) 1996-1999 Microsoft Corporation. All Rights Reserved.
//
//---------------------------------------------------------------------------
#include "common.h"
//
// Include safe string library for safe string manipulation.
//
#define STRSAFE_NO_DEPRECATE // Use safe and unsafe functions interchangeably.
#include "strsafe.h"
#define DEVICE_NAME_TAG L"\\\\?\\"
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
// Parallel lookup tables: aulFilterType[i] is the filter-type bit that
// corresponds to the interface category apguidCategories[i]. The two arrays
// MUST stay index-aligned when entries are added or removed (note that
// several distinct categories intentionally map to the same type bit, e.g.
// DATACOMPRESSOR/DATADECOMPRESSOR -> FILTER_TYPE_DATA_TRANSFORM and
// SPLITTER/AUDIO_SPLITTER -> FILTER_TYPE_SPLITTER).
CONST GUID *apguidCategories[] = {
    &KSCATEGORY_AUDIO,
    &KSCATEGORY_AUDIO_GFX,
    &KSCATEGORY_TOPOLOGY,
    &KSCATEGORY_BRIDGE,
    &KSCATEGORY_RENDER,
    &KSCATEGORY_CAPTURE,
    &KSCATEGORY_MIXER,
    &KSCATEGORY_DATATRANSFORM,
    &KSCATEGORY_ACOUSTIC_ECHO_CANCEL,
    &KSCATEGORY_INTERFACETRANSFORM,
    &KSCATEGORY_MEDIUMTRANSFORM,
    &KSCATEGORY_DATACOMPRESSOR,
    &KSCATEGORY_DATADECOMPRESSOR,
    &KSCATEGORY_COMMUNICATIONSTRANSFORM,
    &KSCATEGORY_SPLITTER,
    &KSCATEGORY_AUDIO_SPLITTER,
    &KSCATEGORY_SYNTHESIZER,
    &KSCATEGORY_DRM_DESCRAMBLE,
    &KSCATEGORY_MICROPHONE_ARRAY_PROCESSOR,
};
ULONG aulFilterType[] = {
    FILTER_TYPE_AUDIO,
    FILTER_TYPE_GFX,
    FILTER_TYPE_TOPOLOGY,
    FILTER_TYPE_BRIDGE,
    FILTER_TYPE_RENDERER,
    FILTER_TYPE_CAPTURER,
    FILTER_TYPE_MIXER,
    FILTER_TYPE_DATA_TRANSFORM,
    FILTER_TYPE_AEC,
    FILTER_TYPE_INTERFACE_TRANSFORM,
    FILTER_TYPE_MEDIUM_TRANSFORM,
    FILTER_TYPE_DATA_TRANSFORM,
    FILTER_TYPE_DATA_TRANSFORM,
    FILTER_TYPE_COMMUNICATION_TRANSFORM,
    FILTER_TYPE_SPLITTER,
    FILTER_TYPE_SPLITTER,
    FILTER_TYPE_SYNTHESIZER,
    FILTER_TYPE_DRM_DESCRAMBLE,
    FILTER_TYPE_MIC_ARRAY_PROCESSOR,
};
// Opaque registration handle returned by IoRegisterPlugPlayNotification;
// NULL until RegisterForPlugPlayNotifications succeeds.
PVOID pNotificationHandle = NULL;
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
//
// Registers for KSCATEGORY_AUDIO device-interface arrival/removal
// notifications (including already-present interfaces). On success the
// registration handle is stored in pNotificationHandle.
//
NTSTATUS
RegisterForPlugPlayNotifications(
)
{
    NTSTATUS Status;

    DPF(50, "RegisterForPlugPlayNotifications");
    ASSERT(gpDeviceInstance != NULL);
    ASSERT(gpDeviceInstance->pPhysicalDeviceObject != NULL);

    Status = IoRegisterPlugPlayNotification(
      EventCategoryDeviceInterfaceChange,
      PNPNOTIFY_DEVICE_INTERFACE_INCLUDE_EXISTING_INTERFACES,
      (LPGUID)&KSCATEGORY_AUDIO,
      gpDeviceInstance->pPhysicalDeviceObject->DriverObject,
      (NTSTATUS (*)(PVOID, PVOID)) AudioDeviceInterfaceNotification,
      NULL,
      &pNotificationHandle);

    if(!NT_SUCCESS(Status)) {
        Trap();
    }
    return(Status);
}
//
// Tears down the PnP notification registration made above. Safe to call
// when registration never succeeded.
//
VOID
UnregisterForPlugPlayNotifications(
)
{
    if(pNotificationHandle == NULL) {
        return;
    }
    IoUnregisterPlugPlayNotification(pNotificationHandle);
}
//
// Drops one pending add/delete reference; only the transition to zero
// fires the KSEVENT_SYSAUDIO_ADDREMOVE_DEVICE event list.
//
VOID
DecrementAddRemoveCount(
)
{
    if(InterlockedDecrement(&glPendingAddDelete) != 0) {
        return;
    }
    DPF(50, "DecrementAddRemoveCount: sending event");
    KsGenerateEventList(
      NULL,
      KSEVENT_SYSAUDIO_ADDREMOVE_DEVICE,
      &gEventQueue,
      KSEVENTS_SPINLOCK,
      &gEventLock);
}
//
// Worker-thread callback queued by AudioDeviceInterfaceNotification on
// interface arrival. Owns (and frees) the heap copy of the interface name,
// balances the pending add/delete counter, and releases the software-bus
// reference taken before queuing. pReference is unused.
//
NTSTATUS
AddFilterWorker(
    PWSTR pwstrDeviceInterface,
    PVOID pReference
)
{
    AddFilter(pwstrDeviceInterface, NULL);
    ExFreePool(pwstrDeviceInterface);
    DecrementAddRemoveCount();
    // Dereference sysaudio PDO.
    KsDereferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
    return(STATUS_SUCCESS);
}
//
// Worker-thread callback queued on interface removal; mirror image of
// AddFilterWorker. Frees the heap copy of the interface name, balances the
// pending add/delete counter, and releases the software-bus reference.
// pReference is unused.
//
NTSTATUS
DeleteFilterWorker(
    PWSTR pwstrDeviceInterface,
    PVOID pReference
)
{
    DeleteFilter(pwstrDeviceInterface);
    ExFreePool(pwstrDeviceInterface);
    DecrementAddRemoveCount();
    // Dereference sysaudio PDO.
    KsDereferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
    return(STATUS_SUCCESS);
}
//
// PnP callback for KSCATEGORY_AUDIO interface changes. Copies the symbolic
// link name to paged pool and queues AddFilterWorker / DeleteFilterWorker
// for arrival / removal respectively; ownership of the copy passes to the
// worker on successful queuing, otherwise it is freed in the exit path.
//
NTSTATUS
AudioDeviceInterfaceNotification(
    IN PDEVICE_INTERFACE_CHANGE_NOTIFICATION pNotification,
    IN PVOID Context
)
{
    NTSTATUS Status = STATUS_SUCCESS;
    PWSTR pwstrDeviceInterface;

    DPF1(50, "AudioDeviceInterfaceNotification: (%s)",
      DbgUnicode2Sz(pNotification->SymbolicLinkName->Buffer));
    //
    // SECURITY NOTE:
    // We trust the Buffer, because it is passed to us as part of notification
    // from PnP subsystem.
    //
    pwstrDeviceInterface = (PWSTR)
      ExAllocatePoolWithTag(
        PagedPool,
        (wcslen(pNotification->SymbolicLinkName->Buffer) + 1) * sizeof(WCHAR),
        POOLTAG_SYSA);
    if(pwstrDeviceInterface == NULL) {
        Status = STATUS_INSUFFICIENT_RESOURCES;
        goto exit;
    }
    // The notification sends null terminated unicode strings
    wcscpy(pwstrDeviceInterface, pNotification->SymbolicLinkName->Buffer);

    if(IsEqualGUID(&pNotification->Event, &GUID_DEVICE_INTERFACE_ARRIVAL)) {
        //
        // Keep a reference so that SWENUM does not REMOVE the device
        // when the Worker thread is running.
        // If the thread is scheduled successfully, it will remove the reference
        // when exiting.
        //
        Status = KsReferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
        if(!NT_SUCCESS(Status)) {
            goto exit;
        }

        // NOTE(review): if QueueWorkList fails below, glPendingAddDelete has
        // been incremented but the worker that would decrement it never runs,
        // so the ADDREMOVE event may never fire — confirm whether a
        // DecrementAddRemoveCount() belongs on that failure path.
        InterlockedIncrement(&glPendingAddDelete);
        Status = QueueWorkList(
          (UTIL_PFN)AddFilterWorker,
          pwstrDeviceInterface,
          NULL);
        if (!NT_SUCCESS(Status)) {
            KsDereferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
        }
    }
    else if(IsEqualGUID(&pNotification->Event, &GUID_DEVICE_INTERFACE_REMOVAL)) {
        //
        // Keep a reference so that SWENUM does not REMOVE the device
        // when the Worker thread is running.
        // If the thread is scheduled successfully, it will remove the reference
        // when exiting.
        //
        Status = KsReferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
        if(!NT_SUCCESS(Status)) {
            goto exit;
        }

        InterlockedIncrement(&glPendingAddDelete);
        Status = QueueWorkList(
          (UTIL_PFN)DeleteFilterWorker,
          pwstrDeviceInterface,
          NULL);
        if (!NT_SUCCESS(Status)) {
            KsDereferenceSoftwareBusObject(gpDeviceInstance->pDeviceHeader);
        }
    }
    else {
        //
        // SECURITY NOTE:
        // Sysaudio is registering only for EventCategoryDeviceInterfaceChange.
        // This should send ARRIVAL and REMOVAL.
        // If anything else comes up, we will return SUCCESS.
        // However we are making sure that pwstrDeviceInterface is not leaked.
        //
        if (pwstrDeviceInterface) {
            ExFreePool(pwstrDeviceInterface);
            pwstrDeviceInterface = NULL;
        }
    }

exit:
    // On any failure the string was not handed to a worker; free it here.
    if (!NT_SUCCESS(Status))
    {
        if (pwstrDeviceInterface) {
            ExFreePool(pwstrDeviceInterface);
            pwstrDeviceInterface = NULL;
        }
    }
    return(Status);
}
//
// Creates (or reuses) a FILTER_NODE for a newly arrived device interface.
// The interface's category aliases determine the filter-type bit mask; a
// node whose alias matches an existing node's interface is treated as a
// duplicate and reused. When called from the PnP notification path
// (ppFilterNode == NULL), endpoint filters also get a DEVICE_NODE and
// non-endpoint arrivals invalidate all cached graphs.
//
NTSTATUS
AddFilter(
    PWSTR pwstrDeviceInterface,
    PFILTER_NODE *ppFilterNode	// if !NULL, physical connection addfilter
)
{
    PFILTER_NODE pFilterNodeDuplicate = NULL;
    PFILTER_NODE pFilterNode = NULL;
    UNICODE_STRING ustrFilterName;
    UNICODE_STRING ustrAliasName;
    UNICODE_STRING ustrName;
    NTSTATUS Status;
    ULONG fulType;
    int i;

    DPF1(50, "AddFilter: (%s)", DbgUnicode2Sz(pwstrDeviceInterface));
    fulType = 0;
    RtlInitUnicodeString(&ustrFilterName, pwstrDeviceInterface);
    //
    // For each Interface in apguidCategories, get interface alias of
    // the new device. Check for duplicate interfaces.
    //
    for(i = 0; i < SIZEOF_ARRAY(apguidCategories); i++) {
        Status = IoGetDeviceInterfaceAlias(
          &ustrFilterName,
          apguidCategories[i],
          &ustrAliasName);
        if(NT_SUCCESS(Status)) {
            HANDLE hAlias;

            // Only count the category if the alias can actually be opened.
            Status = OpenDevice(ustrAliasName.Buffer, &hAlias);
            if(NT_SUCCESS(Status)) {
                DPF2(100, "AddFilter: alias (%s) aulFilterType %08x",
                  DbgUnicode2Sz(ustrAliasName.Buffer),
                  aulFilterType[i]);
                fulType |= aulFilterType[i];
                ZwClose(hAlias);
                // Scan existing nodes once for an interface matching this alias.
                if(pFilterNodeDuplicate == NULL) {
                    FOR_EACH_LIST_ITEM(gplstFilterNode, pFilterNode) {
                        if(pFilterNode->GetDeviceInterface() == NULL) {
                            continue;
                        }
                        RtlInitUnicodeString(
                          &ustrName,
                          pFilterNode->GetDeviceInterface());
                        if(RtlEqualUnicodeString(
                          &ustrAliasName,
                          &ustrName,
                          TRUE)) {
                            DPF(50, "AddFilter: dup");
                            pFilterNodeDuplicate = pFilterNode;
                            break;
                        }
                    } END_EACH_LIST_ITEM
                }
            }
            else {
                DPF1(10, "AddFilter: OpenDevice FAILED on alias (%s)",
                  DbgUnicode2Sz(ustrAliasName.Buffer));
            }
            RtlFreeUnicodeString(&ustrAliasName);
        }
    }
    pFilterNode = pFilterNodeDuplicate;
    Status = STATUS_SUCCESS;
    //
    // Create a new Filter_Node if this is not a duplicate.
    //
    if(pFilterNodeDuplicate == NULL) {
        pFilterNode = new FILTER_NODE(fulType);
        if(pFilterNode == NULL) {
            Status = STATUS_INSUFFICIENT_RESOURCES;
            Trap();
            goto exit;
        }
        Status = pFilterNode->Create(pwstrDeviceInterface);
        if(!NT_SUCCESS(Status)) {
            goto exit;
        }
        Status = pFilterNode->DuplicateForCapture();
        if(!NT_SUCCESS(Status)) {
            goto exit;
        }
        DPF1(50, "AddFilter: new CFilterNode fulType %08x", fulType);
    }
    //
    // If this is called from Interface Notification Callback,
    // create a new DeviceNode for the new FilterNode.
    //
    if(ppFilterNode == NULL) {
        if(pFilterNode->GetType() & FILTER_TYPE_ENDPOINT) {
            //
            // Check if a DeviceNode has already been created for
            // this FilterNode.
            //
            if (NULL != pFilterNodeDuplicate &&
                NULL != pFilterNodeDuplicate->pDeviceNode) {
                DPF1(5, "Duplicate FilterNode %X. Skip DeviceNode Create",
                  pFilterNode);
            }
            else {
                pFilterNode->pDeviceNode = new DEVICE_NODE;
                if(pFilterNode->pDeviceNode == NULL) {
                    Status = STATUS_INSUFFICIENT_RESOURCES;
                    Trap();
                    goto exit;
                }
                Status = pFilterNode->pDeviceNode->Create(pFilterNode);
                if(!NT_SUCCESS(Status)) {
                    goto exit;
                }
            }
        }
        else {
            // Non-endpoint filter arrived: cached graphs are stale.
            DPF(50, "AddFilter: DestroyAllGraphs");
            DestroyAllGraphs();
        }
    }
exit:
    if(!NT_SUCCESS(Status)) {
        DPF2(5, "AddFilter: FAILED (%s) %08x",
          DbgUnicode2Sz(pwstrDeviceInterface),
          Status);
        // Only delete nodes we created here; never a reused duplicate.
        if(pFilterNode != NULL && pFilterNodeDuplicate == NULL) {
            delete pFilterNode;
            pFilterNode = NULL;
        }
    }
    if(ppFilterNode != NULL) {
        *ppFilterNode = pFilterNode;
    }
    return(Status);
}
//
// Removes every FILTER_NODE whose device interface matches the departing
// interface or any of its category aliases. Always returns STATUS_SUCCESS;
// alias-lookup failures for individual categories are simply skipped.
//
NTSTATUS
DeleteFilter(
    PWSTR pwstrDeviceInterface
)
{
    UNICODE_STRING ustrFilterName;
    UNICODE_STRING ustrAliasName;
    UNICODE_STRING ustrName;
    PFILTER_NODE pFilterNode;
    NTSTATUS Status;
    int i;

    DPF1(50, "DeleteFilter: (%s)", DbgUnicode2Sz(pwstrDeviceInterface));
    RtlInitUnicodeString(&ustrFilterName, pwstrDeviceInterface);
    //
    // First delete all filter nodes which have the device interface which is
    // going away
    //
    FOR_EACH_LIST_ITEM_DELETE(gplstFilterNode, pFilterNode) {
        if(pFilterNode->GetDeviceInterface() == NULL) {
            continue;
        }
        RtlInitUnicodeString(
          &ustrName,
          pFilterNode->GetDeviceInterface());
        if(RtlEqualUnicodeString(
          &ustrFilterName,
          &ustrName,
          TRUE)) {
            delete pFilterNode;
            DELETE_LIST_ITEM(gplstFilterNode);
        }
    } END_EACH_LIST_ITEM

    // Then repeat the sweep for every category alias of the interface.
    for(i = 0; i < SIZEOF_ARRAY(apguidCategories); i++) {
        //
        // According to PnP group, it is perfectly safe to ask for aliases
        // during removal. The interface itself will be enabled or disabled. But
        // we will still get the correct aliases.
        //
        Status = IoGetDeviceInterfaceAlias(
          &ustrFilterName,
          apguidCategories[i],
          &ustrAliasName);
        if(NT_SUCCESS(Status)) {
            FOR_EACH_LIST_ITEM_DELETE(gplstFilterNode, pFilterNode) {
                if(pFilterNode->GetDeviceInterface() == NULL) {
                    continue;
                }
                RtlInitUnicodeString(
                  &ustrName,
                  pFilterNode->GetDeviceInterface());
                if(RtlEqualUnicodeString(
                  &ustrAliasName,
                  &ustrName,
                  TRUE)) {
                    delete pFilterNode;
                    DELETE_LIST_ITEM(gplstFilterNode);
                }
            } END_EACH_LIST_ITEM
            RtlFreeUnicodeString(&ustrAliasName);
        }
    }
    return(STATUS_SUCCESS);
}
#define GFX_VERBOSE_LEVEL 50
//=============================================================================
// Assumptions:
// - SysaudioGfx.ulType has been already validated.
//
//=============================================================================
// Registers a user-mode GFX filter on a specific device interface: validates
// the requested order against the render/capture order window, rejects
// duplicate (device, order) pairs, captures the user-mode file handle, and
// profiles the filter. On success all cached graphs are destroyed so the GFX
// is picked up on the next graph build.
//
// Assumptions:
// - SysaudioGfx.ulType has been already validated.
//
NTSTATUS AddGfx(
    PSYSAUDIO_GFX pSysaudioGfx,
    ULONG cbMaxLength
)
{
    NTSTATUS Status;
    PFILE_OBJECT pFileObject;
    PFILTER_NODE pFilterNode;
    ULONG Flags;
    PWSTR pwstrDeviceName;
    ULONG GfxOrderBase, GfxOrderCeiling;

    ASSERT(pSysaudioGfx);
    pFileObject = NULL;
    pwstrDeviceName = NULL;
    pFilterNode = NULL;
    GfxOrderBase = GfxOrderCeiling = 0;

    DPF1(GFX_VERBOSE_LEVEL, "AddGfx :: Request to add Gfx %x", pSysaudioGfx);
    DPF1(GFX_VERBOSE_LEVEL, " hGfx = %x", pSysaudioGfx->hGfx);
    DPF1(GFX_VERBOSE_LEVEL, " ulOrder = %x", pSysaudioGfx->ulOrder);
    DPF1(GFX_VERBOSE_LEVEL, " ulType = %x", pSysaudioGfx->ulType);
    DPF1(GFX_VERBOSE_LEVEL, " Flags = %x", pSysaudioGfx->ulFlags);
    //
    // Setup GFX Order's base & ceiling for future usage
    //
    if (pSysaudioGfx->ulType == GFX_DEVICETYPE_RENDER) {
        GfxOrderBase = ORDER_RENDER_GFX_FIRST;
        GfxOrderCeiling = ORDER_RENDER_GFX_LAST;
    }
    if (pSysaudioGfx->ulType == GFX_DEVICETYPE_CAPTURE) {
        GfxOrderBase = ORDER_CAPTURE_GFX_FIRST;
        GfxOrderCeiling = ORDER_CAPTURE_GFX_LAST;
    }
    ASSERT(GfxOrderBase);
    ASSERT(GfxOrderCeiling);
    //
    // validate that order is within range
    //
    if (pSysaudioGfx->ulOrder >= (GfxOrderCeiling - GfxOrderBase)) {
        Status = STATUS_INVALID_PARAMETER;
        Trap();
        goto exit;
    }
    //
    // Allocate a Filter Node for the new GFX
    //
    pFilterNode = new FILTER_NODE(FILTER_TYPE_GFX);
    if(pFilterNode == NULL) {
        Trap();
        Status = STATUS_INSUFFICIENT_RESOURCES;
        goto exit;
    }
    pFilterNode->SetRenderCaptureFlags(pSysaudioGfx->ulType);
    //
    // Copy the Device Name (on which the gfx needs to be attached) into a local
    // copy for our own use
    //
    Status = SafeCopyDeviceName(
      (PWSTR) ((CHAR *) pSysaudioGfx + pSysaudioGfx->ulDeviceNameOffset),
      cbMaxLength,
      &pwstrDeviceName);
    if (!NT_SUCCESS(Status)) {
        goto exit;
    }
    DPF1(GFX_VERBOSE_LEVEL, " On DI = %s", DbgUnicode2Sz(pwstrDeviceName));
    //
    // Make sure that there are no other GFXes with the same order on this device
    //
    if ((FindGfx(pFilterNode,
      0,    // wild card for handle
      pwstrDeviceName,
      pSysaudioGfx->ulOrder+GfxOrderBase))) {
        delete [] pwstrDeviceName;
        Status = STATUS_INVALID_PARAMETER;
        goto exit;
    }
    //
    // Get the FileObject of the GFX for future use
    // SECURITY NOTE:
    // The handle is coming from UserMode. So we have to specify UserMode.
    // Also we are explicitly interested in FileObjects. The rest should be
    // rejected.
    //
    Status = ObReferenceObjectByHandle(
      pSysaudioGfx->hGfx,
      FILE_GENERIC_READ | FILE_GENERIC_WRITE,
      *IoFileObjectType,
      UserMode,
      (PVOID*)&pFileObject,
      NULL);
    if (!NT_SUCCESS(Status) || NULL == pFileObject) {
        DPF1(GFX_VERBOSE_LEVEL, "AddGfx :: ObReference failed %x", Status);
        delete [] pwstrDeviceName;
        goto exit;
    }
    //
    // Add the device name string to global memory to be freed
    //
    Status = pFilterNode->lstFreeMem.AddList(pwstrDeviceName);
    if(!NT_SUCCESS(Status)) {
        Trap();
        delete [] pwstrDeviceName;
        goto exit;
    }
    //
    // Indicate that this Gfx needs be loaded only on the device pointed to be
    // pwstrDeviceName
    //
    // NOTE(review): from this point pwstrDeviceName is also owned by
    // lstFreeMem; the explicit delete [] on the failure path below looks like
    // a potential double free if ~FILTER_NODE frees lstFreeMem entries —
    // confirm lstFreeMem ownership semantics.
    Status = pFilterNode->AddDeviceInterfaceMatch(pwstrDeviceName);
    if(!NT_SUCCESS(Status)) {
        Trap();
        delete [] pwstrDeviceName;
        goto exit;
    }
    //
    // Set the Gfx order in the filter node
    //
    pFilterNode->SetOrder(pSysaudioGfx->ulOrder+GfxOrderBase);
    //
    // Profile the GFX and create pin infos, logical filter nodes etc
    //
    Status = pFilterNode->ProfileFilter(pFileObject);
    if(!NT_SUCCESS(Status)) {
        Trap();
        goto exit;
    }
    //
    // Fix the GFX glitching problem. Send the property blindly to GFX
    // filter. KS will handle the property.
    // Failures are not important, ignore them.
    //
    SetKsFrameHolding(pFileObject);

exit:
    if(!NT_SUCCESS(Status)) {
        DPF1(GFX_VERBOSE_LEVEL, "AddGfx :: Failed, Status = %x", Status);
        if(pFilterNode != NULL) {
            delete pFilterNode;
            pFilterNode = NULL;
        }
        if(pFileObject != NULL) {
            ObDereferenceObject(pFileObject);
        }
    }
    else {
        DPF1(GFX_VERBOSE_LEVEL, "AddGfx :: Added GFX FilterNode %x", pFilterNode);
        DPF1(GFX_VERBOSE_LEVEL, " order = %x", pFilterNode->GetOrder());
        DPF1(GFX_VERBOSE_LEVEL, " type = %x", pFilterNode->GetType());
        DPF1(GFX_VERBOSE_LEVEL, " flags = %x", pFilterNode->GetFlags());
        //
        // Setup file handle details for later use of
        // the user mode handle passed in
        //
        pFilterNode->SetFileDetails(pSysaudioGfx->hGfx,
          pFileObject,
          PsGetCurrentProcess());
        //
        // Force a rebuild of graph nodes
        //
        DestroyAllGraphs();
    }
    return(Status);
}
//=============================================================================
// Assumptions:
// - SysaudioGfx.ulType has been already validated.
//
//=============================================================================
// Unregisters a previously added GFX filter: locates the matching filter
// node by (handle, device name, order), releases its captured file object,
// and deletes the node.
//
// Assumptions:
// - SysaudioGfx.ulType has been already validated.
//
NTSTATUS RemoveGfx(
    PSYSAUDIO_GFX pSysaudioGfx,
    ULONG cbMaxLength
)
{
    NTSTATUS Status;
    PFILTER_NODE pFilterNode;
    PWSTR pwstrDeviceName;
    ULONG GfxOrderBase, GfxOrderCeiling;

    pFilterNode = NULL;
    GfxOrderBase = GfxOrderCeiling = 0;
    pwstrDeviceName = NULL;

    DPF1(GFX_VERBOSE_LEVEL, "RemoveGfx :: Request to remove Gfx %x", pSysaudioGfx);
    DPF1(GFX_VERBOSE_LEVEL, " hGfx = %x", pSysaudioGfx->hGfx);
    DPF1(GFX_VERBOSE_LEVEL, " ulOrder = %x", pSysaudioGfx->ulOrder);
    DPF1(GFX_VERBOSE_LEVEL, " ulType = %x", pSysaudioGfx->ulType);
    DPF1(GFX_VERBOSE_LEVEL, " Flags = %x", pSysaudioGfx->ulFlags);
    //
    // Setup GFX Order's base & ceiling for future usage
    //
    if (pSysaudioGfx->ulType == GFX_DEVICETYPE_RENDER) {
        GfxOrderBase = ORDER_RENDER_GFX_FIRST;
        GfxOrderCeiling = ORDER_RENDER_GFX_LAST;
    }
    if (pSysaudioGfx->ulType == GFX_DEVICETYPE_CAPTURE ) {
        GfxOrderBase = ORDER_CAPTURE_GFX_FIRST;
        GfxOrderCeiling = ORDER_CAPTURE_GFX_LAST;
    }
    ASSERT(GfxOrderBase);
    ASSERT(GfxOrderCeiling);
    //
    // Copy the Device Name (on which the gfx needs to be attached) into a local copy for our own use
    //
    Status = SafeCopyDeviceName(
      (PWSTR) ((CHAR *) pSysaudioGfx + pSysaudioGfx->ulDeviceNameOffset),
      cbMaxLength,
      &pwstrDeviceName);
    if (!NT_SUCCESS(Status)) {
        goto exit;
    }
    DPF1(GFX_VERBOSE_LEVEL, " On DI = %s", DbgUnicode2Sz(pwstrDeviceName));
    //
    // Find the FilterNode for the Gfx
    //
    if ((pFilterNode = FindGfx(NULL,
      pSysaudioGfx->hGfx,
      pwstrDeviceName,
      pSysaudioGfx->ulOrder+GfxOrderBase)) == NULL) {
        Status = STATUS_INVALID_PARAMETER;
        goto exit;
    }
    //
    // Should we validate the FileHandle Value?
    //
    //
    // Dereference the file object
    //
    pFilterNode->ClearFileDetails();

exit:
    if(!NT_SUCCESS(Status)) {
        DPF1(GFX_VERBOSE_LEVEL, "RemoveGfx :: Failed, Status = %x", Status);
        Trap();
    }
    else {
        delete pFilterNode;
    }
    //
    // Fix: SafeCopyDeviceName allocates with array new (new(WCHAR[n])), so
    // the matching deallocation must be delete[] — the scalar delete used
    // previously is undefined behavior (AddGfx's failure paths already use
    // delete []). delete[] on NULL is a safe no-op.
    //
    delete [] pwstrDeviceName;
    return(Status);
}
//
// Searches the global filter-node list for a GFX filter matching the given
// handle (0 acts as a wildcard inside DoesGfxMatch), device name, and order.
// pnewFilterNode, if non-NULL, is skipped so a freshly added node never
// matches itself. Returns the matching node or NULL.
//
// Fix: removed four unused locals (DeviceCount, usInDevice, usfnDevice,
// pwstr) that were declared but never referenced.
//
PFILTER_NODE
FindGfx(
    PFILTER_NODE pnewFilterNode,
    HANDLE hGfx,
    PWSTR pwstrDeviceName,
    ULONG GfxOrder
)
{
    PFILTER_NODE pFilterNode;

    DPF2(90, "FindGfx:: Looking for GFX with order = %x attached to %s)", GfxOrder, DbgUnicode2Sz(pwstrDeviceName));
    FOR_EACH_LIST_ITEM(gplstFilterNode, pFilterNode) {
        //
        // Skip the one we just added
        //
        if (pFilterNode == pnewFilterNode) {
            continue;
        }
        //
        // Check whether this pFilterNode matches the Gfx we are looking for
        //
        if (pFilterNode->DoesGfxMatch(hGfx, pwstrDeviceName, GfxOrder)) {
            return (pFilterNode);
        }
    } END_EACH_LIST_ITEM
    return(NULL);
}
//=============================================================================
//
// Copies a UNICODE DeviceName to a new location. The source string is coming
// from user mode.
// There are assumptions in the code that the size should be greater than 4
// characters. (see DEVICE_NAME_TAG)
// Caller must make sure that this is BUFFERRED IO.
//
//
// Copies a NULL-terminated UNICODE device name (arriving from user mode via
// BUFFERRED IO) into a fresh array allocation. Rejects strings that are not
// terminated within cbMaxLength bytes or are no longer than the
// DEVICE_NAME_TAG prefix. On success *String owns the copy (free with
// delete []); on failure *String is NULL.
//
NTSTATUS
SafeCopyDeviceName(
    PWSTR pwstrDeviceName,
    ULONG cbMaxLength,
    PWSTR *String
)
{
    NTSTATUS ntStatus;
    //
    // Fix: cchLength must really be a size_t. The previous code declared it
    // ULONG and passed (size_t *)&cchLength to StringCchLength, which on
    // 64-bit writes 8 bytes through a pointer to a 4-byte variable,
    // corrupting the stack.
    //
    size_t cchLength;
    PWSTR pwstrString = NULL;

    *String = NULL;
    //
    // SECURITY_NOTE:
    // pwstrDeviceName points to a NULL-terminated UNICODE string.
    // The string is coming from user mode, through BUFFERRED IO. So try/
    // except is not necessary. Also Probe would not catch any errors.
    // The IRP OutputBufferLength limits, the size of the string.
    //
    if (S_OK !=
      StringCchLength(pwstrDeviceName, (size_t) cbMaxLength / sizeof(WCHAR), &cchLength))
    {
        DPF(5, "SafeCopyDeviceName: DeviceName is not zero-terminated.");
        ntStatus = STATUS_INVALID_PARAMETER;
        goto exit;
    }
    //
    // SECURITY NOTE:
    // There are assumptions further in the code about DeviceName string.
    // Make sure those assumptions hold.
    // One assumption is that DeviceName should be greater than 4
    // characters.
    //
    if (cchLength <= wcslen(DEVICE_NAME_TAG)) {
        DPF(5, "SafeCopyDeviceName: DeviceName is not well-formed");
        ntStatus = STATUS_INVALID_PARAMETER;
        goto exit;
    }
    pwstrString = new(WCHAR[cchLength + 1]) ;
    if(pwstrString == NULL) {
        ntStatus = STATUS_INSUFFICIENT_RESOURCES;
        goto exit;
    }
    //
    // SECURITY NOTE:
    // Note that Length does not include the terminating NULL.
    // Use n version of string copy in case the buffer changes.
    // Also make sure that the string is NULL terminated.
    //
    wcsncpy(pwstrString, pwstrDeviceName, cchLength);
    pwstrString[cchLength] = UNICODE_NULL;
    ntStatus = STATUS_SUCCESS;

exit:
    *String = pwstrString;
    return ntStatus;
}
//
// Maps a KS category GUID to its filter-type flag.
//
// Scans the apguidCategories table for a byte-wise match of *pguid; on a hit,
// ORs the corresponding aulFilterType flag into *pfulType.
//
// Returns STATUS_SUCCESS on a match, STATUS_INVALID_DEVICE_REQUEST otherwise.
// Note: *pfulType is left untouched when no category matches.
//
NTSTATUS
GetFilterTypeFromGuid(
    IN LPGUID pguid,
    OUT PULONG pfulType
)
{
    int index;

    for (index = 0; index < SIZEOF_ARRAY(apguidCategories); index++) {
        if (memcmp(pguid, apguidCategories[index], sizeof(GUID)) != 0) {
            continue;
        }
        // Accumulate rather than overwrite: a filter may carry several types.
        *pfulType |= aulFilterType[index];
        return (STATUS_SUCCESS);
    }
    return (STATUS_INVALID_DEVICE_REQUEST);
}
//---------------------------------------------------------------------------
// End of File: notify.cpp
//---------------------------------------------------------------------------
|
Bouke/djangobench
|
djangobench/benchmarks/query_delete_related/settings.py
|
# Settings for the query_delete_related djangobench benchmark: inherit the
# shared djangobench defaults, then register this benchmark's app.
from djangobench.base_settings import *
INSTALLED_APPS = ['query_delete_related']
|
bowring/ET_Redux
|
src/main/java/org/earthtime/Tripoli/massSpecSetups/singleCollector/ThermoFinnigan/LaserchronElementIISetupUPb_C.java
|
<reponame>bowring/ET_Redux
/*
* LaserchronElementIISetupUPb_C
*
* Copyright 2006-2018 <NAME>, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.Tripoli.massSpecSetups.singleCollector.ThermoFinnigan;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.earthtime.Tripoli.dataModels.DataModelInterface;
import org.earthtime.Tripoli.dataModels.RawIntensityDataModel;
import org.earthtime.Tripoli.dataModels.RawRatioDataModel;
import org.earthtime.Tripoli.dataModels.VirtualCollectorModel;
import org.earthtime.Tripoli.dataModels.aquisitionTypeDataModels.IsotopeMappingModel;
import org.earthtime.Tripoli.dataModels.collectorModels.AbstractCollectorModel;
import org.earthtime.Tripoli.dataModels.collectorModels.IonCounterCollectorModel;
import org.earthtime.Tripoli.fractions.TripoliFraction;
import org.earthtime.Tripoli.massSpecSetups.AbstractMassSpecSetup;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.valueModels.ValueModel;
import org.earthtime.dataDictionaries.IsotopeNames;
import org.earthtime.dataDictionaries.MassSpecTypeEnum;
import org.earthtime.dataDictionaries.RawRatioNames;
import org.earthtime.isotopes.IsotopesEnum;
/**
*
* @author <NAME>
*/
public final class LaserchronElementIISetupUPb_C extends AbstractMassSpecSetup {

    // Eagerly-created singleton; obtain via getInstance().
    private static LaserchronElementIISetupUPb_C instance = new LaserchronElementIISetupUPb_C();

    /**
     * Configures the Laserchron Element II single-collector setup: 18 virtual
     * collectors (1-9 background, 10-18 on-peak), one shared ion-counter
     * collector ("Single") with a 22 ns dead time, and fixed per-isotope
     * integration times (each is 4x the single-measurement value; see the
     * correspondence notes below).
     */
    private LaserchronElementIISetupUPb_C() {
        super();
        NAME = "Laserchron Element II Setup";
        massSpecType = MassSpecTypeEnum.SINGLE;
        VIRTUAL_COLLECTOR_COUNT = 18;
        COLLECTOR_DATA_FREQUENCY_MILLISECS = 325;
        countOfAcquisitions = 0;
        isotopeMappingModel = new IsotopeMappingModel();
        collectorNameToModelMap = new TreeMap<>();
        useConstantBackgroundFitFunction = false;
        this.commonLeadCorrectionHighestLevel = "B2";
        // Single shared ion counter: DeadTime = 22 ns (ABS, zero stated
        // uncertainty), data collected in SEM style.
        AbstractCollectorModel singleCollector
                = new IonCounterCollectorModel(//
                        "Single", //
                        new ValueModel("DeadTime", //
                                new BigDecimal(22.0e-9, //
                                        ReduxConstants.mathContext10), //
                                "ABS", //
                                new BigDecimal(0.0e-9, ReduxConstants.mathContext10), //
                                BigDecimal.ZERO), //
                        IonCounterCollectorModel.CollectedDataStyle.SEM);
        // from GG Feb 2016
        // Dead Time should be 22 ns
        //
        // Not sure how you are counting the integration time. Note that values are measured four times on each peak. Values below are for each one of the four measurements:
        // 176 = 0.0050
        // 202 = 0.0052
        // 204 = 0.0078
        // 206 = 0.0202
        // 207 = 0.0284
        // 208 = 0.0026
        // 232 = 0.0026
        // 235 = 0.0154
        // 238 = 0.0104
        // Noah says times 4 each
        // (each integration time below is therefore the above value times 4)
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.U238, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.U238, 0.0416);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.U235, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.U235, 0.0616);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Th232, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Th232, 0.0104);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Pb208, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Pb208, 0.0104);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Pb207, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Pb207, 0.1136);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Pb206, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Pb206, 0.0808);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Pb204, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Pb204, 0.0312);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Hg202, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Hg202, 0.0208);
        isotopeMappingModel.getIsotopeToCollectorMap().put(//
                IsotopesEnum.Hf176, singleCollector);
        isotopeMappingModel.getIsotopeToIntegrationTimeMap().put( //
                IsotopesEnum.Hf176, 0.020);
        collectorNameToModelMap.put("Single", singleCollector);
    }

    /**
     * @return the shared singleton setup instance
     */
    public static LaserchronElementIISetupUPb_C getInstance() {
        return instance;
    }

    /**
     * Records the number of acquisitions from the scan data, then delegates
     * to {@link #rawRatiosFactoryRevised()} to build the raw ratio models.
     *
     * @param intensitiesScan raw scan data; only its length (acquisition
     * count) is used here
     * @param fractionID the value of fractionID (unused in this setup)
     * @param usingFullPropagation the value of usingFullPropagation (unused)
     * @param tripoliFraction the value of tripoliFraction (unused)
     * @return the
     * java.util.SortedSet&lt;org.earthtime.Tripoli.dataModels.DataModelInterface&gt;
     */
    @Override
    public SortedSet<DataModelInterface> rawRatiosFactory(String[][] intensitiesScan, String fractionID, boolean usingFullPropagation, TripoliFraction tripoliFraction) {
        countOfAcquisitions = intensitiesScan.length;
        return rawRatiosFactoryRevised();
    }

    /**
     * Builds the virtual collectors, per-isotope raw intensity models and
     * raw ratio models for this setup. Virtual collectors 1-9 hold the
     * background readings and 10-18 the matching on-peak readings (background
     * n pairs with on-peak n+9). Single-isotope "ratios" (202/202, 176/176,
     * 235/235) are included as carriers for Hg, Hf and 235U intensities.
     *
     * @return the sorted set of raw ratio data models
     */
    @Override
    public SortedSet<DataModelInterface> rawRatiosFactoryRevised() {
        virtualCollectors = new ArrayList<>(VIRTUAL_COLLECTOR_COUNT);
        for (int i = 0; i < VIRTUAL_COLLECTOR_COUNT; i++) {
            virtualCollectors.add(new VirtualCollectorModel(i + 1));
        }
        // background collectors: 1-9
        virtualCollectors.get(9 - 1).updateCollector(true);
        virtualCollectors.get(8 - 1).updateCollector(true);
        virtualCollectors.get(7 - 1).updateCollector(true);
        virtualCollectors.get(6 - 1).updateCollector(true);
        virtualCollectors.get(5 - 1).updateCollector(true);
        virtualCollectors.get(4 - 1).updateCollector(true);
        virtualCollectors.get(3 - 1).updateCollector(true);
        virtualCollectors.get(2 - 1).updateCollector(true);
        virtualCollectors.get(1 - 1).updateCollector(true);
        // on-peak collectors: 10-18
        virtualCollectors.get(18 - 1).updateCollector(false);
        virtualCollectors.get(17 - 1).updateCollector(false);
        virtualCollectors.get(16 - 1).updateCollector(false);
        virtualCollectors.get(15 - 1).updateCollector(false);
        virtualCollectors.get(14 - 1).updateCollector(false);
        virtualCollectors.get(13 - 1).updateCollector(false);
        virtualCollectors.get(12 - 1).updateCollector(false);
        virtualCollectors.get(11 - 1).updateCollector(false);
        virtualCollectors.get(10 - 1).updateCollector(false);
        // isotope models: each pairs its background collector (n) with its
        // on-peak collector (n + 9)
        genericIsotopeModels = new TreeSet<>();
        U238 = new RawIntensityDataModel( //
                IsotopeNames.U238, virtualCollectors.get(9 - 1), virtualCollectors.get(18 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.U238));
        genericIsotopeModels.add(U238);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.U238, U238);
        U235 = new RawIntensityDataModel( //
                IsotopeNames.U235, virtualCollectors.get(8 - 1), virtualCollectors.get(17 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.U235));
        genericIsotopeModels.add(U235);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.U235, U235);
        Th232 = new RawIntensityDataModel( //
                IsotopeNames.Th232, virtualCollectors.get(7 - 1), virtualCollectors.get(16 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Th232));
        genericIsotopeModels.add(Th232);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Th232, Th232);
        Pb208 = new RawIntensityDataModel( //
                IsotopeNames.Pb208, virtualCollectors.get(6 - 1), virtualCollectors.get(15 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Pb208));
        genericIsotopeModels.add(Pb208);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Pb208, Pb208);
        Pb207 = new RawIntensityDataModel( //
                IsotopeNames.Pb207, virtualCollectors.get(5 - 1), virtualCollectors.get(14 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Pb207));
        genericIsotopeModels.add(Pb207);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Pb207, Pb207);
        Pb206 = new RawIntensityDataModel( //
                IsotopeNames.Pb206, virtualCollectors.get(4 - 1), virtualCollectors.get(13 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Pb206));
        genericIsotopeModels.add(Pb206);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Pb206, Pb206);
        Pb204 = new RawIntensityDataModel( //
                IsotopeNames.Pb204, virtualCollectors.get(3 - 1), virtualCollectors.get(12 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Pb204));
        genericIsotopeModels.add(Pb204);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Pb204, Pb204);
        Hg202 = new RawIntensityDataModel( //
                IsotopeNames.Hg202, virtualCollectors.get(2 - 1), virtualCollectors.get(11 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Hg202));
        genericIsotopeModels.add(Hg202);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Hg202, Hg202);
        Hf176 = new RawIntensityDataModel( //
                IsotopeNames.Hf176, virtualCollectors.get(1 - 1), virtualCollectors.get(10 - 1), COLLECTOR_DATA_FREQUENCY_MILLISECS,//
                isotopeMappingModel.getIsotopeToCollectorMap().get(IsotopesEnum.Hf176));
        genericIsotopeModels.add(Hf176);
        isotopeToRawIntensitiesMap.put(IsotopesEnum.Hf176, Hf176);
        isotopeMappingModel.setIsotopeToRawIntensitiesMap(isotopeToRawIntensitiesMap);
        // field indexes follow the acquisition order: lightest (Hf176 = 0)
        // to heaviest (U238 = 8)
        virtualCollectorModelMapToFieldIndexes = new HashMap<>();
        getVirtualCollectorModelMapToFieldIndexes().put(Hf176, 0);
        getVirtualCollectorModelMapToFieldIndexes().put(Hg202, 1);
        getVirtualCollectorModelMapToFieldIndexes().put(Pb204, 2);
        getVirtualCollectorModelMapToFieldIndexes().put(Pb206, 3);
        getVirtualCollectorModelMapToFieldIndexes().put(Pb207, 4);
        getVirtualCollectorModelMapToFieldIndexes().put(Pb208, 5);
        getVirtualCollectorModelMapToFieldIndexes().put(Th232, 6);
        getVirtualCollectorModelMapToFieldIndexes().put(U235, 7);
        getVirtualCollectorModelMapToFieldIndexes().put(U238, 8);
        // raw ratios
        rawRatios = new TreeSet<>();
        DataModelInterface r206_238w = new RawRatioDataModel(RawRatioNames.r206_238w, Pb206, U238, true, false, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r206_238w);
        DataModelInterface r206_207w = new RawRatioDataModel(RawRatioNames.r206_207w, Pb206, Pb207, true, false, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r206_207w);
        DataModelInterface r208_232w = new RawRatioDataModel(RawRatioNames.r208_232w, Pb208, Th232, true, false, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r208_232w);
        // special case to handle mercury isotope
        rawRatios.add(new RawRatioDataModel(RawRatioNames.r202_202w, Hg202, Hg202, false, false, COLLECTOR_DATA_FREQUENCY_MILLISECS));
        // oct 2014 to handle B schemas for common lead correction
        DataModelInterface r206_204w = new RawRatioDataModel(RawRatioNames.r206_204w, Pb206, Pb204, false, true, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r206_204w);
        DataModelInterface r207_204w = new RawRatioDataModel(RawRatioNames.r207_204w, Pb207, Pb204, false, true, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r207_204w);
        DataModelInterface r208_204w = new RawRatioDataModel(RawRatioNames.r208_204w, Pb208, Pb204, false, true, COLLECTOR_DATA_FREQUENCY_MILLISECS);
        rawRatios.add(r208_204w);
        // special case to handle 176 Halfnium isotope
        rawRatios.add(new RawRatioDataModel(RawRatioNames.r176_176w, Hf176, Hf176, false, false, COLLECTOR_DATA_FREQUENCY_MILLISECS));
        // special case to handle 235 Uranium isotope
        rawRatios.add(new RawRatioDataModel(RawRatioNames.r235_235w, U235, U235, false, false, COLLECTOR_DATA_FREQUENCY_MILLISECS));
        return rawRatios;
    }

    /**
     * Integration times are fixed per isotope for this setup, so manual
     * assignment is not supported.
     *
     * @param integrationTime ignored
     * @throws UnsupportedOperationException always
     */
    @Override
    public void assignIntegrationTime(double integrationTime) {
        throw new UnsupportedOperationException("Not legal.");
    }
}
|
gustavopinto/entente
|
seeds/WebKit/es6/destructuring_chained_iterable_destructuring.js
|
// Seed for chained iterable destructuring: `[a,b] = [c,d] = [1,2]` must
// assign through the middle pattern and propagate the source array leftward.
function test() {
    var w, x, y, z;
    [w, x] = [y, z] = [1, 2];
    var leftPairOk = (w === 1) && (x === 2);
    var middlePairOk = (y === 1) && (z === 2);
    return leftPairOk && middlePairOk;
}
if (!test())
    throw new Error("Test failed");
|
deadok22/intellij-erlang
|
gen/org/intellij/erlang/parser/_ErlangLexer.java
|
<filename>gen/org/intellij/erlang/parser/_ErlangLexer.java
/* The following code was generated by JFlex 1.4.3 on 11/26/14 3:42 PM */
package org.intellij.erlang.parser;
import com.intellij.lexer.FlexLexer;
import com.intellij.psi.tree.IElementType;
import static org.intellij.erlang.ErlangTypes.*;
import static org.intellij.erlang.ErlangParserDefinition.*;
/**
* This class is a scanner generated by
* <a href="http://www.jflex.de/">JFlex</a> 1.4.3
* on 11/26/14 3:42 PM from the specification file
* <tt>/home/user/projects/intellij-erlang/src/org/intellij/erlang/parser/Erlang.flex</tt>
*/
public class _ErlangLexer implements FlexLexer {
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 16384;

/** lexical states (IN_QUOTES is entered inside quoted atoms/strings) */
public static final int IN_QUOTES = 2;
public static final int YYINITIAL = 0;

/**
 * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
 * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
 * at the beginning of a line
 * l is of the form l = 2*k, k a non negative integer
 *
 * NOTE(review): JFlex-generated table; regenerate from Erlang.flex rather
 * than editing by hand.
 */
private static final int ZZ_LEXSTATE[] = {
    0, 0, 1, 1
};
/**
 * Translates characters to character classes.
 * Run-length packed form: pairs of (count, class) chars; "\uff82\0" maps the
 * remaining high code points to class 0. JFlex-generated -- do not hand-edit.
 */
private static final String ZZ_CMAP_PACKED =
    "\1\1\10\0\1\1\1\6\2\0\1\4\21\0\1\1\1\1\1\3"+
    "\1\24\1\2\1\43\1\5\1\0\1\35\1\70\1\71\1\63\1\62"+
    "\1\45\1\16\1\17\1\64\10\21\2\11\1\57\1\73\1\61\1\44"+
    "\1\60\1\74\1\22\4\12\1\14\1\12\24\7\1\36\1\20\1\40"+
    "\1\23\1\46\1\42\1\47\1\25\1\13\1\26\1\15\1\27\1\52"+
    "\1\51\1\53\2\10\1\67\1\65\1\30\1\54\2\10\1\31\1\32"+
    "\1\33\1\55\1\34\1\50\1\66\1\56\1\10\1\37\1\72\1\41"+
    "\uff82\0";

/**
 * Translates characters to character classes (unpacked lookup table built
 * once from ZZ_CMAP_PACKED).
 */
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
 * Translates DFA states to action switch labels.
 * Unpacked at class-load time from the run-length encoded string below.
 */
private static final int [] ZZ_ACTION = zzUnpackAction();

// Run-length packed action table: pairs of (count, action) chars.
// JFlex-generated -- regenerate instead of editing.
private static final String ZZ_ACTION_PACKED_0 =
    "\2\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7"+
    "\1\10\2\7\1\11\1\12\1\1\6\7\1\13\1\14"+
    "\1\15\1\16\1\17\1\20\1\21\1\22\4\7\1\23"+
    "\1\24\1\25\1\26\1\27\1\30\1\7\1\31\1\32"+
    "\1\33\1\34\1\35\1\7\1\0\1\36\1\37\1\5"+
    "\1\40\1\0\1\10\1\0\2\7\1\41\1\42\1\43"+
    "\2\0\1\44\14\7\2\20\1\45\1\0\1\46\1\47"+
    "\1\0\3\7\1\50\1\51\1\52\1\53\1\54\1\55"+
    "\1\56\1\57\1\60\1\61\1\62\1\63\1\7\1\64"+
    "\1\0\1\40\1\65\2\10\1\66\2\7\1\67\1\70"+
    "\2\7\1\71\1\72\1\7\1\73\1\7\1\74\1\75"+
    "\1\76\1\7\1\77\1\100\1\20\1\0\1\101\1\102"+
    "\1\7\1\103\2\7\1\104\1\0\1\105\2\7\1\106"+
    "\1\107\1\110\1\7\1\20\2\7\1\111\1\7\1\66"+
    "\1\0\1\112\1\113\1\7\1\114\4\7\1\115\1\116"+
    "\1\117";
/** Unpacks the full action table from its packed string form. */
private static int [] zzUnpackAction() {
    int [] map = new int[156];
    // The returned write offset is not needed here: the single packed
    // segment fills the whole table.
    zzUnpackAction(ZZ_ACTION_PACKED_0, 0, map);
    return map;
}
/**
 * Run-length decodes one packed segment into {@code result}.
 * The packed string is a sequence of (count, value) char pairs; each value
 * is written {@code count} times (at least once, matching the generator's
 * invariant that counts are never zero).
 *
 * @return the offset just past the last element written
 */
private static int zzUnpackAction(String packed, int offset, int [] result) {
    int in = 0;        /* read position in the packed string */
    int out = offset;  /* write position in the result array */
    final int length = packed.length();
    while (in < length) {
        int count = packed.charAt(in++);
        int value = packed.charAt(in++);
        do {
            result[out++] = value;
        } while (--count > 0);
    }
    return out;
}
/**
 * Translates a state to a row index in the transition table.
 * Unpacked at class-load time from the packed string below.
 */
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();

// Packed row map: each entry is two chars (high 16 bits, low 16 bits).
// JFlex-generated -- regenerate instead of editing.
private static final String ZZ_ROWMAP_PACKED_0 =
    "\0\0\0\75\0\172\0\267\0\364\0\172\0\u0131\0\u016e"+
    "\0\u01ab\0\u01e8\0\u0225\0\u0262\0\u029f\0\u02dc\0\u0319\0\u0356"+
    "\0\u0393\0\u03d0\0\u040d\0\u044a\0\u0487\0\u04c4\0\172\0\172"+
    "\0\172\0\172\0\u0501\0\u053e\0\172\0\u057b\0\u05b8\0\u05f5"+
    "\0\u0632\0\u066f\0\u06ac\0\u06e9\0\u0726\0\172\0\u0763\0\u07a0"+
    "\0\172\0\172\0\u07dd\0\172\0\172\0\u081a\0\u0857\0\u04c4"+
    "\0\u0894\0\u08d1\0\u090e\0\u094b\0\u0988\0\u09c5\0\u0a02\0\u0a3f"+
    "\0\172\0\172\0\u0a7c\0\u0319\0\u0ab9\0\172\0\u0af6\0\u0b33"+
    "\0\u0b70\0\u0bad\0\u0bea\0\u0c27\0\u0c64\0\u0ca1\0\u0cde\0\u0d1b"+
    "\0\u0d58\0\172\0\172\0\u0d95\0\172\0\u0dd2\0\172\0\172"+
    "\0\u0e0f\0\u0e4c\0\u0e89\0\u0ec6\0\u01ab\0\u01ab\0\u0f03\0\172"+
    "\0\172\0\172\0\172\0\172\0\172\0\172\0\172\0\172"+
    "\0\u0f40\0\172\0\u0f7d\0\u0fba\0\u0ff7\0\u094b\0\u1034\0\u1071"+
    "\0\u10ae\0\u10eb\0\u01ab\0\172\0\u1128\0\u1165\0\u01ab\0\u01ab"+
    "\0\u11a2\0\u01ab\0\u11df\0\u01ab\0\u01ab\0\u01ab\0\u121c\0\u01ab"+
    "\0\u01ab\0\u1259\0\u1296\0\172\0\172\0\u12d3\0\u1310\0\u134d"+
    "\0\u138a\0\u01ab\0\u13c7\0\u01ab\0\u1404\0\u1441\0\u01ab\0\u01ab"+
    "\0\u01ab\0\u147e\0\u14bb\0\u14f8\0\u1535\0\u01ab\0\u1572\0\u15af"+
    "\0\u15af\0\u01ab\0\u01ab\0\u15ec\0\u01ab\0\u1629\0\u1666\0\u16a3"+
    "\0\u16e0\0\u01ab\0\u01ab\0\u01ab";
/** Unpacks the full state-to-row map from its packed string form. */
private static int [] zzUnpackRowMap() {
    int [] map = new int[156];
    // Single packed segment fills the whole table; the returned offset
    // is not needed.
    zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, 0, map);
    return map;
}
/**
 * Decodes one packed row-map segment into {@code result}. Each entry is
 * stored as two consecutive chars: the high 16 bits followed by the low
 * 16 bits of the 32-bit row index.
 *
 * @return the offset just past the last element written
 */
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
    int in = 0;        /* read position in the packed string */
    int out = offset;  /* write position in the result array */
    final int length = packed.length();
    while (in < length) {
        int high = packed.charAt(in++) << 16;
        result[out++] = high | packed.charAt(in++);
    }
    return out;
}
/**
* The transition table of the DFA
*/
private static final int ZZ_TRANS [] = {
2, 3, 4, 5, 2, 6, 3, 7, 8, 9,
7, 10, 7, 11, 12, 13, 2, 9, 2, 2,
14, 15, 16, 17, 18, 19, 8, 20, 8, 21,
22, 23, 24, 25, 2, 26, 27, 28, 7, 29,
30, 8, 8, 31, 32, 8, 8, 33, 34, 35,
36, 37, 38, 8, 39, 8, 40, 41, 42, 43,
44, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 46, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
47, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 3, -1, -1, -1, -1, 3,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 48, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 49, 49, 49, 49, -1,
50, -1, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, -1, -1, -1, -1,
-1, -1, -1, 7, 7, 7, 7, 7, 7, 7,
-1, -1, -1, 7, 7, -1, -1, 7, 7, 7,
7, 7, 7, 7, 7, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 7, 7, 7, 7, 7, 7,
7, 7, 7, -1, -1, -1, -1, -1, -1, 7,
7, 7, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, 8, 8, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
51, -1, -1, -1, -1, -1, -1, 52, -1, -1,
-1, -1, -1, 53, -1, 52, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, 8, 8, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 54, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 55, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 56, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 57,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 58, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 60,
59, 59, 59, 61, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 62, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 63, 8,
64, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 65, 8, 8, 8, 8, 66, 8,
8, -1, -1, -1, -1, -1, -1, 8, 67, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 68, 8,
8, 8, -1, -1, -1, -1, -1, -1, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, 69, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, 8, 8, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 70, 8, 8, -1, -1, -1, -1, -1, -1,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 71, -1, -1, -1, 8, 8, -1, -1, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, 8, 8, -1, -1,
8, 8, 8, 8, 72, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 73,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 74, 74, 74, 74, 74, 74, -1, 74, 74,
74, 74, 74, 74, 74, 74, 74, 75, 74, 74,
74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
74, 74, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 76, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 77,
78, 79, -1, -1, 80, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
8, 8, -1, -1, 8, 8, 81, 82, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 83, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 84, 8,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 85,
8, 86, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 87, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 88, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 89, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 90, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 91, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 92, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 93, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
94, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 95, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 96, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 97, -1, -1, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
46, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, -1, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, -1, -1, -1, -1, -1,
45, -1, -1, -1, -1, -1, -1, -1, 45, -1,
45, 45, 45, -1, 98, 45, 45, 45, 45, 45,
45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 45, 45, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 48, 48, 48, 48,
-1, 48, -1, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 48, 48, 48, 48, 48, 49, 49, 49,
49, -1, 49, -1, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 49, 49,
49, 49, 49, 49, 49, 49, 49, 49, 99, 99,
99, 99, -1, 100, -1, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 101, 101,
101, 101, 101, -1, -1, -1, 101, -1, -1, -1,
101, 101, 101, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 101, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 51, -1, -1, -1, -1, -1, -1, 102,
-1, -1, -1, -1, -1, 53, -1, 102, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
103, -1, -1, -1, -1, -1, -1, -1, 103, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 104, 105,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
8, 8, -1, -1, 8, 106, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 107,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 108, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, 8, 8, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 109, 8, 8, -1, -1, -1, -1, -1, -1,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, 8, 8, -1, -1, 8,
8, 8, 8, 110, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, 8, 8, 111, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, 8, 8, -1, -1,
8, 8, 8, 112, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 8, 113, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, 8, 8,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 114, 8, 8, -1, -1,
-1, -1, -1, -1, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
115, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
8, 8, -1, -1, 8, 8, 8, 116, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 8, 8,
8, 117, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 118, 8, 8, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, 119, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, 8, 120, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 74, -1, -1, -1, -1, -1, -1, -1,
74, -1, 74, 74, 121, -1, 122, 74, 74, 74,
74, 74, 74, 74, 74, 74, 74, 74, 74, 74,
74, 74, 74, 74, 74, 74, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 123, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 124, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 8, 8, 8, 125, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, 8, 8,
-1, -1, 8, 126, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 127, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 128, -1, -1, -1,
8, 8, -1, -1, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 8, 129,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 45, -1, -1, 45, -1, 45, -1, -1,
-1, 45, -1, 45, 45, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 45, -1, 45, -1, -1,
-1, -1, -1, 45, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 99, 99, 99, 99,
-1, 99, -1, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 100, 100, 100,
100, -1, 100, -1, 100, 100, 100, 100, 100, 100,
100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
100, 100, 100, 100, 100, 100, 100, 100, 100, 100,
100, 100, 100, 100, 100, 100, 100, 100, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 102, -1, -1,
-1, -1, -1, 53, -1, 102, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 103, -1,
-1, 130, 130, -1, -1, -1, 103, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 131, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 132, 8, 8, -1, -1, -1, 8, 8,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 133, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
8, 8, -1, -1, 8, 8, 8, 8, 8, 8,
134, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, 8, 8, -1, -1, 8, 135, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 8, 8,
136, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 137,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 138, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 74, -1, -1, 74, -1,
74, -1, -1, -1, 74, -1, 74, 74, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 74, -1,
74, -1, -1, -1, -1, -1, 74, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 139, -1, -1, -1, 8, 8, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 140,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, 8, 8,
-1, -1, 8, 8, 8, 141, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 142, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 143, -1, -1, -1, -1, 144, -1, -1,
143, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 144, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 145, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 8, 146,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 147,
8, 8, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 74, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, 8, 8, -1, -1, 8,
8, 8, 8, 148, 8, 8, 8, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, 8, 8, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, -1,
-1, -1, 8, 8, 149, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, 8, 8, -1,
-1, 8, 8, 8, 8, 8, 150, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, 8, 8,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
-1, -1, -1, 8, 8, 8, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
143, -1, -1, -1, -1, -1, -1, -1, 143, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 8,
8, 8, 8, 8, 8, 8, -1, -1, -1, 8,
8, -1, -1, 8, 8, 8, 8, 8, 8, 8,
151, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, 8, 8, -1,
-1, -1, -1, -1, -1, 8, 8, 8, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 8, 8, 8, 8, 8, 8, -1, -1, -1,
8, 8, -1, -1, 8, 8, 8, 8, 8, 152,
8, 8, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, -1, -1, -1, 8, 8, 8, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 8, 8, 8, 8, 8, 8, 153, -1, -1,
-1, 8, 8, -1, -1, 8, 8, 8, 8, 8,
8, 8, 8, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 8, 8,
8, -1, -1, -1, -1, -1, -1, 8, 8, 8,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 8, 8, 8, 8, 8, 8, 154, -1,
-1, -1, 8, 8, -1, -1, 8, 8, 8, 8,
8, 8, 8, 8, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
8, 8, -1, -1, -1, -1, -1, -1, 8, 8,
8, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, 8, 8, 8, 8, 8, 8, 8,
-1, -1, -1, 8, 8, -1, -1, 8, 8, 8,
8, 8, 8, 8, 8, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 8, 8, 8, 8, 8, 8,
155, 8, 8, -1, -1, -1, -1, -1, -1, 8,
8, 8, -1, -1, -1, -1, -1,
};
/* error codes */
// Generated by JFlex — scanner plumbing; hand edits will be lost on regeneration.
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
// Shared empty buffer placeholder (never mutated).
private static final char[] EMPTY_BUFFER = new char[0];
// Sentinel used by the DFA loop to signal end of input.
private static final int YYEOF = -1;
// NOTE(review): a *static* field assigned from an instance constructor below; the
// generated "Fake" reader is unused by this IDE-style lexer — input arrives via reset().
private static java.io.Reader zzReader = null; // Fake
/* error messages for the codes above, indexed by the ZZ_* error codes */
private static final String ZZ_ERROR_MSG[] = {
  "Unkown internal scanner error",        // [sic] typo is in the generated output; message text is runtime behavior
  "Error: could not match input",
  "Error: pushback value was too large"
};
/**
 * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>.
 * Bit 0 marks an accepting state and bit 3 means "stop scanning here"
 * (both bits are tested in advance()).
 */
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

/* Run-length encoded attribute table: alternating (count, value) chars in octal escapes. */
private static final String ZZ_ATTRIBUTE_PACKED_0 =
  "\2\0\1\11\2\1\1\11\20\1\4\11\2\1\1\11"+
  "\10\1\1\11\2\1\2\11\1\1\2\11\1\1\1\0"+
  "\4\1\1\0\1\1\1\0\2\1\2\11\1\1\2\0"+
  "\1\11\13\1\2\11\1\1\1\11\1\0\2\11\1\0"+
  "\6\1\11\11\1\1\1\11\1\0\10\1\1\11\16\1"+
  "\1\0\2\11\5\1\1\0\15\1\1\0\13\1";
/**
 * Unpacks the run-length encoded ZZ_ATTRIBUTE_PACKED_0 string into the
 * 156-entry per-state attribute table (156 = number of DFA states).
 */
private static int [] zzUnpackAttribute() {
  int [] result = new int[156];
  int offset = 0;
  offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
  return result;
}
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/** the current state of the DFA */
private int zzState;

/** the current lexical state */
private int zzLexicalState = YYINITIAL;

/** this buffer contains the current text to be matched and is
    the source of the yytext() string */
private CharSequence zzBuffer = "";

/** optional backing char array of zzBuffer, cached when it is cheap to acquire
    (fast path used by yycharat() and advance(); null otherwise) */
private char[] zzBufferArray;

/** the textposition at the last accepting state (end of the current match) */
private int zzMarkedPos;

/** the textposition at the last state to be included in yytext */
private int zzPushbackPos;

/** the current text position in the buffer */
private int zzCurrentPos;

/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;

/** endRead marks the last character in the buffer, that has been read
    from input */
private int zzEndRead;

/**
 * zzAtBOL == true <=> the scanner is currently at the beginning of a line
 */
private boolean zzAtBOL = true;

/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/* user code: */
/** Creates a scanner with no reader; input is supplied later via reset(). */
public _ErlangLexer() {
  this((java.io.Reader)null);
}

/** Creates a scanner on the given reader. */
public _ErlangLexer(java.io.Reader in) {
  // NOTE(review): assigns a *static* field from an instance constructor.
  // Harmless here because scanning reads zzBuffer (set in reset()), never zzReader,
  // but the reader is effectively shared across all instances.
  this.zzReader = in;
}
/**
 * Creates a new scanner.
 * There is also java.io.Reader version of this constructor.
 *
 * @param in the java.io.Inputstream to read input from
 *           (wrapped in a platform-default-charset InputStreamReader)
 */
public _ErlangLexer(java.io.InputStream in) {
  this(new java.io.InputStreamReader(in));
}
/**
 * Unpacks the compressed character translation table (char -> character class).
 *
 * @param packed the packed character translation table
 * @return the unpacked character translation table, one entry per UTF-16 code unit
 */
private static char [] zzUnpackCMap(String packed) {
  char [] map = new char[0x10000];
  int i = 0;  /* index in packed string */
  int j = 0;  /* index in unpacked array */
  // 142 is the generator-emitted length of the packed CMap string
  // (not packed.length()) — presumably matches ZZ_CMAP_PACKED; confirm on regeneration.
  while (i < 142) {
    int count = packed.charAt(i++);
    char value = packed.charAt(i++);
    do map[j++] = value; while (--count > 0);
  }
  return map;
}
/** Returns the buffer offset where the current token starts. */
public final int getTokenStart(){
  return zzStartRead;
}
/** Returns the buffer offset one past the end of the current token. */
public final int getTokenEnd(){
  return getTokenStart() + yylength();
}
/**
 * (Re)initializes the scanner over {@code buffer[start, end)} in the given
 * lexical state. This is the IDE entry point; it replaces any reader input.
 *
 * @param buffer       text to tokenize
 * @param start        offset of the first character to scan
 * @param end          offset past the last character to scan
 * @param initialState lexical state to begin in (e.g. YYINITIAL)
 */
public void reset(CharSequence buffer, int start, int end,int initialState){
  zzBuffer = buffer;
  // Cache the backing char[] when the sequence exposes one, enabling the
  // array fast path in yycharat()/advance(); null when not cheaply available.
  zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
  zzCurrentPos = zzMarkedPos = zzStartRead = start;
  zzPushbackPos = 0;
  zzAtEOF = false;
  zzAtBOL = true;
  zzEndRead = end;
  yybegin(initialState);
}
/**
 * Refills the input buffer.
 *
 * In this IDE variant the whole buffer is provided up front via reset(),
 * so there is never new input: this always returns {@code true},
 * which advance() interprets as end-of-file.
 *
 * @return <code>false</code>, iff there was new input (never happens here)
 *
 * @exception java.io.IOException if any I/O-Error occurs
 */
private boolean zzRefill() throws java.io.IOException {
  return true;
}
/**
 * Returns the current lexical state.
 */
public final int yystate() {
  return zzLexicalState;
}
/**
 * Enters a new lexical state.
 *
 * @param newState the new lexical state (one of the generated state constants)
 */
public final void yybegin(int newState) {
  zzLexicalState = newState;
}
/**
 * Returns the text matched by the current regular expression,
 * as a view over the scan buffer (no copy).
 */
public final CharSequence yytext() {
  return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
}
/**
 * Returns the character at position <tt>pos</tt> from the
 * matched text.
 *
 * It is equivalent to yytext().charAt(pos), but faster: it reads
 * the cached backing array directly when one is available.
 *
 * @param pos the position of the character to fetch.
 *            A value from 0 to yylength()-1 (not bounds-checked here).
 *
 * @return the character at position pos
 */
public final char yycharat(int pos) {
  return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer.charAt(zzStartRead+pos);
}
/**
 * Returns the length of the matched text region.
 */
public final int yylength() {
  return zzMarkedPos-zzStartRead;
}
/**
 * Reports an error that occured while scanning.
 *
 * In a wellformed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param errorCode the code of the errormessage to display
 *                  (index into ZZ_ERROR_MSG; unknown codes fall back to ZZ_UNKNOWN_ERROR)
 */
private void zzScanError(int errorCode) {
  String message;
  try {
    message = ZZ_ERROR_MSG[errorCode];
  }
  catch (ArrayIndexOutOfBoundsException e) {
    // Out-of-range code: degrade to the generic message rather than failing twice.
    message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
  }
  throw new Error(message);
}
/**
 * Pushes the specified amount of characters back into the input stream.
 *
 * They will be read again by then next call of the scanning method.
 *
 * @param number the number of characters to be read again.
 *               This number must not be greater than yylength()!
 * @throws Error (via zzScanError) when number exceeds the current match length
 */
public void yypushback(int number) {
  if ( number > yylength() )
    zzScanError(ZZ_PUSHBACK_2BIG);
  // Shrink the match; the pushed-back characters are rescanned on the next advance().
  zzMarkedPos -= number;
}
/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
 *
 * Runs the generated DFA over zzBuffer starting at zzMarkedPos, remembering
 * the last accepting state reached, then maps the accepting action number
 * to an Erlang token type in the switch below.
 *
 * @return the next token, or null at end of input
 * @exception java.io.IOException if any I/O-Error occurs
 */
public IElementType advance() throws java.io.IOException {
  int zzInput;
  int zzAction;

  // cached fields: copied to locals so the hot DFA loop avoids repeated field loads
  int zzCurrentPosL;
  int zzMarkedPosL;
  int zzEndReadL = zzEndRead;
  CharSequence zzBufferL = zzBuffer;
  char[] zzBufferArrayL = zzBufferArray;
  char [] zzCMapL = ZZ_CMAP;

  int [] zzTransL = ZZ_TRANS;
  int [] zzRowMapL = ZZ_ROWMAP;
  int [] zzAttrL = ZZ_ATTRIBUTE;

  while (true) {
    // Each iteration matches one token starting where the previous match ended.
    zzMarkedPosL = zzMarkedPos;
    zzAction = -1;   // -1 = no accepting state seen yet for this token
    zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
    zzState = ZZ_LEXSTATE[zzLexicalState];

    zzForAction: {
      while (true) {
        // Fetch the next input character (array fast path when available).
        if (zzCurrentPosL < zzEndReadL)
          zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
        else if (zzAtEOF) {
          zzInput = YYEOF;
          break zzForAction;
        }
        else {
          // store back cached positions
          zzCurrentPos = zzCurrentPosL;
          zzMarkedPos = zzMarkedPosL;
          boolean eof = zzRefill();   // always true in this IDE lexer — see zzRefill()
          // get translated positions and possibly new buffer
          zzCurrentPosL = zzCurrentPos;
          zzMarkedPosL = zzMarkedPos;
          zzBufferL = zzBuffer;
          zzEndReadL = zzEndRead;
          if (eof) {
            zzInput = YYEOF;
            break zzForAction;
          }
          else {
            zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
          }
        }
        // DFA transition: map the char to its class, then look up the next state.
        int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
        if (zzNext == -1) break zzForAction;   // dead state: longest match already recorded
        zzState = zzNext;

        int zzAttributes = zzAttrL[zzState];
        if ( (zzAttributes & 1) == 1 ) {
          // Accepting state: remember the action and extend the match.
          zzAction = zzState;
          zzMarkedPosL = zzCurrentPosL;
          if ( (zzAttributes & 8) == 8 ) break zzForAction;   // "final" state: stop immediately
        }
      }
    }

    // store back cached position
    zzMarkedPos = zzMarkedPosL;

    // Map the recorded action to a token type; the interleaved "case N: break;"
    // arms are generated fall-through guards and are unreachable after a return.
    switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
      case 5:
        { return ERL_COMMENT;
        }
      case 80: break;
      case 20:
        { return ERL_OP_GT;
        }
      case 81: break;
      case 31:
        // A shebang is only legal at the very start of the file.
        { return getTokenStart() == 0 ? ERL_SHEBANG : com.intellij.psi.TokenType.ERROR_ELEMENT;
        }
      case 82: break;
      case 79:
        { return ERL_ANDALSO;
        }
      case 83: break;
      case 37:
        { return ERL_OP_EQ_EQ;
        }
      case 84: break;
      case 46:
        { return ERL_BIN_END;
        }
      case 85: break;
      case 32:
        { return ERL_FUNCTION_DOC_COMMENT;
        }
      case 86: break;
      case 55:
        { return ERL_END;
        }
      case 87: break;
      case 27:
        { return ERL_OP_OR;
        }
      case 88: break;
      case 36:
        { return ERL_STRING;
        }
      case 89: break;
      case 54:
        { return ERL_FLOAT;
        }
      case 90: break;
      case 78:
        { return ERL_RECEIVE;
        }
      case 91: break;
      case 70:
        { return ERL_BNOT;
        }
      case 92: break;
      case 24:
        { return ERL_OP_AR_DIV;
        }
      case 93: break;
      case 14:
        { return ERL_BRACKET_RIGHT;
        }
      case 94: break;
      case 13:
        { return ERL_CURLY_LEFT;
        }
      case 95: break;
      case 30:
        // Closing quote of a quoted atom: leave the IN_QUOTES state.
        { yybegin(YYINITIAL); return ERL_SINGLE_QUOTE;
        }
      case 96: break;
      case 35:
        { return ERL_DOT_DOT;
        }
      case 97: break;
      case 29:
        { return ERL_QMARK;
        }
      case 98: break;
      case 63:
        { return ERL_REM;
        }
      case 99: break;
      case 61:
        { return ERL_FUN;
        }
      case 100: break;
      case 77:
        { return ERL_ORELSE;
        }
      case 101: break;
      case 2:
        { return com.intellij.psi.TokenType.WHITE_SPACE;
        }
      case 102: break;
      case 68:
        { return ERL_XOR;
        }
      case 103: break;
      case 60:
        { return ERL_DIV;
        }
      case 104: break;
      case 15:
        { return ERL_CURLY_RIGHT;
        }
      case 105: break;
      case 33:
        { return ERL_OP_MINUS_MINUS;
        }
      case 106: break;
      case 8:
        { return ERL_INTEGER;
        }
      case 107: break;
      case 65:
        { return ERL_OP_EQ_COL_EQ;
        }
      case 108: break;
      case 73:
        { return ERL_WHEN;
        }
      case 109: break;
      case 43:
        { return ERL_MATCH;
        }
      case 110: break;
      case 75:
        { return ERL_BEGIN;
        }
      case 111: break;
      case 39:
        { return ERL_OP_EQ_LT;
        }
      case 112: break;
      case 53:
        { return ERL_MODULE_DOC_COMMENT;
        }
      case 113: break;
      case 66:
        { return ERL_OP_EQ_DIV_EQ;
        }
      case 114: break;
      case 34:
        { return ERL_ARROW;
        }
      case 115: break;
      case 10:
        { return ERL_DOT;
        }
      case 116: break;
      case 9:
        { return ERL_OP_MINUS;
        }
      case 117: break;
      case 71:
        { return ERL_BAND;
        }
      case 118: break;
      case 44:
        { return ERL_COLON_COLON;
        }
      case 119: break;
      case 38:
        { return ERL_ASSOC;
        }
      case 120: break;
      case 58:
        { return ERL_BSL;
        }
      case 121: break;
      case 19:
        { return ERL_COLON;
        }
      case 122: break;
      case 28:
        { return ERL_SEMI;
        }
      case 123: break;
      case 16:
        { return ERL_CHAR;
        }
      case 124: break;
      case 57:
        { return ERL_BSR;
        }
      case 125: break;
      case 17:
        { return ERL_OP_EQ;
        }
      case 126: break;
      case 51:
        { return ERL_OP_DIV_EQ;
        }
      case 127: break;
      case 4:
        { return ERL_OP_EXL;
        }
      case 128: break;
      case 48:
        { return ERL_OP_LT_EQ;
        }
      case 129: break;
      case 25:
        { return ERL_PAR_LEFT;
        }
      case 130: break;
      case 23:
        { return ERL_OP_AR_MUL;
        }
      case 131: break;
      case 50:
        { return ERL_OP_PLUS_PLUS;
        }
      case 132: break;
      case 6:
        { return ERL_VAR;
        }
      case 133: break;
      case 62:
        { return ERL_NOT;
        }
      case 134: break;
      case 59:
        { return ERL_BOR;
        }
      case 135: break;
      case 52:
        { return ERL_OR_OR;
        }
      case 136: break;
      case 72:
        { return ERL_BXOR;
        }
      case 137: break;
      case 41:
        { return ERL_OF;
        }
      case 138: break;
      case 74:
        { return ERL_CATCH;
        }
      case 139: break;
      case 18:
        { return ERL_COMMA;
        }
      case 140: break;
      case 42:
        { return ERL_OR;
        }
      case 141: break;
      case 45:
        { return ERL_OP_GT_EQ;
        }
      case 142: break;
      case 7:
        { return ERL_ATOM_NAME;
        }
      case 143: break;
      case 76:
        { return ERL_AFTER;
        }
      case 144: break;
      case 22:
        { return ERL_OP_PLUS;
        }
      case 145: break;
      case 47:
        { return ERL_OP_LT_MINUS;
        }
      case 146: break;
      case 1:
        { return com.intellij.psi.TokenType.BAD_CHARACTER;
        }
      case 147: break;
      case 12:
        { return ERL_BRACKET_LEFT;
        }
      case 148: break;
      case 26:
        { return ERL_PAR_RIGHT;
        }
      case 149: break;
      case 21:
        { return ERL_OP_LT;
        }
      case 150: break;
      case 11:
        // Opening quote of a quoted atom: switch to the IN_QUOTES state.
        { yybegin(IN_QUOTES); return ERL_SINGLE_QUOTE;
        }
      case 151: break;
      case 49:
        { return ERL_BIN_START;
        }
      case 152: break;
      case 3:
        { return ERL_RADIX;
        }
      case 153: break;
      case 69:
        { return ERL_CASE;
        }
      case 154: break;
      case 40:
        { return ERL_IF;
        }
      case 155: break;
      case 64:
        { return ERL_TRY;
        }
      case 156: break;
      case 56:
        { return ERL_DOT_DOT_DOT;
        }
      case 157: break;
      case 67:
        { return ERL_AND;
        }
      case 158: break;
      default:
        // No action matched: either clean EOF (return null) or a scanner bug.
        if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
          zzAtEOF = true;
          return null;
        }
        else {
          zzScanError(ZZ_NO_MATCH);
        }
    }
  }
}
}
|
LorekSSBM/runelite
|
runelite-client/src/main/java/net/runelite/client/plugins/achievementdiary/diaries/KaramjaDiaryRequirement.java
|
<reponame>LorekSSBM/runelite<filename>runelite-client/src/main/java/net/runelite/client/plugins/achievementdiary/diaries/KaramjaDiaryRequirement.java<gh_stars>10-100
/*
* Copyright (c) 2018, Marshall <https://github.com/marshdevs>
* Copyright (c) 2018, Adam <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.achievementdiary.diaries;
import net.runelite.api.Quest;
import net.runelite.api.Skill;
import net.runelite.client.plugins.achievementdiary.CombatLevelRequirement;
import net.runelite.client.plugins.achievementdiary.GenericDiaryRequirement;
import net.runelite.client.plugins.achievementdiary.OrRequirement;
import net.runelite.client.plugins.achievementdiary.QuestRequirement;
import net.runelite.client.plugins.achievementdiary.SkillRequirement;
/**
 * Skill, quest and combat prerequisites for each task of the Karamja
 * achievement diary, grouped by difficulty tier.
 *
 * Each add() call pairs the task's text with the requirements needed to
 * complete it. The task strings presumably must match the in-game diary
 * wording exactly — confirm against GenericDiaryRequirement before editing them.
 * Tasks with no requirements are intentionally omitted.
 */
public class KaramjaDiaryRequirement extends GenericDiaryRequirement
{
    public KaramjaDiaryRequirement()
    {
        // EASY
        add("Use the rope swing to travel to the small island north-west of Karamja, where the " +
            "moss giants are.",
            new SkillRequirement(Skill.AGILITY, 10));
        add("Mine some gold from the rocks on the north-west peninsula of Karamja.",
            new SkillRequirement(Skill.MINING, 40));
        add("Explore Cairn Island to the west of Karamja.",
            new SkillRequirement(Skill.AGILITY, 15));

        // MEDIUM
        add("Claim a ticket from the Agility Arena in Brimhaven.",
            new SkillRequirement(Skill.AGILITY, 30));
        // The boolean flag on QuestRequirement marks a partial-completion
        // requirement ("started") — TODO confirm against QuestRequirement.
        add("Discover hidden wall in the dungeon below the volcano.",
            new QuestRequirement(Quest.DRAGON_SLAYER, true));
        add("Visit the Isle of Crandor via the dungeon below the volcano.",
            new QuestRequirement(Quest.DRAGON_SLAYER, true));
        add("Use Vigroy and Hajedy's cart service.",
            new QuestRequirement(Quest.SHILO_VILLAGE));
        add("Earn 100% favour in the village of Tai Bwo Wannai.",
            new SkillRequirement(Skill.WOODCUTTING, 10),
            new QuestRequirement(Quest.JUNGLE_POTION));
        add("Cook a spider on a stick.",
            new SkillRequirement(Skill.COOKING, 16));
        add("Charter the Lady of the Waves from Cairn Isle to Port Khazard.",
            new QuestRequirement(Quest.SHILO_VILLAGE));
        add("Cut a log from a teak tree.",
            new SkillRequirement(Skill.WOODCUTTING, 35),
            new QuestRequirement(Quest.JUNGLE_POTION));
        add("Cut a log from a mahogany tree.",
            new SkillRequirement(Skill.WOODCUTTING, 50),
            new QuestRequirement(Quest.JUNGLE_POTION));
        add("Catch a karambwan.",
            new SkillRequirement(Skill.FISHING, 65),
            new QuestRequirement(Quest.TAI_BWO_WANNAI_TRIO, true));
        add("Exchange gems for a machete.",
            new QuestRequirement(Quest.JUNGLE_POTION));
        add("Use the gnome glider to travel to Karamja.",
            new QuestRequirement(Quest.THE_GRAND_TREE));
        add("Grow a healthy fruit tree in the patch near Brimhaven.",
            new SkillRequirement(Skill.FARMING, 27));
        add("Trap a horned graahk.",
            new SkillRequirement(Skill.HUNTER, 41));
        add("Chop the vines to gain deeper access to Brimhaven Dungeon.",
            new SkillRequirement(Skill.WOODCUTTING, 10));
        add("Cross the lava using the stepping stones within Brimhaven Dungeon.",
            new SkillRequirement(Skill.AGILITY, 12));
        add("Climb the stairs within Brimhaven Dungeon.",
            new SkillRequirement(Skill.WOODCUTTING, 10));
        add("Charter a ship from the shipyard in the far east of Karamja.",
            new QuestRequirement(Quest.THE_GRAND_TREE));
        // Either quest unlocks gem-rock mining, hence the OrRequirement.
        add("Mine a red topaz from a gem rock.",
            new SkillRequirement(Skill.MINING, 40),
            new OrRequirement(
                new QuestRequirement(Quest.SHILO_VILLAGE),
                new QuestRequirement(Quest.JUNGLE_POTION)
            )
        );

        // HARD
        add("Craft some nature runes.",
            new SkillRequirement(Skill.RUNECRAFT, 44),
            new QuestRequirement(Quest.RUNE_MYSTERIES));
        add("Cook a karambwan thoroughly.",
            new SkillRequirement(Skill.COOKING, 30),
            new QuestRequirement(Quest.TAI_BWO_WANNAI_TRIO));
        add("Kill a deathwing in the dungeon under the Kharazi Jungle.",
            new SkillRequirement(Skill.WOODCUTTING, 15),
            new SkillRequirement(Skill.STRENGTH, 50),
            new SkillRequirement(Skill.AGILITY, 50),
            new SkillRequirement(Skill.THIEVING, 50),
            new SkillRequirement(Skill.MINING, 52),
            new QuestRequirement(Quest.LEGENDS_QUEST));
        add("Use the crossbow short cut south of the volcano.",
            new SkillRequirement(Skill.AGILITY, 53),
            new SkillRequirement(Skill.RANGED, 42),
            new SkillRequirement(Skill.STRENGTH, 21));
        add("Collect 5 palm leaves.",
            new SkillRequirement(Skill.WOODCUTTING, 15),
            new QuestRequirement(Quest.LEGENDS_QUEST));
        add("Be assigned a Slayer task by Duradel north of Shilo Village.",
            new CombatLevelRequirement(100),
            new SkillRequirement(Skill.SLAYER, 50),
            new QuestRequirement(Quest.SHILO_VILLAGE));
        add("Kill a metal dragon in Brimhaven Dungeon.",
            new SkillRequirement(Skill.AGILITY, 12),
            new SkillRequirement(Skill.WOODCUTTING, 34));

        // ELITE
        add("Craft 56 Nature runes at once.",
            new SkillRequirement(Skill.RUNECRAFT, 91));
        add("Check the health of a palm tree in Brimhaven.",
            new SkillRequirement(Skill.FARMING, 68));
        add("Create an antivenom potion whilst standing in the horse shoe mine.",
            new SkillRequirement(Skill.HERBLORE, 87));
        add("Check the health of your Calquat tree patch.",
            new SkillRequirement(Skill.FARMING, 72));
    }
}
|
zoopaper/netty-study
|
src/main/java/org/netty/study/channelhandler/ChannelCloseOutbound.java
|
<reponame>zoopaper/netty-study
package org.netty.study.channelhandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
/**
* @author krisjin
*/
/**
 * Demo outbound handler that logs when the channel is being closed and then
 * propagates the close operation (with its promise) down the pipeline.
 *
 * @author krisjin
 */
public class ChannelCloseOutbound extends ChannelHandlerAdapter {

    /**
     * Intercepts the outbound close: logs a message, then forwards the close
     * together with the original promise so callers are still notified when
     * the close actually completes.
     *
     * @param ctx     the handler context for this channel
     * @param promise the promise to complete once the channel is closed
     * @throws Exception if forwarding the close fails
     */
    public void close(ChannelHandlerContext ctx, ChannelPromise promise)
            throws Exception {
        System.out.println("TCP Closing!");
        // Never swallow the promise — pass it through to the next handler.
        ctx.close(promise);
    }

    /** Scratch main: prints the digit count of Long.MAX_VALUE (19). */
    public static void main(String[] args) {
        // Fixed: removed an unused local ("hex") that was computed and discarded.
        System.out.println((String.valueOf(Long.MAX_VALUE).length()));
    }
}
|
freebird-airlines/WhirlyGlobe
|
ios/library/WhirlyGlobe-MaplyComponent/include/MaplyLAZShader.h
|
//
// LAZShader.h
// LidarViewer
//
// Created by <NAME> on 10/27/15.
// Copyright © 2015-2017 mousebird consulting. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "MaplyShader.h"
#import "MaplyBaseViewController.h"
// Name of the point size uniform attribute.
extern NSString* const kMaplyLAZShaderPointSize;
// Name of the zMin uniform attribute (for the ramp shader)
extern NSString* const kMaplyLAZShaderZMin;
// Name of the zMax uniform attribute (for the ramp shader)
extern NSString* const kMaplyLAZShaderZMax;
// Builds a simple point shader that passes the per-vertex colors through unchanged.
// NOTE(review): presumably returns nil on shader compile/link failure — confirm in the implementation.
MaplyShader *MaplyLAZBuildPointShader(NSObject<MaplyRenderControllerProtocol> *viewC);
// Builds a point shader that looks up point colors in the given ramp texture,
// presumably indexed between the zMin/zMax uniforms above — confirm in the implementation.
MaplyShader *MaplyLAZBuildRampPointShader(NSObject<MaplyRenderControllerProtocol> *viewC,UIImage *colorRamp);
|
chusopr/cloudbreak
|
template-manager-blueprint/src/main/java/com/sequenceiq/cloudbreak/blueprint/SmartsenseConfigurationLocator.java
|
<filename>template-manager-blueprint/src/main/java/com/sequenceiq/cloudbreak/blueprint/SmartsenseConfigurationLocator.java<gh_stars>0
package com.sequenceiq.cloudbreak.blueprint;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.sequenceiq.cloudbreak.domain.SmartSenseSubscription;
@Component
public class SmartsenseConfigurationLocator {

    private static final String HST_SERVER_COMPONENT = "HST_SERVER";

    /** Global toggle; SmartSense is only configurable when this property is true. */
    @Value("${cb.smartsense.configure:false}")
    private boolean configureSmartSense;

    /**
     * @param smartSenseSubscriptionId the SmartSense subscription id, if any
     * @return true when SmartSense configuration is enabled and a subscription id is present
     */
    public boolean smartsenseConfigurableBySubscriptionId(Optional<String> smartSenseSubscriptionId) {
        return configureSmartSense && smartSenseSubscriptionId.isPresent();
    }

    /**
     * Convenience overload taking the whole subscription object.
     *
     * @param subscription the SmartSense subscription, if any
     * @return true when SmartSense configuration is enabled and the subscription carries an id
     */
    public boolean smartsenseConfigurable(Optional<SmartSenseSubscription> subscription) {
        // Optional.map replaces the isPresent()/get() ternary: an absent subscription
        // stays absent, a present one is mapped to its id. As a side benefit, a null
        // subscription id now yields "not configurable" instead of an NPE
        // (Optional.of(null) would have thrown).
        return smartsenseConfigurableBySubscriptionId(subscription.map(SmartSenseSubscription::getSubscriptionId));
    }
}
|
mhotchen/programming-in-scala
|
src/simulation/Test.scala
|
package simulation
object Test extends App {
val circuit = new Circuit with Adders
val ain = new circuit.Wire("ain", true)
val bin = new circuit.Wire("bin", false)
val cin = new circuit.Wire("cin", true)
val sout = new circuit.Wire("sout")
val cout = new circuit.Wire("cout")
circuit.probe(ain)
circuit.probe(bin)
circuit.probe(cin)
circuit.probe(sout)
circuit.probe(cout)
circuit.fullAdder(ain, bin, cin, sout, cout)
circuit.start()
}
|
wonedays/titanium_mobile
|
common/Resources/ti.internal/extensions/node/events.js
|
/**
 * Registers `listener` for `eventName` on `emitter`, emitting the
 * 'newListener' meta-event first and warning when the per-event listener
 * count passes the emitter's max-listeners cap.
 * @param {EventEmitter} emitter the EventEmitter instance to register on
 * @param {string} eventName the name of the event to register for
 * @param {function} listener the callback to invoke when the event is emitted
 * @param {boolean} prepend whether to prepend (true) or append (false) the listener
 * @returns {EventEmitter} the emitter, for chaining
 */
function _addListener(emitter, eventName, listener, prepend) {
	// Lazily create the event-name -> listener-array registry.
	if (!emitter._eventsToListeners) {
		emitter._eventsToListeners = {};
	}
	// Fire 'newListener' BEFORE registering, so a handler for it can never
	// trigger itself (which would recurse forever).
	if (emitter._eventsToListeners.newListener) {
		emitter.emit('newListener', eventName, listener);
	}
	// Re-read after the emit: a 'newListener' handler may have registered more.
	const bucket = emitter._eventsToListeners[eventName] || [];
	if (prepend) {
		bucket.unshift(listener);
	} else {
		bucket.push(listener);
	}
	emitter._eventsToListeners[eventName] = bucket;
	// Warn (non-fatally) once the count for this event exceeds the cap.
	const limit = emitter.getMaxListeners();
	const count = bucket.length;
	if (limit > 0 && count > limit) {
		const warning = new Error(`Possible EventEmitter memory leak detected. ${count} ${eventName} listeners added. Use emitter.setMaxListeners() to increase limit`);
		warning.name = 'MaxListenersExceededWarning';
		warning.emitter = emitter;
		warning.type = eventName;
		warning.count = count;
		process.emitWarning(warning);
	}
	return emitter;
}
/**
 * Wraps a listener so that it removes itself from the emitter on first invocation
 * and only then forwards the event to the original listener.
 * The returned function carries a `.listener` property pointing at the original,
 * so removeListener()/listeners() can "unwrap" it.
 */
function onceWrap(emitter, eventName, listener) {
	// Events fire with `this` set to the emitter, so we bind the wrapper to a
	// private state object that records everything the wrapper needs.
	const state = { emitter, eventName, listener };
	function fireOnce(...eventArgs) {
		this.emitter.removeListener(this.eventName, this.wrappedFunc); // remove ourselves first
		this.listener.apply(this.emitter, eventArgs); // then forward the event callback
	}
	const wrapped = fireOnce.bind(state);
	wrapped.listener = listener; // expose the original for "unwrapping"
	state.wrappedFunc = wrapped;
	return wrapped;
}
// many consumers make use of this via util.inherits, which does not chain constructor calls!
// so we need to be aware that _eventsToListeners may be null/undefined on instances, and check in methods before accessing it
/**
 * Minimal shim of Node.js' events.EventEmitter.
 * Listener registries live in `this._eventsToListeners` (event name -> array of listeners).
 */
export default class EventEmitter {
	constructor() {
		this._eventsToListeners = {};
		this._maxListeners = undefined;
	}

	/**
	 * Appends a listener for the given event.
	 * @returns {EventEmitter} this, for chaining
	 */
	addListener(eventName, listener) {
		return _addListener(this, eventName, listener, false);
	}

	/** Alias of addListener. @returns {EventEmitter} this */
	on(eventName, listener) {
		return this.addListener(eventName, listener);
	}

	/** Inserts a listener at the front of the listener list. @returns {EventEmitter} this */
	prependListener(eventName, listener) {
		return _addListener(this, eventName, listener, true);
	}

	/**
	 * Registers a one-shot listener that removes itself after its first invocation.
	 * Fixed: now returns `this` for chaining, matching Node's EventEmitter#once.
	 */
	once(eventName, listener) {
		return this.on(eventName, onceWrap(this, eventName, listener));
	}

	/**
	 * One-shot variant of prependListener.
	 * Fixed: now returns `this` for chaining, matching Node's API.
	 */
	prependOnceListener(eventName, listener) {
		return this.prependListener(eventName, onceWrap(this, eventName, listener));
	}

	/**
	 * Removes the most recently added occurrence of the listener (handles once-wrappers).
	 * Emits 'removeListener' (with the unwrapped listener) only if something was removed.
	 * @returns {EventEmitter} this
	 */
	removeListener(eventName, listener) {
		if (!this._eventsToListeners) { // no events/listeners registered
			return this;
		}
		const eventListeners = this._eventsToListeners[eventName] || [];
		const length = eventListeners.length;
		let foundIndex = -1;
		let unwrappedListener;
		// Need to search LIFO, and need to handle wrapped functions (once wrappers)
		for (let i = length - 1; i >= 0; i--) {
			if (eventListeners[i] === listener || eventListeners[i].listener === listener) {
				foundIndex = i;
				unwrappedListener = eventListeners[i].listener;
				break;
			}
		}
		if (foundIndex !== -1) {
			if (length === 1) { // removing the last entry: drop the event type from the registry
				delete this._eventsToListeners[eventName];
			} else { // 2+ listeners: splice out the found one in place
				eventListeners.splice(foundIndex, 1);
			}
			// Don't emit if there's no listeners for 'removeListener' type!
			if (this._eventsToListeners.removeListener) {
				this.emit('removeListener', eventName, unwrappedListener || listener);
			}
		}
		return this;
	}

	/** Alias of removeListener. @returns {EventEmitter} this */
	off(eventName, listener) {
		return this.removeListener(eventName, listener);
	}

	/**
	 * Synchronously invokes every listener registered for the event.
	 * @returns {boolean} true if the event had at least one listener
	 */
	emit(eventName, ...args) {
		if (!this._eventsToListeners) { // no events/listeners registered
			return false;
		}
		const eventListeners = this._eventsToListeners[eventName] || [];
		// Operate on a copy: listeners may get removed as a side-effect of being called
		// (e.g. once-wrappers remove themselves).
		for (const listener of eventListeners.slice()) {
			listener.call(this, ...args);
		}
		return eventListeners.length !== 0;
	}

	/** @returns {number} how many listeners are registered for the event */
	listenerCount(eventName) {
		if (!this._eventsToListeners) { // no events/listeners registered
			return 0;
		}
		const eventListeners = this._eventsToListeners[eventName] || [];
		return eventListeners.length;
	}

	/** @returns {string[]} the names of events that currently have listeners */
	eventNames() {
		return Object.getOwnPropertyNames(this._eventsToListeners || {});
	}

	/** @returns {function[]} the listeners for the event, once-wrappers unwrapped */
	listeners(eventName) {
		if (!this._eventsToListeners) { // no events/listeners registered
			return [];
		}
		const raw = (this._eventsToListeners[eventName] || []);
		return raw.map(l => l.listener || l); // unwrap once-wrappers, fall back to the raw function
	}

	/** @returns {function[]} a copy of the listener array, wrappers included */
	rawListeners(eventName) {
		if (!this._eventsToListeners) { // no events/listeners registered
			return [];
		}
		return (this._eventsToListeners[eventName] || []).slice(0); // return a copy
	}

	/** @returns {number} the per-instance limit, or the class-wide default */
	getMaxListeners() {
		return this._maxListeners || EventEmitter.defaultMaxListeners;
	}

	setMaxListeners(n) {
		this._maxListeners = n; // TODO: Type check n, make sure >= 0 (0 equals no limit)
		return this;
	}

	/**
	 * Removes all listeners, or those of the specified event.
	 * Emits 'removeListener' per removed listener only when someone listens for it.
	 * @returns {EventEmitter} this
	 */
	removeAllListeners(eventName) {
		if (!this._eventsToListeners) { // no events/listeners registered
			this._eventsToListeners = {}; // initialize it
		}
		if (!this._eventsToListeners.removeListener) {
			// no need to emit! we can just wipe!
			if (eventName === undefined) {
				// remove every type!
				this._eventsToListeners = {};
			} else {
				// remove specific type
				delete this._eventsToListeners[eventName];
			}
			return this;
		}
		// yuck, we'll have to emit 'removeListener' events as we go
		if (eventName === undefined) {
			// Remove all types (but do 'removeListener' last!)
			const names = Object.keys(this._eventsToListeners).filter(name => name !== 'removeListener');
			names.forEach(name => this.removeAllListeners(name));
			this.removeAllListeners('removeListener');
			this._eventsToListeners = {};
		} else {
			// remove listeners for one type, back to front (Last-in, first-out, except where prepend f-ed it up)
			const listeners = this._eventsToListeners[eventName] || [];
			for (let i = listeners.length - 1; i >= 0; i--) {
				this.removeListener(eventName, listeners[i]);
			}
		}
		return this;
	}
}
// Class-wide default listener limit; instances may override via setMaxListeners().
EventEmitter.defaultMaxListeners = 10;
// Deprecated-in-Node static helper kept for compatibility: delegates to the instance method.
EventEmitter.listenerCount = (emitter, eventName) => emitter.listenerCount(eventName);
// Mirror Node's module shape, where `require('events').EventEmitter === require('events')`.
EventEmitter.EventEmitter = EventEmitter;
|
scelestino/flowly4j
|
flowly4j-example/src/main/java/com/flowly4j/example/FinishA.java
|
package com.flowly4j.example;
import com.flowly4j.core.tasks.FinishTask;
/**
 * Example task extending {@link FinishTask} with no customization — presumably
 * the terminal step of the example flow (behavior is inherited entirely from
 * FinishTask; confirm against the flowly4j core docs).
 */
class FinishA extends FinishTask {
}
|
qianranow/cplusplus
|
cpp_03standard_template_library/CN_13mapandmultimap.cpp
|
<gh_stars>0
//
// Created by qianranow on 2022/2/25.
//
#include <iostream>
#include <map>
#include <string>
using namespace std;
//map is an associative container: it keeps its elements (key/value pairs) ordered by key
//map and multimap differ in that multimap allows duplicate keys (the original comment said "multiset", but multimap is meant here)
//the first member of each pair is the key (immutable); the second is the mapped value (mutable)
//underlying data structure: red-black tree, a self-balancing binary search tree
//iterator category: bidirectional
// Demonstrates that map iterators are bidirectional: ++ and -- compile,
// but random access (it + 1) does not.
// NOTE(review): incrementing/decrementing a default-constructed (singular)
// iterator is undefined behavior — this snippet is illustrative only and is
// never called from main().
void test()
{
map<int, string>::iterator it;
it++;
it--;
//it + 1; // error: bidirectional iterators do not support random access
//bidirectional iterator
}
/*
Insertion operations
map.insert(...); // insert an element; returns pair<iterator, bool>
map<int, string> mapStu;
// Option 1: insert a pair object
mapStu.insert(pair<int, string>(3, "小张"));
// Option 2: insert via make_pair
mapStu.insert(make_pair(-1, "校长"));
// Option 3: insert via value_type
mapStu.insert(map<int, string>::value_type(1, "小李"));
// Option 4: insert (or overwrite) via operator[]
mapStu[3] = "小刘";
mapStu[5] = "小王";
*/
// Prints every key/value pair of a map-like container to std::cout, one per line.
// Fixed bug: the loop previously declared the iterator as map<int, string>::iterator
// regardless of T, which fails to compile for maps with a custom comparator
// (e.g. test02's map<int, string, mapFunc>). A range-based for works for any T.
template<class T>
void printMap(T &m)
{
    for (const auto& entry : m)
    {
        std::cout << "key:" << entry.first << "value:" << entry.second << std::endl;
    }
}
// Demonstrates the four insertion styles (pair, make_pair, value_type, operator[])
// on a default ascending-ordered map, then prints the contents via printMap.
void test01()
{
map<int, string> mymap;
mymap.insert(pair<int, string>(3, "aaa"));
mymap.insert(make_pair(6, "bbb"));
mymap.insert(map<int, string>::value_type(2, "ccc"));
mymap[4] = "ddd";
printMap(mymap);
}
// Custom ordering rule for map keys: sorts in descending order.
struct mapFunc
{
    // Strict weak ordering: lhs sorts before rhs exactly when lhs is larger.
    bool operator()(const int& lhs, const int& rhs) const
    {
        return rhs < lhs;
    }
};
// Demonstrates a map with a custom ordering rule (mapFunc: descending keys).
// NOTE(review): printMap iterates with map<int, string>::iterator, whose type does
// not match map<int, string, mapFunc>::iterator — verify this call compiles as written.
void test02()
{
map<int, string, mapFunc> m;
m.insert(pair<int, string>(3, "aaa"));
m.insert(make_pair(6, "bbb"));
m.insert(map<int, string>::value_type(2, "ccc"));
m[4] = "ddd";
printMap(m);
}
//note: reading a missing key with operator[] INSERTS that key with a default-constructed value
void test03()
{
map<int, string> mymap;
mymap.insert(pair<int, string>(3, "aaa"));
mymap.insert(make_pair(6, "bbb"));
mymap.insert(map<int, string>::value_type(2, "ccc"));
mymap[4] = "ddd";
printMap(mymap);
cout << "size:" << mymap.size() << endl;
cout << mymap[100] << endl;//inserts key 100 and returns its (empty) mapped value
cout << "size:" << mymap.size() << endl;
}
/*
Lookup operations
find(key);            // returns an iterator to the element with the given key, or map.end() if absent
count(keyElem);       // number of pairs whose key equals keyElem: 0 or 1 for map, possibly >1 for multimap
lower_bound(keyElem); // iterator to the first element with key >= keyElem
upper_bound(keyElem); // iterator to the first element with key > keyElem
equal_range(keyElem); // pair of iterators (lower_bound, upper_bound) bracketing elements with key == keyElem
*/
// Demonstrates the lookup operations: find, lower_bound, upper_bound, equal_range.
void test04()
{
map<int, string> mymap;
mymap[1] = "aaa";
mymap[2] = "bbb";
mymap[3] = "ccc";
mymap[4] = "ddd";
mymap[5] = "eee";
map<int, string>::iterator it = mymap.find(2);
if (it==mymap.end())
{
cout << "查找失败" << endl;
}
else
{
cout << "key:" << it->first << "value:" << it->second << endl;
}
//find the smallest key that is >= 3
it = mymap.lower_bound(3);
if (it==mymap.end())
{
cout << "查找失败" << endl;
}
else
{
cout << "key:" << it->first << "value:" << it->second << endl;
}
//find the smallest key that is > 3
it = mymap.upper_bound(3);
if (it==mymap.end())
{
cout << "查找失败" << endl;
}
else
{
cout << "key:" << it->first << "value:" << it->second << endl;
}
cout << "----------------------" << endl;
//equal_range(3) returns two iterators: the first element with key >= 3 and the
//first with key > 3; when key 3 exists they bracket exactly that element
pair<map<int, string>::iterator, map<int, string>::iterator> ret = mymap.equal_range(3);
if (ret.first != mymap.end())
{
cout << "key:" << ret.first->first << "value:" << ret.first->second << endl;
}
if (ret.second != mymap.end())
{
cout << "key:" << ret.second->first << "value:" << ret.second->second << endl;
}
}
// Entry point: only the lookup demo (test04) is exercised; the other demos
// (test, test01, test02, test03) are defined but never called.
int main()
{
test04();
return 0;
}
|
johnson-ajar/ieds-circuit
|
com.soa.circuit.backend/src/main/java/com/soa/circuit/backend/sample/HelloWorldEndpoint.java
|
package com.soa.circuit.backend.sample;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.soa.circuit.backend.rs.Service;
@Component
@Path("/hello")
public class HelloWorldEndpoint {

    private final Service service;
    private final ObjectMapper objectMapper;

    public HelloWorldEndpoint(Service service, ObjectMapper objectMapper) {
        this.service = service;
        this.objectMapper = objectMapper;
    }

    /** GET /hello — plain-text greeting built from the injected service's message. */
    @GET
    public String message() {
        return "Hello " + this.service.message();
    }

    /**
     * Simple DTO serialized to JSON by the endpoints below.
     * Made static: Jackson cannot handle non-static inner classes reliably because
     * they carry a hidden reference to the enclosing instance (and have no usable
     * default constructor for deserialization).
     */
    private static class SimpleClass {
        @SuppressWarnings("unused")
        public String foo = "bar";
        @SuppressWarnings("unused")
        public String foo2 = "ba2r";
    }

    /** GET /hello/world — returns a single SimpleClass rendered as JSON. */
    @GET
    @Path("/world")
    @Produces(MediaType.APPLICATION_JSON)
    public SimpleClass messageJson() {
        SimpleClass simpleClass = new SimpleClass();
        // NOTE(review): ObjectMapper.toString() yields the mapper's identity string
        // (e.g. "com.fasterxml...ObjectMapper@1a2b3c"), not JSON — confirm intended.
        simpleClass.foo2 = objectMapper.toString();
        return simpleClass;
    }

    /** GET /hello/worlds — returns a two-element list of SimpleClass rendered as JSON. */
    @GET
    @Path("/worlds")
    @Produces(MediaType.APPLICATION_JSON)
    public List<SimpleClass> getWorlds() {
        List<SimpleClass> result = new ArrayList<>();
        SimpleClass c1 = new SimpleClass();
        c1.foo = objectMapper.toString();
        SimpleClass c2 = new SimpleClass();
        c2.foo2 = objectMapper.toString();
        result.add(c1);
        result.add(c2);
        return result;
    }
}
|
xcorail/OTB
|
Modules/Wrappers/ApplicationEngine/include/otbWrapperListViewParameter.h
|
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbWrapperListViewParameter_h
#define otbWrapperListViewParameter_h
#include "otbWrapperParameterGroup.h"
#include <string>
namespace otb
{
namespace Wrapper
{
/** \class ListViewParameter
* \brief This class represent a ListWidget parameter for the wrapper framework
*
* The row selected in the ListWidget are stored in a std::vector. The
* method GetSelectedItems() allow the user to access to this method.
*
*
* \ingroup OTBApplicationEngine
*/
class OTBApplicationEngine_EXPORT ListViewParameter
: public Parameter
{
public:
/** Standard class typedef */
typedef ListViewParameter Self;
typedef Parameter Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
/** Defining ::New() static method */
itkNewMacro(Self);
/** RTTI support */
itkTypeMacro(ListViewParameter, Parameter);
/** When true, the widget accepts at most one selected row. */
itkSetMacro(SingleSelection,bool);
itkGetMacro(SingleSelection,bool);
itkBooleanMacro(SingleSelection);
/** Add a value to the choice */
void AddChoice( std::string choicekey, std::string choiceName );
/** Get the key of a specific choice value */
std::string GetChoiceKey( int i );
/** Get the list of the different choice keys */
std::vector<std::string> GetChoiceKeys();
/** Get the long name of a specific choice value */
std::string GetChoiceName( int i );
/** Get the list of the different choice names */
std::vector<std::string> GetChoiceNames();
/** Get the number of available choice */
unsigned int GetNbChoices( void );
/** Set choice value */
virtual void SetValue(unsigned int v);
/** Set choice value by its key */
virtual void SetValue(std::string choiceKey);
/** Return any value */
virtual unsigned int GetValue();
bool HasValue() const override
{
// a choice parameter always has a value
return true;
}
void ClearValue() override
{
// nothing to do : a choice parameter always has a value
}
/** Remove every registered choice. */
void ClearChoices();
/** Indices of the currently selected rows. */
std::vector<int> GetSelectedItems()
{
return m_SelectedItems;
}
/** Select rows by their display names (updates items and keys accordingly). */
void SetSelectedNames(std::vector<std::string> selectedNames);
std::vector<std::string> GetSelectedNames()
{
return m_SelectedNames;
}
/** Select rows by their keys (updates items and names accordingly). */
void SetSelectedKeys(std::vector<std::string> selectedKeys);
std::vector<std::string> GetSelectedKeys()
{
return m_SelectedKeys;
}
/** Set selected items using a lit of selected keys.
* OBSOLETE : this method is not needed anymore and does nothing. */
void SetSelectedItemsByKeys(){}
/** Set selected items using a lit of selected names.
* OBSOLETE : this method is not needed anymore and does nothing. */
void SetSelectedItemsByNames(){}
/** Select rows given their indices as decimal strings.
* NOTE(review): atoi() silently yields 0 for non-numeric input, so a malformed
* item would select row 0 — confirm callers always pass valid indices. */
void SetSelectedItems(std::vector<std::string> selectedItems)
{
std::vector<int> items;
for( unsigned int i=0; i<selectedItems.size(); i++ )
{
items.push_back( atoi( selectedItems[i].c_str() ) );
}
this->SetSelectedItems(items);
}
/** Select rows by index and refresh the cached name/key lists to match. */
void SetSelectedItems(std::vector<int> selectedItems)
{
m_SelectedItems = selectedItems;
m_SelectedNames.clear();
m_SelectedKeys.clear();
// update selected names and keys
std::vector<std::string> names = this->GetChoiceNames();
std::vector<std::string> keys = this->GetChoiceKeys();
for (unsigned int i=0 ; i<m_SelectedItems.size() ; i++)
{
m_SelectedNames.push_back(names[m_SelectedItems[i]]);
m_SelectedKeys.push_back(keys[m_SelectedItems[i]]);
}
}
protected:
/** Constructor */
ListViewParameter();
/** Destructor */
~ListViewParameter() override;
/** One selectable row: machine-readable key plus human-readable name. */
struct ListViewChoice
{
ListViewChoice() {}
std::string m_Key;
std::string m_Name;
};
typedef std::vector<ListViewChoice> ChoiceList;
ChoiceList m_ChoiceList;          // all available choices, in insertion order
unsigned int m_CurrentChoice;     // index of the current choice (see SetValue/GetValue)
std::vector<int> m_SelectedItems; // indices of selected rows
std::vector<std::string> m_SelectedKeys;  // keys mirroring m_SelectedItems
std::vector<std::string> m_SelectedNames; // names mirroring m_SelectedItems
bool m_SingleSelection;           // true when at most one row may be selected
private:
ListViewParameter(const ListViewParameter &) = delete;
void operator =(const ListViewParameter&) = delete;
}; // End class Parameter
} // End namespace Wrapper
} // End namespace otb
#endif
|
heavencross/alexa-auto-sdk
|
modules/core/platform/include/AACE/Logger/LoggerConfiguration.h
|
<reponame>heavencross/alexa-auto-sdk<filename>modules/core/platform/include/AACE/Logger/LoggerConfiguration.h
/*
* Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#ifndef AACE_LOGGER_LOGGER_CONFIGURATION_H
#define AACE_LOGGER_LOGGER_CONFIGURATION_H
#include <utility>
#include "AACE/Core/EngineConfiguration.h"
#include "LoggerEngineInterfaces.h"
/** @file */
namespace aace {
namespace logger {
namespace config {
class SinkConfiguration;
class RuleConfiguration;
/**
* @code{.json}
* {
* "aace.logger":
* {
* "sinks": [<Sink>],
* "rules": [{"sink": "<SINK_ID>", "rule": <Rule>}]
* }
* }
*
* <Sink>: {
* "id": "<SINK_ID>"
* "type": "<SINK_TYPE>",
* "config": {
* <CONFIG_DATA>
* },
* "rules": [<RuleConfiguration>]
* }
*
* <Rule>: {
* "level": "<LOG_LEVEL>",
* "source": "<SOURCE_FILTER>",
* "tag": "<TAG_FILTER>",
* "message": "<MESSAGE_FILTER>"
* }
* @endcode
*/
class LoggerConfiguration {
public:
/**
* Specifies the severity level of a log message
* @sa @c aace::logger::LoggerEngineInterface::Level
*/
using Level = aace::logger::LoggerEngineInterface::Level;
/**
* Factory method used to programmatically generate logger configuration data for a console sink.
* The data generated by this method is equivalent to providing the following JSON values
* in a configuration file:
*
* @code{.json}
* {
*   "aace.logger":
*   {
*      "sinks": [{
*          "id": "<SINK_ID>",
*          "type": "aace.logger.sink.console",
*          "rules": [{
*              "level": <LOG_LEVEL>
*          }]
*      }
*   }
* }
* @endcode
*
* @param [in] id The id of sink object
* @param [in] level The log level to be used to filter logs to this sink
* @return The generated @c EngineConfiguration object
*/
static std::shared_ptr<aace::core::config::EngineConfiguration> createConsoleSinkConfig( const std::string& id, Level level );
/**
* Factory method used to programmatically generate logger configuration data for a syslog sink.
* The data generated by this method is equivalent to providing the following JSON values
* in a configuration file:
*
* @code{.json}
* {
*   "aace.logger":
*   {
*      "sinks": [{
*          "id": "<SINK_ID>",
*          "type": "aace.logger.sink.syslog",
*          "rules": [{
*              "level": <LOG_LEVEL>
*          }]
*      }
*   }
* }
* @endcode
*
* @param [in] id The id of sink object
* @param [in] level The log level to be used to filter logs to this sink
* @return The generated @c EngineConfiguration object
*/
static std::shared_ptr<aace::core::config::EngineConfiguration> createSyslogSinkConfig( const std::string& id, Level level );
/**
* Factory method used to programmatically generate logger configuration data for a file sink.
* The data generated by this method is equivalent to providing the following JSON values
* in a configuration file:
*
* @code{.json}
* {
*   "aace.logger":
*   {
*      "sinks": [{
*          "id": "<SINK_ID>",
*          "type": "aace.logger.sink.file",
*          "config": {
*              "path": "<PATH>",
*              "prefix": "<PREFIX>",
*              "maxSize": <MAX_SIZE>,
*              "maxFiles": <MAX_FILES>,
*              "append": <APPEND>
*          }
*          "rules": [{
*              "level": <LOG_LEVEL>
*          }]
*      }
*   }
* }
* @endcode
*
* @param [in] id The id of sink object
* @param [in] level The log level to be used to filter logs to this sink
* @param [in] path The parent path where the log files will be written (must exist)
* @param [in] prefix The prefix name given to the log file
* @param [in] maxSize The maximum log file size in bytes
* @param [in] maxFiles The maximum number of log files to rotate
* @param [in] append @c true If the logs should be appended to the existing file, @c false if the file should be overwritten
* @return The generated @c EngineConfiguration object
*/
static std::shared_ptr<aace::core::config::EngineConfiguration> createFileSinkConfig( const std::string& id, Level level, const std::string& path, const std::string& prefix = "aace", uint32_t maxSize = 5242880, uint32_t maxFiles = 3, bool append = true );
/**
* Factory method used to programmatically generate configuration data for a logger rule.
* The data generated by this method is equivalent to providing the following JSON values
* in a configuration file:
*
* @code{.json}
* {
*   "aace.logger":
*   {
*      "rules": [{
*          "sink": "<SINK_ID>",
*          "rule": {
*              "level": <LOG_LEVEL>,
*              "source": "<SOURCE_FILTER>",
*              "tag": "<TAG_FILTER>",
*              "message": "<MESSAGE_FILTER>"
*          }
*      }
*   }
* }
* @endcode
*
* @param [in] sink The id of sink object to which this rule is applied
* @param [in] level The log level to be used as a filter for this rule
* @param [in] sourceFilter The source regex to be used as a filter for this rule
* @param [in] tagFilter The tag regex to be used as a filter for this rule
* @param [in] messageFilter The message regex to be used as a filter for this rule
* @return The generated @c EngineConfiguration object
*/
static std::shared_ptr<aace::core::config::EngineConfiguration> createLoggerRuleConfig( const std::string& sink, Level level, const std::string& sourceFilter = "", const std::string& tagFilter = "", const std::string& messageFilter = "" );
};
} // aace::logger::config
} // aace::logger
} // aace
#endif // AACE_LOGGER_LOGGER_CONFIGURATION_H
|
raismaulana/blogP
|
usecase/deleteuser/interactor.go
|
<gh_stars>1-10
package deleteuser
import (
"context"
"github.com/raismaulana/blogP/application/apperror"
"github.com/raismaulana/blogP/domain/repository"
)
//go:generate mockery --name Outport -output mocks/

// deleteUserInteractor is the default Inport implementation for the DeleteUser
// use case; all persistence work is delegated to the injected Outport.
type deleteUserInteractor struct {
outport Outport
}
// NewUsecase builds the default Inport implementation of the DeleteUser use
// case, wiring in the given output port.
func NewUsecase(outputPort Outport) Inport {
	interactor := &deleteUserInteractor{outport: outputPort}
	return interactor
}
// Execute the usecase DeleteUser.
//
// It runs inside a single repository transaction: the user is looked up by ID
// and, when found, deleted. An empty InportResponse is returned on success.
func (r *deleteUserInteractor) Execute(ctx context.Context, req InportRequest) (*InportResponse, error) {
res := &InportResponse{}
// code your usecase definition here ...
err := repository.WithTransaction(ctx, r.outport, func(ctx context.Context) error {
userObj, err := r.outport.FindUserByID(ctx, req.ID)
if err != nil {
// NOTE(review): every lookup failure is reported as ObjectNotFound, which also
// masks infrastructure errors (e.g. a lost DB connection) — confirm intended.
return apperror.ObjectNotFound.Var(userObj)
}
err = r.outport.DeleteUser(ctx, userObj)
if err != nil {
return err
}
return nil
})
if err != nil {
return nil, err
}
return res, nil
}
|
AWelk/bigtable-orm
|
compiler/src/test/java/com/bettercloud/bigtable/orm/GeneratedFieldTest.java
|
package com.bettercloud.bigtable.orm;
import com.bettercloud.bigtable.orm.annotations.Column;
import com.bettercloud.bigtable.orm.annotations.Entity;
import com.bettercloud.bigtable.orm.annotations.KeyComponent;
import com.bettercloud.bigtable.orm.annotations.Table;
import org.junit.Test;
import java.util.Objects;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Exercises the getters/setters generated by the bigtable-orm annotation
 * processor for the entity definitions declared in {@code FieldTableConfiguration}
 * below: plain String/Boolean fields, boxing of primitive fields, nested objects,
 * and versioned fields.
 */
public class GeneratedFieldTest {
@Test
public void testStringFieldGetterReturnsValueSetBySetter() {
final String stringValue = "hello world";
final StringFieldEntity entity = new StringFieldEntity();
entity.setStringField(stringValue);
final String result = entity.getStringField();
assertEquals(stringValue, result);
}
@Test
public void testBooleanFieldGetterReturnsValueSetBySetter() {
final Boolean booleanValue = true;
final BooleanFieldEntity entity = new BooleanFieldEntity();
entity.setBooleanField(booleanValue);
final Boolean result = entity.getBooleanField();
assertEquals(booleanValue, result);
}
// The source field is declared as primitive int, but the generated accessor pair
// is expected to use the nullable boxed Integer type.
@Test
public void testPrimitiveIntBecomesNullableBoxedValue() {
final PrimitiveIntEntity entity = new PrimitiveIntEntity();
entity.setIntField(5);
assertEquals(5, (int) entity.getIntField());
entity.setIntField(null);
assertNull(entity.getIntField());
}
@Test
public void testNestedObjectGetterReturnsValueSetBySetter() {
final NestedObject nestedObject = new NestedObject();
nestedObject.setNestedValue("hello");
final NestedObjectEntity entity = new NestedObjectEntity();
entity.setNestedObject(nestedObject);
final NestedObject result = entity.getNestedObject();
assertEquals(nestedObject, result);
}
// Versioned columns get an extra two-argument setter taking an explicit timestamp,
// alongside the plain single-argument setter.
@Test
public void testVersionedFieldGetterReturnsValueSetBySetter() {
final VersionedFieldEntity entity = new VersionedFieldEntity();
entity.setIntField(10, 1234L);
assertEquals(10, (int) entity.getIntField());
entity.setIntField(null, 4321L);
assertNull(entity.getIntField());
entity.setIntField(5);
assertEquals(5, (int) entity.getIntField());
entity.setIntField(null);
assertNull(entity.getIntField());
}
// NOTE: these declarations appear to drive code generation — the *Entity classes
// exercised above are presumably generated from them by the annotation processor
// (confirm against the processor's documentation); they are never instantiated here.
@Table("field_table")
private class FieldTableConfiguration {
@Entity(keyComponents = {
@KeyComponent(constant = "constant")
})
private class StringFieldEntity {
@Column(family = "family")
private String stringField;
}
@Entity(keyComponents = {
@KeyComponent(constant = "constant")
})
private class BooleanFieldEntity {
@Column(family = "family")
private Boolean booleanField;
}
@Entity(keyComponents = {
@KeyComponent(constant = "constant")
})
private class PrimitiveIntEntity {
@Column(family = "family")
private int intField;
}
@Entity(keyComponents = {
@KeyComponent(constant = "constant")
})
private class NestedObjectEntity {
@Column(family = "family")
private NestedObject nestedObject;
}
@Entity(keyComponents = {
@KeyComponent(constant = "constant")
})
private class VersionedFieldEntity {
@Column(family = "family", versioned = true)
private int intField;
}
}
// Simple value object used as a nested column type; equals/hashCode are required
// for the assertEquals comparison above.
public static class NestedObject {
private String nestedValue;
String getNestedValue() {
return nestedValue;
}
void setNestedValue(final String nestedValue) {
this.nestedValue = nestedValue;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final NestedObject that = (NestedObject) o;
return Objects.equals(nestedValue, that.nestedValue);
}
@Override
public int hashCode() {
return Objects.hash(nestedValue);
}
}
}
|
weikm/sandcarSimulation2
|
Source/Dynamics/RigidBody/Transform3d.h
|
<gh_stars>0
#pragma once
#include "Core/Matrix/matrix_mxn.h"
#include "Core/Vector/vector_nd.h"
#include "Core/Matrix/matrix_3x3.h"
#include "Core/Quaternion/quaternion.h"
#include "Inertia.h"
#include <memory>
namespace PhysIKA {
/**
 * @brief Rigid-body transformation applied to 6D spatial vectors.
 *
 * @note
 * A and B are frames with origins at O and P.
 * m_translation (r) is the OP vector expressed in frame A; m_rotation_q (q)
 * rotates a vector from frame A to frame B.
 */
template <typename T>
class Transform3d
{
public:
    Transform3d();
    Transform3d(const VectorBase<float>& r, const Quaternion<float>& q);
    Transform3d(const Vector3f& r, const Quaternion<float>& q);
    Transform3d(const Vector3f& r, const Matrix3f& m);

    void set(const Vector3f& r, const Quaternion<float>& q);
    void setTranslation(const VectorBase<float>& r);
    void setRotation(const Quaternion<float>& q);

    /// Transform a spatial force vector (torque part, force part).
    /// Fixed: this used to return a const reference to a function-local temporary,
    /// which is undefined behavior (dangling reference); it now returns by value.
    SpatialVector<T> transformF(const SpatialVector<T>& f) const;
    //MatrixMN<T> transformF(const MatrixMN<T>& f);

    /// Transform a spatial motion vector (angular part, linear part).
    /// Fixed: returns by value for the same dangling-reference reason as transformF.
    SpatialVector<T> transformM(const SpatialVector<T>& m) const;

    const Inertia<T> transformI(const Inertia<T>& inertia) const;
    const MatrixMN<T> transformI(const MatrixMN<T>& inertia) const;
    //MatrixMN<T> transformM(const MatrixMN<T>& m);

    // Merge two transformations into one
    // X = X1 * X2
    const Transform3d<T> operator*(const Transform3d<T>& trans) const;
    const Transform3d<T> inverseTransform() const;

    const Matrix3f& getRotationMatrix() const
    {
        return m_rotation;
    }
    const Quaternion<float>& getRotation() const
    {
        return m_rotation_q;
    }
    const Vector3f& getTranslation() const
    {
        return m_translation;
    }

private:
    Quaternion<float> m_rotation_q;  // rotation as a quaternion (kept in sync with m_rotation)
    Matrix3f          m_rotation;    // rotation as a 3x3 matrix
    Vector3f          m_translation; // origin offset r, expressed in the source frame
};

/// Default transform: zero translation, identity rotation matrix.
/// NOTE(review): m_rotation_q relies on Quaternion's default constructor being
/// identity — confirm against the Quaternion implementation.
template <typename T>
inline Transform3d<T>::Transform3d()
    : m_translation(0), m_rotation(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0)
{
}

template <typename T>
inline Transform3d<T>::Transform3d(const VectorBase<float>& r, const Quaternion<float>& q)
{
    // Copy component-wise: VectorBase does not convert directly to Vector3f.
    m_translation[0] = r[0];
    m_translation[1] = r[1];
    m_translation[2] = r[2];
    m_rotation       = q.get3x3Matrix();
    m_rotation_q     = q;
}

template <typename T>
inline Transform3d<T>::Transform3d(const Vector3f& r, const Quaternion<float>& q)
    : m_translation(r)
{
    m_rotation   = q.get3x3Matrix();
    m_rotation_q = q;
}

template <typename T>
inline Transform3d<T>::Transform3d(const Vector3f& r, const Matrix3f& m)
    : m_translation(r), m_rotation(m), m_rotation_q(m)
{
}

/// Reset both the translation and the rotation (matrix and quaternion stay in sync).
template <typename T>
inline void Transform3d<T>::set(const Vector3f& r, const Quaternion<float>& q)
{
    //m_translation = r;
    m_translation[0] = r[0];
    m_translation[1] = r[1];
    m_translation[2] = r[2];
    m_rotation       = q.get3x3Matrix();
    m_rotation_q     = q;
}

template <typename T>
inline void Transform3d<T>::setTranslation(const VectorBase<float>& r)
{
    m_translation[0] = r[0];
    m_translation[1] = r[1];
    m_translation[2] = r[2];
}

template <typename T>
inline void Transform3d<T>::setRotation(const Quaternion<float>& q)
{
    m_rotation   = q.get3x3Matrix();
    m_rotation_q = q;
}

/// Spatial force transform. Layout: indices 0-2 are the torque part, 3-5 the force part.
/// Translation first (new_torque = torque - r x f), then both parts are rotated.
template <typename T>
inline SpatialVector<T> Transform3d<T>::transformF(const SpatialVector<T>& f) const
{
    SpatialVector<T> res;
    // translation: new_torque = torque - r x f;
    res[0] = f[0];
    res[1] = f[1];
    res[2] = f[2];
    res[0] -= m_translation[1] * f[5] - m_translation[2] * f[4];
    res[1] -= m_translation[2] * f[3] - m_translation[0] * f[5];
    res[2] -= m_translation[0] * f[4] - m_translation[1] * f[3];
    // translation: new_f = f;
    res[3] = f[3];
    res[4] = f[4];
    res[5] = f[5];
    // rotation: new_torque = rotate * torque;
    T tmpx, tmpy, tmpz;
    tmpx = m_rotation(0, 0) * res[0] + m_rotation(0, 1) * res[1] + m_rotation(0, 2) * res[2];
    tmpy = m_rotation(1, 0) * res[0] + m_rotation(1, 1) * res[1] + m_rotation(1, 2) * res[2];
    tmpz = m_rotation(2, 0) * res[0] + m_rotation(2, 1) * res[1] + m_rotation(2, 2) * res[2];
    res[0] = tmpx;
    res[1] = tmpy;
    res[2] = tmpz;
    // rotation: new_f = rotate * f;
    tmpx = m_rotation(0, 0) * res[3] + m_rotation(0, 1) * res[4] + m_rotation(0, 2) * res[5];
    tmpy = m_rotation(1, 0) * res[3] + m_rotation(1, 1) * res[4] + m_rotation(1, 2) * res[5];
    tmpz = m_rotation(2, 0) * res[3] + m_rotation(2, 1) * res[4] + m_rotation(2, 2) * res[5];
    res[3] = tmpx;
    res[4] = tmpy;
    res[5] = tmpz;
    return res;
}

/// Spatial motion transform. Layout: indices 0-2 are the angular part, 3-5 the linear part.
/// Translation first (new_v = v - r x w), then both parts are rotated.
template <typename T>
inline SpatialVector<T> Transform3d<T>::transformM(const SpatialVector<T>& m) const
{
    SpatialVector<T> res;
    // translation: new_w = w;
    res[0] = m[0];
    res[1] = m[1];
    res[2] = m[2];
    // translation: new_v = v - r x w;
    res[3] = m[3];
    res[4] = m[4];
    res[5] = m[5];
    res[3] -= m_translation[1] * m[2] - m_translation[2] * m[1];
    res[4] -= m_translation[2] * m[0] - m_translation[0] * m[2];
    res[5] -= m_translation[0] * m[1] - m_translation[1] * m[0];
    // rotation: new_w = rotate * w;
    T tmpx, tmpy, tmpz;
    tmpx = m_rotation(0, 0) * res[0] + m_rotation(0, 1) * res[1] + m_rotation(0, 2) * res[2];
    tmpy = m_rotation(1, 0) * res[0] + m_rotation(1, 1) * res[1] + m_rotation(1, 2) * res[2];
    tmpz = m_rotation(2, 0) * res[0] + m_rotation(2, 1) * res[1] + m_rotation(2, 2) * res[2];
    res[0] = tmpx;
    res[1] = tmpy;
    res[2] = tmpz;
    // rotation: new_v = rotate * v;
    tmpx = m_rotation(0, 0) * res[3] + m_rotation(0, 1) * res[4] + m_rotation(0, 2) * res[5];
    tmpy = m_rotation(1, 0) * res[3] + m_rotation(1, 1) * res[4] + m_rotation(1, 2) * res[5];
    tmpz = m_rotation(2, 0) * res[3] + m_rotation(2, 1) * res[4] + m_rotation(2, 2) * res[5];
    res[3] = tmpx;
    res[4] = tmpy;
    res[5] = tmpz;
    return res;
}
// Transformation of inertia matrix
// I_2 = X_12f * I_1 * X_21m
// Implemented as two passes of the force transform: first on the columns of the
// matrix, then on the rows of the intermediate result.
// NOTE(review): the second pass computes (X_12f * res^T)^T, relying on the
// force/motion transform duality to stand in for right-multiplication by X_21m —
// verify against the spatial-algebra derivation used by this module.
template <typename T>
inline const MatrixMN<T> Transform3d<T>::transformI(const MatrixMN<T>& inertia) const
{
MatrixMN<T> res(6, 6);
/// I_tmp = (X_12f * I_1)
for (int i = 0; i < 6; ++i)
{
SpatialVector<T> tmpv(inertia(0, i), inertia(1, i), inertia(2, i), inertia(3, i), inertia(4, i), inertia(5, i));
SpatialVector<T> tmpres = this->transformF(tmpv);
/// res = (X_12f * I_1) = I_tmp
res(0, i) = tmpres[0];
res(1, i) = tmpres[1];
res(2, i) = tmpres[2];
res(3, i) = tmpres[3];
res(4, i) = tmpres[4];
res(5, i) = tmpres[5];
}
/// I_2 = I_tmp * X_21m = (X_12f * I_tmp^T)^T
for (int i = 0; i < 6; ++i)
{
SpatialVector<T> tmpv(res(i, 0), res(i, 1), res(i, 2), res(i, 3), res(i, 4), res(i, 5));
SpatialVector<T> tmpres = this->transformF(tmpv);
res(i, 0) = tmpres[0];
res(i, 1) = tmpres[1];
res(i, 2) = tmpres[2];
res(i, 3) = tmpres[3];
res(i, 4) = tmpres[4];
res(i, 5) = tmpres[5];
}
return res;
}
// Compose two spatial transforms into one.
//   X1 = [ E1       0  ]   X2 = [ E2       0  ]
//        [-E1*r1x   E1 ]        [-E2*r2x   E2 ]
// The product X = X1 * X2 works out to a transform with
//   E = E1 * E2
//   r = E2^T * r1 + r2
// i.e. rotations compose directly and the combined translation is the
// first translation rotated back into the second frame, plus the second.
template <typename T>
inline const Transform3d<T> Transform3d<T>::operator*(const Transform3d<T>& trans) const
{
	Transform3d<T> combined;
	// Rotations compose directly, both in quaternion and in matrix form.
	combined.m_rotation_q = this->m_rotation_q * trans.m_rotation_q;
	combined.m_rotation   = this->m_rotation * trans.m_rotation;
	// r = E2^T * r1 + r2 — composite translation expressed in the second frame.
	combined.m_translation = trans.m_translation + trans.m_rotation.transpose() * this->m_translation;
	return combined;
}
// Return the inverse of this spatial transform:
//   E_inv = E^T,  r_inv = -(E * r)
// so that inverseTransform() composed with *this is the identity.
template <typename T>
inline const Transform3d<T> Transform3d<T>::inverseTransform() const
{
	// Construct directly from the negated, rotated translation and the
	// transposed rotation matrix.
	return Transform3d<T>(-((this->m_rotation) * (this->m_translation)), this->m_rotation.transpose());
}
} // namespace PhysIKA
|
playbar/TeamTalk
|
ios/SVWebViewController/UIActivities/SVWebViewControllerActivity.h
|
<reponame>playbar/TeamTalk
//
// SVWebViewControllerActivity.h
// SVWeb
//
// Created by <NAME> on 11/11/2013.
//
//
#import <UIKit/UIKit.h>
/// Base UIActivity used by SVWebViewController's share sheet.
/// Concrete subclasses presumably open `URLToOpen` in an external app via
/// `schemePrefix` (e.g. a custom URL scheme) — confirm against the .m file.
@interface SVWebViewControllerActivity : UIActivity

/// The URL the activity acts on when performed.
/// NOTE(review): NSString/NSURL properties are conventionally `copy` rather
/// than `strong`; confirm whether mutable instances can be passed in here.
@property (nonatomic, strong) NSURL *URLToOpen;

/// URL scheme prefix for the target application — assumed to be prepended
/// or substituted when opening the URL; verify in the implementation.
@property (nonatomic, strong) NSString *schemePrefix;

@end
|
davelaursen/idealogue
|
angular1-node-hapi-mongodb/src/server/routes/technologyRoutes.js
|
var Hapi = require('hapi');
var Joi = require('joi');
var Boom = require('boom');
var TechnologyService = require('../services/technologyService.js');
module.exports = function(config) {
var techSvc = new TechnologyService(config.db);
return [
{
path: '/api/technologies',
method: 'GET',
handler: function(request, reply) {
techSvc.getAll(
function(err) {
reply(Boom.badImplementation());
},
function(obj) {
reply(obj || []);
}
);
},
config: {
description: 'Get all technologies',
auth: 'session',
validate: { }
}
},
{
path: '/api/technologies/{name}',
method: 'PUT',
handler: function(request, reply) {
techSvc.save(request.params.name,
function(err) {
reply(Boom.badImplementation());
},
function(doc, created) {
if (doc === null) {
reply(Boom.badImplementation());
} else {
reply().code(created ? 201 : 204);
}
}
);
},
config: {
description: 'Creates or updates a technology',
auth: 'session',
validate: {
params: {
name: Joi.string().required()
}
}
}
},
{
path: '/api/technologies/{name}',
method: 'DELETE',
handler: function(request, reply) {
techSvc.remove(request.params.name,
function(err) {
reply(Boom.badImplementation());
},
function(successful) {
if (!successful) {
reply(Boom.notFound());
} else {
reply().code(204);
}
}
);
},
config: {
description: 'Remove an existing technology',
auth: 'session',
validate: {
params: {
name: Joi.string().required()
}
}
}
}
];
};
|
sumannewton/dropwizard-dynamic-role-filter
|
src/test/java/com/sumannewton/dropwizard/auth/TestAuthorizer.java
|
<filename>src/test/java/com/sumannewton/dropwizard/auth/TestAuthorizer.java<gh_stars>1-10
package com.sumannewton.dropwizard.auth;
import io.dropwizard.auth.Authorizer;
/**
 * Test {@link Authorizer} implementation: a user is authorized for a role
 * exactly when that role appears in the user's own role collection.
 */
public class TestAuthorizer implements Authorizer<TestUser> {

    @Override
    public boolean authorize(TestUser user, String role) {
        // Authorized iff the requested role is among the user's roles.
        final boolean roleGranted = user.getRoles().contains(role);
        return roleGranted;
    }
}
|
krishnapl123/xworkz
|
KrishnaPractice/src/com/java/constructor/ConstructorChainingWithinClass.java
|
<filename>KrishnaPractice/src/com/java/constructor/ConstructorChainingWithinClass.java
package com.java.constructor;
/**
 * Demonstrates constructor chaining within a single class: constructing a
 * {@code Moon} runs its chained constructors, after which the initialized
 * fields are printed.
 */
public class ConstructorChainingWithinClass {

    public static void main(String[] args) {
        Moon theMoon = new Moon();
        System.out.println("Moon color is " + theMoon.color);
        System.out.println("Moon shape is " + theMoon.shape);
        System.out.println("Moon size is " + theMoon.size);
    }
}
|
HackWars/hackwars-classic
|
HWTomcatServer/webapps/ROOT/WEB-INF/classes/org/xamjwg/util/Domains.java
|
/*
GNU LESSER GENERAL PUBLIC LICENSE
Copyright (C) 2006 The XAMJ Project
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Contact info: <EMAIL>
*/
/*
* Created on Jun 2, 2005
*/
package org.xamjwg.util;
import java.util.*;
/**
* @author <NAME>.
*/
/**
 * Static helpers for validating HTTP cookie domains against host names.
 * Not instantiable.
 */
public class Domains {
    /** Generic top-level domains (gTLDs), stored lower-case with a leading dot. */
    private static final Collection<String> gTLDs;
    static {
        gTLDs = new HashSet<String>();
        gTLDs.add(".com");
        gTLDs.add(".edu");
        gTLDs.add(".gov");
        gTLDs.add(".int");
        gTLDs.add(".mil");
        gTLDs.add(".net");
        gTLDs.add(".org");
        gTLDs.add(".biz");
        gTLDs.add(".info");
        gTLDs.add(".name");
        gTLDs.add(".pro");
        gTLDs.add(".aero");
        gTLDs.add(".coop");
        gTLDs.add(".museum");
        //TODO: New gTLDs?
    }

    /** Utility class: prevent instantiation. */
    private Domains() {
        super();
    }

    /**
     * Checks whether {@code domain} is an acceptable cookie domain for
     * {@code hostName}: the host must end with the domain, and the domain
     * must contain at least two dots when its suffix is a generic TLD, or
     * at least three dots otherwise (country-code TLDs).
     *
     * @param domain   candidate cookie domain (e.g. ".example.com")
     * @param hostName host name the cookie was received from
     * @return true if the domain may set cookies for the host
     */
    public static boolean isValidCookieDomain(String domain, String hostName) {
        if (!hostName.endsWith(domain)) {
            return false;
        }
        int lastDotIdx = domain.lastIndexOf('.');
        if (lastDotIdx == -1) {
            return false;
        }
        // Use a fixed locale so the comparison is not affected by the default
        // locale's case rules (e.g. the Turkish dotless-i mapping).
        String suffix = domain.substring(lastDotIdx).toLowerCase(Locale.US);
        if (gTLDs.contains(suffix)) {
            return Strings.countChars(domain, '.') >= 2;
        }
        else {
            return Strings.countChars(domain, '.') >= 3;
        }
    }

    /**
     * @param host host name to test
     * @return true if {@code host} ends with any known generic TLD
     */
    public static boolean endsWithGTLD(String host) {
        for (String ending : gTLDs) {
            if (host.endsWith(ending)) {
                return true;
            }
        }
        return false;
    }
}
|
lileishen/eladmin-plus-contract
|
eladmin-common/src/main/java/com/yntovi/base/CommonEntity.java
|
package com.yntovi.base;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;
/**
* 抽象实体类 :带有公共字段
* @author fanglei
* @date 2021/07/28
*/
@Data
public abstract class CommonEntity<T extends Model<?>> extends CommonModel<T> implements Serializable{

    // Creator of the record; auto-filled by MyBatis-Plus on insert.
    @ApiModelProperty(value = "创建者")
    @TableField(fill= FieldFill.INSERT)
    private String createBy;

    // Last user to modify the record; auto-filled on insert and update.
    @ApiModelProperty(value = "更新者")
    @TableField(fill= FieldFill.INSERT_UPDATE)
    private String updateBy;

    // Creation timestamp; auto-filled on insert.
    @ApiModelProperty(value = "创建日期")
    @TableField(fill= FieldFill.INSERT)
    private Date createTime;

    // Last-modified timestamp; auto-filled on insert and update.
    @ApiModelProperty(value = "更新时间")
    @TableField(fill= FieldFill.INSERT_UPDATE)
    private Date updateTime;

    /* Validation-group marker for create operations. */
    public @interface Create {}

    /* Validation-group marker for update operations. */
    public @interface Update {
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.