repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
chiknas/fire-business-api-java
|
src/main/java/com/fire/sdk/model/request/BatchListRequest.java
|
package com.fire.sdk.model.request;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fire.sdk.http.HttpUtils;
import com.fire.sdk.model.Batch.BatchStatus;
import com.fire.sdk.model.Batch.BatchType;
import com.fire.sdk.model.Request;
import com.fire.sdk.model.response.BatchListResponse;
import com.fire.sdk.utils.Utils;
public class BatchListRequest implements Request<BatchListRequest, BatchListResponse> {
private static final Logger logger = LoggerFactory.getLogger(BatchListRequest.class);
private int limit;
private int offset;
private BatchStatus[] batchStatuses;
private BatchType[] batchTypes;
@Override
public String getEndpoint() {
ArrayList<String> queryStrings = new ArrayList<String>();
String queryString = "";
if (getBatchStatuses() != null) {
queryStrings.add("batchStatuses=" + Utils.enumJoin(getBatchStatuses(), ","));
}
if (getBatchTypes() != null) {
queryStrings.add("batchTypes=" + Utils.enumJoin(getBatchTypes(), ","));
}
if (getLimit() != 0) {
queryStrings.add("limit=" + getLimit());
}
if (getOffset() != 0) {
queryStrings.add("offset=" + getOffset());
}
if (queryStrings.size() > 0) {
StringBuilder sb = new StringBuilder(128);
int end = 0;
for (Object s : queryStrings.toArray()) {
sb.append(s);
end = sb.length();
sb.append("&");
}
queryString = "?" + sb.substring(0, end);
}
return "batches" + queryString;
}
public BatchStatus[] getBatchStatuses() {
return batchStatuses;
}
/**
* Set the statuses of batches you want returned.
*
* The following batch types exist:
* <ul>
* <li>OPEN
* <li>PENDING_APPROVAL
* <li>PENDING_PARENT_BATCH_APPROVAL - the bank transfer batch is waiting for a batch of payees to be approved.
* <li>COMPLETE
* <li>CANCELLED
* <li>REJECTED
* </ul>
*
* @param batchStatuses a String[] array containing the batch statuses to return.
* @return the batch list request object for chaining
*/
public BatchListRequest setBatchStatuses(BatchStatus[] batchStatuses) {
this.batchStatuses = batchStatuses;
return this;
}
public BatchType[] getBatchTypes() {
return batchTypes;
}
/**
* Set the types of batches you want returned.
*
* The following batch types exist:
* <ul>
* <li>INTERNAL_TRANSFER
* <li>BANK_TRANSFER
* <li>NEW_PAYEE
* </ul>
*
* @param batchTypes a String[] array containing the batch types to return.
* @return the batch list request object for chaining
*/
public BatchListRequest setBatchTypes(BatchType[] batchTypes) {
this.batchTypes = batchTypes;
return this;
}
public int getLimit() {
return limit;
}
public BatchListRequest setLimit(int limit) {
this.limit = limit;
return this;
}
public int getOffset() {
return offset;
}
public BatchListRequest setOffset(int offset) {
this.offset = offset;
return this;
}
@Override
public HttpUtils.HttpMethod getMethod() {
return HttpUtils.HttpMethod.GET;
}
@Override
public Class<BatchListResponse> getResponseClass() {
return BatchListResponse.class;
}
@Override
public Object getBody() {
// TODO Auto-generated method stub
return null;
}
}
|
rafaelw/mojo
|
sky/engine/core/dom/SelectorQuery.cpp
|
/*
* Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
* Copyright (C) 2014 Samsung Electronics. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "sky/engine/config.h"
#include "sky/engine/core/dom/SelectorQuery.h"
#include "sky/engine/bindings/exception_state.h"
#include "sky/engine/core/css/SelectorChecker.h"
#include "sky/engine/core/css/parser/BisonCSSParser.h"
#include "sky/engine/core/dom/Document.h"
#include "sky/engine/core/dom/ElementTraversal.h"
#include "sky/engine/core/dom/Node.h"
#include "sky/engine/core/dom/StaticNodeList.h"
namespace blink {
PassOwnPtr<SelectorQuery> SelectorQuery::adopt(CSSSelectorList& selectorList)
{
return adoptPtr(new SelectorQuery(selectorList));
}
SelectorQuery::SelectorQuery(CSSSelectorList& selectorList)
{
m_selectors.adopt(selectorList);
}
bool SelectorQuery::matches(Element& element) const
{
return selectorMatches(element, element);
}
PassRefPtr<StaticElementList> SelectorQuery::queryAll(ContainerNode& rootNode) const
{
Vector<RefPtr<Element> > result;
for (Element* element = ElementTraversal::firstWithin(rootNode); element; element = ElementTraversal::next(*element, &rootNode)) {
if (selectorMatches(rootNode, *element))
result.append(element);
}
return StaticElementList::adopt(result);
}
PassRefPtr<Element> SelectorQuery::queryFirst(ContainerNode& rootNode) const
{
for (Element* element = ElementTraversal::firstWithin(rootNode); element; element = ElementTraversal::next(*element, &rootNode)) {
if (selectorMatches(rootNode, *element))
return element;
}
return nullptr;
}
bool SelectorQuery::selectorMatches(ContainerNode& rootNode, Element& element) const
{
SelectorChecker checker(element);
for (const CSSSelector* selector = m_selectors.first(); selector; selector = CSSSelectorList::next(*selector)) {
if (checker.match(*selector))
return true;
}
return false;
}
SelectorQuery* SelectorQueryCache::add(const AtomicString& selectors, const Document& document, ExceptionState& exceptionState)
{
HashMap<AtomicString, OwnPtr<SelectorQuery> >::iterator it = m_entries.find(selectors);
if (it != m_entries.end())
return it->value.get();
CSSParserContext context(document);
BisonCSSParser parser(context);
CSSSelectorList selectorList;
parser.parseSelector(selectors, selectorList);
if (!selectorList.first()) {
exceptionState.ThrowDOMException(SyntaxError, "'" + selectors + "' is not a valid selector.");
return 0;
}
const unsigned maximumSelectorQueryCacheSize = 256;
if (m_entries.size() == maximumSelectorQueryCacheSize)
m_entries.remove(m_entries.begin());
return m_entries.add(selectors, SelectorQuery::adopt(selectorList)).storedValue->value.get();
}
}
|
SolidStateGroup/patientview
|
root/common/src/main/java/org/patientview/persistence/model/UserObservationHeading.java
|
package org.patientview.persistence.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
/**
* Created by <EMAIL>
* Created on 22/12/2014
*
* For per patient result list in table view
*/
@Entity
@Table(name = "pv_user_observation_heading")
public class UserObservationHeading extends SimpleAuditModel {
@JsonIgnore
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "user_id", nullable = false)
private User user;
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "observation_heading_id", nullable = false)
private ObservationHeading observationHeading;
public UserObservationHeading() {
}
public UserObservationHeading(User user, ObservationHeading observationHeading) {
this.user = user;
this.observationHeading = observationHeading;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public ObservationHeading getObservationHeading() {
return observationHeading;
}
public void setObservationHeading(ObservationHeading observationHeading) {
this.observationHeading = observationHeading;
}
}
|
datlowe/czsem-gate-tools
|
modules/fs-query/src/main/java/czsem/fs/query/QueryNode.java
|
package czsem.fs.query;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.StringJoiner;
import czsem.fs.query.FSQuery.QueryData;
import czsem.fs.query.FSQuery.QueryMatch;
import czsem.fs.query.eval.FsEvaluator;
import czsem.fs.query.restrictions.DirectAttrRestriction;
import czsem.fs.query.restrictions.PrintableRestriction;
import czsem.fs.query.restrictions.ReferencingRestriction;
import czsem.fs.query.restrictions.Restrictions;
import czsem.fs.query.utils.CloneableIterator;
public class QueryNode {
public static class QueryNodeData {
protected final List<PrintableRestriction> restrictions = new ArrayList<>();
protected final List<DirectAttrRestriction> directRestrictions = new ArrayList<>();
protected final List<ReferencingRestriction> referencingRestrictions = new ArrayList<>();
protected String name;
protected boolean optional = false;
protected boolean optionalSubtree = false;
protected boolean forbiddenSubtree = false;
protected int subtreeDepth = -1;
}
protected final QueryNodeData data;
protected List<QueryNode> children = new ArrayList<QueryNode>();
private QueryNode prent;
public QueryNode() {
data = new QueryNodeData();
}
public QueryNode(QueryNodeData data) {
this.data = data;
}
//protected AbstractEvaluator evaluator;
/*
public QueryNode(AbstractEvaluator evaluator) {
this.evaluator = evaluator;
}
public QueryNode() {this(ChildrenEvaluator.childrenEvaluatorInstance);}
public void setEvaluator(AbstractEvaluator evaluator) {
if (evaluator.getClass().isInstance(this.evaluator)) return;
this.evaluator = evaluator;
}
public Iterable<QueryMatch> getFinalResultsFor(QueryData data, int nodeId) {
return ReferencingRestrictionsResultsIteratorFilter.filter(getResultsFor(data, nodeId), data);
}
public Iterable<QueryMatch> getResultsFor(QueryData data, int nodeId) {
return evaluator.getResultsFor(data, this, nodeId);
}
*/
public void addChild(QueryNode queryNode) {
children.add(queryNode);
queryNode.setPrent(this);
}
public void addRestriction(String comparartor, String arg1, String arg2) {
Restrictions.addRestriction(this, comparartor, arg1, arg2);
}
public void addOtherPrintableRestriction(PrintableRestriction r) {
data.restrictions.add(r);
}
public void addDirectRestriction(DirectAttrRestriction restriction) {
data.restrictions.add(restriction);
data.directRestrictions.add(restriction);
}
public void addReferencingRestriction(ReferencingRestriction restriction) {
data.restrictions.add(restriction);
data.referencingRestrictions.add(restriction);
}
@Override
public String toString() {
if (getName() != null) return getName();
return "QN_"+Integer.toString(hashCode(), Character.MAX_RADIX);
}
public List<QueryNode> getChildren() {
return children;
}
public void setName(String name) {
data.name = name;
}
public String getName() {
return data.name;
}
/*
public void reset() {
evaluator.reset();
for (QueryNode ch : getChildren())
{
ch.reset();
}
}
*/
public List<DirectAttrRestriction> getDirectRestrictions() {
return data.directRestrictions;
}
public List<ReferencingRestriction> getReferencingRestrictions() {
return data.referencingRestrictions;
}
public Collection<PrintableRestriction> getAllRestricitions() {
return data.restrictions;
}
public QueryNode getPrent() {
return prent;
}
public void setPrent(QueryNode prent) {
this.prent = prent;
}
public void setOptional(boolean optional) {
data.optional = optional;
}
public boolean isOptional() {
return data.optional;
}
public void setOptionalSubtree(boolean optionalSubtree) {
this.data.optionalSubtree = optionalSubtree;
}
public boolean isOptionalOrOptionalSubtree() {
return isOptionalSubtree() || isOptional();
}
public boolean isOptionalOrForbiddenSubtree() {
return isOptionalSubtree() || isForbiddenSubtree();
}
public boolean isOptionalSubtree() {
return data.optionalSubtree;
}
public boolean isForbiddenSubtree() {
return data.forbiddenSubtree;
}
public void setForbiddenSubtree(boolean forbiddenSubtree) {
data.forbiddenSubtree = forbiddenSubtree;
}
public int getSubtreeDepth() {
return data.subtreeDepth;
}
public void setSubtreeDepth(int subtreeDepth) {
data.subtreeDepth = subtreeDepth;
}
@Deprecated
public Iterable<QueryMatch> getFinalResultsFor(QueryData data, List<QueryNode> optionalNodes, int dataNodeId) {
CloneableIterator<QueryMatch> i = new FsEvaluator(this, optionalNodes, data).getFinalResultsFor(dataNodeId);
if (i == null) return null;
return i.toIterable();
}
public QueryNodeData getData() {
return data;
}
public String toStringDeep() {
String ret = "["+toString()+"]";
if (children.isEmpty()) return ret;
StringJoiner j = new StringJoiner(",", "(", ")");
for (QueryNode queryNode : children) {
j.add(queryNode.toStringDeep());
}
return ret+j.toString();
}
}
|
rajitbanerjee/kattis
|
Gerrymandering/gerry.py
|
<reponame>rajitbanerjee/kattis<gh_stars>1-10
"""https://open.kattis.com/problems/gerrymandering"""
P, D = map(int, input().split())
districts = {}
for _ in range(P):
d, a, b = map(int, input().split())
if d not in districts.keys():
districts[d] = [0, 0]
districts[d][0] += a
districts[d][1] += b
wA, wB, V = 0, 0, 0
for i in range(1, D + 1):
ansRow = []
# find the winner of the election
w = 0 if districts[i][0] > districts[i][1] else 1
ansRow.append(chr(w + 65))
# wasted votes of winner
winnerWaste = districts[i][w] - sum(districts[i])//2 - 1
# wasted votes of loser
loserWaste = districts[i][1 - w]
ansRow.extend([winnerWaste, loserWaste])
if w == 1:
ansRow[1], ansRow[2] = ansRow[2], ansRow[1]
# display answers
for each in ansRow:
print(each, end=' ')
print()
# update total wasted votes of each party
wA += ansRow[1]
wB += ansRow[2]
# update total number of voters
V += sum(districts[i])
# compute and display efficiency gap
E = abs(wA - wB)/V
print(round(E, 10))
|
sapcc/juno
|
libs/juno-ui-components/src/components/DataGridFootRow/DataGridFootRow.test.js
|
<reponame>sapcc/juno
import * as React from "react"
import { render, screen} from "@testing-library/react"
import { DataGridFootRow } from "./index"
describe("DataGridFootRow", () => {
test("renders a DataGridFootRow", async () => {
const tablefoot = document.createElement('tfoot')
const {container} = render(<DataGridFootRow data-testid="my-datagridrow" />,
{ container: document.body.appendChild(tablefoot)})
expect(screen.getByTestId("my-datagridrow")).toBeInTheDocument()
})
test("renders a custom className", async () => {
const tablefoot = document.createElement('tfoot')
const {container} = render(<DataGridFootRow data-testid="my-datagridrow" className="my-custom-class"/>,
{ container: document.body.appendChild(tablefoot)})
expect(screen.getByTestId("my-datagridrow")).toBeInTheDocument()
expect(screen.getByTestId("my-datagridrow")).toHaveClass("my-custom-class")
})
})
|
RalfNick/DataStruct
|
src/algorithm/heap/PriorityQueue.java
|
package algorithm.heap;
import java.util.Arrays;
/**
* DESCRIPTION
*
* @author lixin
* @create 2019-09-05 下午8:34
**/
public class PriorityQueue<T extends Comparable<? super T>> {
private int capacity;
private int size;
private Object[] arr;
public PriorityQueue(int capacity) {
this.capacity = capacity;
arr = new Object[capacity];
}
public void enQueue(T value) {
if (size >= capacity) {
resize();
}
arr[size] = value;
upJust(arr, value, size);
size++;
}
private void resize() {
capacity <<= 1;
Object[] newArr = new Object[capacity];
System.arraycopy(arr, 0, newArr, 0, size);
arr = newArr;
}
@SuppressWarnings("unchecked")
public T deQueue() {
if (size == 0) {
return null;
}
Object result = arr[0];
arr[0] = arr[--size];
arr[size] = null;
downJust(arr, size, 0);
return (T) result;
}
@SuppressWarnings("unchecked")
public T peek() {
if (size == 0) {
return null;
}
return (T) arr[0];
}
public int size() {
return size;
}
@Override
public String toString() {
return Arrays.toString(arr);
}
@SuppressWarnings("unchecked")
private static <T extends Comparable<? super T>> void upJust(Object[] arr, T val, int childIndex) {
int parentIndex = (childIndex - 1) >> 1;
while (childIndex > 0 && isBigger(val, (T) arr[parentIndex])) {
arr[childIndex] = arr[parentIndex];
childIndex = parentIndex;
parentIndex = (childIndex - 1) >> 1;
}
arr[childIndex] = val;
}
@SuppressWarnings("unchecked")
private static <T extends Comparable<? super T>> void downJust(Object[] arr, int length, int parentIndex) {
int childIndex = (parentIndex << 1) + 1;
Object temp = arr[parentIndex];
while (childIndex < length) {
if (childIndex + 1 < length && isBigger((T) arr[childIndex + 1], (T) arr[childIndex])) {
childIndex++;
}
if (isBigger((T) temp, (T) arr[childIndex])) {
break;
}
arr[parentIndex] = arr[childIndex];
parentIndex = childIndex;
childIndex = (childIndex << 1) + 1;
}
arr[parentIndex] = temp;
}
private static <T extends Comparable<? super T>> boolean isBigger(T t1, T t2) {
if (t1 == null && t2 == null) {
return true;
} else if (t2 == null) {
return true;
} else if (t1 == null) {
return false;
}
return t1.compareTo(t2) > 0;
}
}
|
cooperece366s21/cooper-union-ece366-spring2020
|
spark-example/src/main/java/edu/cooper/ee/ece366/groceries/model/Item.java
|
<reponame>cooperece366s21/cooper-union-ece366-spring2020
package edu.cooper.ee.ece366.groceries.model;
import com.google.gson.annotations.Expose;
public class Item {
@Expose private final Long id;
@Expose private final String name;
@Expose private final Double cost;
public Item(Long id, String name, Double cost) {
this.id = id;
this.name = name;
this.cost = cost;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + id.intValue();
return result;
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof Item)) {
return false;
}
Item other = (Item) obj;
return super.equals(other)
|| (this.id.equals(other.getId())
&& this.name.equals(other.getName())
&& this.cost.equals(other.getCost()));
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public Double getCost() {
return cost;
}
}
|
thebitstudio/pixelopolis_car_app
|
app/src/main/java/com/bit/pixelopolis_car/services/config/CommandTime.java
|
<gh_stars>0
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bit.pixelopolis_car.services.config;
public class CommandTime {
public int turning;
public int forward;
public CommandTime() {
}
public int getTurning() {
return turning;
}
public void setTurning(int turning) {
this.turning = turning;
}
public int getForward() {
return forward;
}
public void setForward(int forward) {
this.forward = forward;
}
}
|
zhengdejin/SC1_Code
|
kernel-3.10/arch/arm/mach-mt8127/include/mach/hotplug.h
|
<gh_stars>0
#ifndef _HOTPLUG
#define _HOTPLUG
#include <linux/kernel.h> //printk
#include <asm/atomic.h>
#include <mach/mt_reg_base.h>
/* log */
#define HOTPLUG_LOG_NONE 0
#define HOTPLUG_LOG_WITH_XLOG 1
#define HOTPLUG_LOG_WITH_PRINTK 2
#define HOTPLUG_LOG_PRINT HOTPLUG_LOG_WITH_PRINTK
#if (HOTPLUG_LOG_PRINT == HOTPLUG_LOG_NONE)
#define HOTPLUG_INFO(fmt, args...)
#elif (HOTPLUG_LOG_PRINT == HOTPLUG_LOG_WITH_XLOG)
#define HOTPLUG_INFO(fmt, args...) pr_debug("[Power/hotplug] "fmt, ##args)
#elif (HOTPLUG_LOG_PRINT == HOTPLUG_LOG_WITH_PRINTK)
#define HOTPLUG_INFO(fmt, args...) printk("[Power/hotplug] "fmt, ##args)
#endif
/* profilling */
//#define CONFIG_HOTPLUG_PROFILING
#define CONFIG_HOTPLUG_PROFILING_COUNT 100
/* register address */
#define BOOT_ADDR (INFRACFG_AO_BASE + 0x800)
/* register read/write */
#define REG_READ(addr) (*(volatile u32 *)(addr))
#define REG_WRITE(addr, value) (*(volatile u32 *)(addr) = (u32)(value))
/* power on/off cpu*/
#define CONFIG_HOTPLUG_WITH_POWER_CTRL
/* global variable */
extern volatile int pen_release;
extern atomic_t hotplug_cpu_count;
#endif //enf of #ifndef _HOTPLUG
|
ArgonDesign/argon-vtools
|
src/main/scala/com/argondesign/alint/Message.scala
|
////////////////////////////////////////////////////////////////////////////////
// Argon Design Ltd. Project P9000 Argon
// Copyright (c) 2016-2018 Argon Design Ltd. All rights reserved.
//
// This file is covered by the BSD (with attribution) license.
// See the LICENSE file for the precise wording of the license.
//
// Module : Argon Verilog Tools
// Author : <NAME>
//
// DESCRIPTION:
//
////////////////////////////////////////////////////////////////////////////////
package com.argondesign.vtools
sealed abstract trait Message {
val category: String
val message: String
override def toString = category + ": " + this.getClass.getSimpleName.takeWhile(_ != '$') + " - " + message
}
abstract trait Warning extends Message {
val category = "WARNING"
}
abstract trait Error extends Message {
val category = "ERROR"
}
sealed abstract trait SourceMessage extends Message {
val loc: Loc
override def toString = loc + " " + super.toString
}
abstract trait SourceWarning extends SourceMessage with Warning;
abstract trait SourceError extends SourceMessage with Error;
|
gscept/nebula-trifid
|
code/render/lighting/sm50/sm50shadowserver.h
|
#pragma once
//------------------------------------------------------------------------------
/**
@class Lighting::SM50ShadowServer
Handles shadowing using SM 5.0
(C) 2012-2016 Individual contributors, see AUTHORS file
*/
#include "lighting/base/shadowserverbase.h"
#include "frame/frameshader.h"
#include "frame/frameposteffect.h"
#include "coregraphics/rendertarget.h"
#include "lighting/pssmutil.h"
#include "lighting/csmutil.h"
#include "frame/framepass.h"
//------------------------------------------------------------------------------
namespace Lighting
{
class SM50ShadowServer : public ShadowServerBase
{
__DeclareClass(SM50ShadowServer);
public:
/// constructor
SM50ShadowServer();
/// destructor
virtual ~SM50ShadowServer();
/// open the shadow server
void Open();
/// close the shadow server
void Close();
/// update shadow buffer
void UpdateShadowBuffers();
/// get pointer to shadow buffer for local lights
const Ptr<CoreGraphics::Texture>& GetSpotLightShadowBufferTexture() const;
/// get pointer to PSSM shadow buffer for global lights
const Ptr<CoreGraphics::Texture>& GetGlobalLightShadowBufferTexture() const;
/// get array of PSSM split distances
const float* GetSplitDistances() const;
/// get array of PSSM LightProjTransforms
const Math::matrix44* GetSplitTransforms() const;
/// gets CSM shadow view
const Math::matrix44* GetShadowView() const;
/// get array of PSSM frustum far plane corners
const Math::float4* GetFarPlane() const;
/// get array of PSSM frustum near plane corners
const Math::float4* GetNearPlane() const;
private:
/// update spot light shadow buffers
void UpdateSpotLightShadowBuffers();
/// update point light shadow buffers
void UpdatePointLightShadowBuffers();
/// prepare updating global buffer
void PrepareGlobalShadowBuffer();
/// update global light shadow buffers
void UpdateHotGlobalShadowBuffer();
/// update static-only shadow buffer
void UpdateColdGlobalShadowBuffer();
/// sort local lights by priority
virtual void SortLights();
// spot light
Ptr<CoreGraphics::RenderTarget> spotLightShadowMap1;
Ptr<CoreGraphics::RenderTarget> spotLightShadowMap2;
Ptr<CoreGraphics::RenderTarget> spotLightShadowBufferAtlas;
Ptr<CoreGraphics::Shader> satXShader;
Ptr<CoreGraphics::Shader> satYShader;
Ptr<Frame::FramePass> spotLightPass;
Ptr<Frame::FrameBatch> spotLightBatch;
Ptr<Frame::FramePostEffect> spotLightHoriPass;
Ptr<Frame::FramePostEffect> spotLightVertPass;
// point light
Ptr<CoreGraphics::RenderTargetCube> pointLightShadowCubes[MaxNumShadowPointLights];
Ptr<CoreGraphics::RenderTargetCube> pointLightShadowFilterCube;
Ptr<Frame::FramePass> pointLightPass;
Ptr<Frame::FrameBatch> pointLightBatch;
Ptr<CoreGraphics::ShaderVariable> pointLightPosVar;
Ptr<CoreGraphics::Shader> pointLightBlur;
CoreGraphics::ShaderFeature::Mask xBlurMask;
CoreGraphics::ShaderFeature::Mask yBlurMask;
Ptr<CoreGraphics::ShaderVariable> pointLightBlurReadLinear;
Ptr<CoreGraphics::ShaderVariable> pointLightBlurReadPoint;
Ptr<CoreGraphics::ShaderVariable> pointLightBlurWrite;
// global light
Ptr<Frame::FramePass> globalLightHotPass;
Ptr<Frame::FramePostEffect> globalLightBlurPass;
Ptr<Frame::FrameBatch> globalLightShadowBatch;
Ptr<CoreGraphics::RenderTarget> globalLightShadowBuffer;
Ptr<CoreGraphics::RenderTarget> globalLightShadowBufferFinal;
// generic stuff
Ptr<CoreGraphics::Shader> blurShader;
Ptr<CoreGraphics::ShaderVariable> shadowCascadeViewVar;
PSSMUtil pssmUtil;
CSMUtil csmUtil;
_declare_timer(globalShadow);
_declare_timer(pointLightShadow);
_declare_timer(spotLightShadow);
};
//------------------------------------------------------------------------------
/**
*/
inline const Ptr<CoreGraphics::Texture>&
SM50ShadowServer::GetSpotLightShadowBufferTexture() const
{
return this->spotLightShadowBufferAtlas->GetResolveTexture();
}
//------------------------------------------------------------------------------
/**
*/
inline const Ptr<CoreGraphics::Texture>&
SM50ShadowServer::GetGlobalLightShadowBufferTexture() const
{
return this->globalLightShadowBufferFinal->GetResolveTexture();
}
//------------------------------------------------------------------------------
/**
*/
inline const Math::float4*
SM50ShadowServer::GetFarPlane() const
{
return this->pssmUtil.GetFarPlane();
}
//------------------------------------------------------------------------------
/**
*/
inline const Math::float4*
SM50ShadowServer::GetNearPlane() const
{
return this->pssmUtil.GetNearPlane();
}
//------------------------------------------------------------------------------
/**
*/
inline const Math::matrix44*
SM50ShadowServer::GetShadowView() const
{
return &this->csmUtil.GetShadowView();
}
//------------------------------------------------------------------------------
/**
Get raw pointer to array of PSSM split distances.
*/
inline const float*
SM50ShadowServer::GetSplitDistances() const
{
return this->csmUtil.GetCascadeDistances();
}
//------------------------------------------------------------------------------
/**
Get raw pointer to array of PSSM split LightProjTransform matrices.
*/
inline const Math::matrix44*
SM50ShadowServer::GetSplitTransforms() const
{
return this->csmUtil.GetCascadeTransforms();
}
} // namespace Lighting
//------------------------------------------------------------------------------
|
juliuspetero/coding-problems
|
src/com/coding/dynamicprogramming/FibonacciDynamicProgramming.java
|
<gh_stars>0
package com.coding.dynamicprogramming;
public class FibonacciDynamicProgramming {
/**
* Recursive approach to fibonacci
*
* @param n nth fibonacci
* @return fibonacci number
*/
public static int calculateFibonacciNumber(int n) {
if (n < 2)
return n;
int previousFibo1 = calculateFibonacciNumber(n - 1);
int previousFibo2 = calculateFibonacciNumber(n - 2);
int fiboNumber = previousFibo1 + previousFibo2;
return fiboNumber;
}
/**
* Dynamic programmming - Top-down memorization
*
* @param n
* @return
*/
public static int calculateFibonacciNumberUsingDP1(int n) {
int[] memory = new int[n + 1];
int fiboNumber = calculateFibonacciNumberRecursively(memory, n);
return fiboNumber;
}
private static int calculateFibonacciNumberRecursively(int[] memory, int n) {
if (n < 2) {
return n;
}
if (memory[n] != 0) {
return memory[n];
}
memory[n] = calculateFibonacciNumberRecursively(memory, n - 1) + calculateFibonacciNumberRecursively(memory, n - 2);
return memory[n];
}
/**
* Dynamic programming - Bottom-up Tabulation
*/
public int calculateFibonacciNumberUsingDP2(int n) {
if (n == 0) return 0;
int[] array = new int[n + 1];
// Base case scenarios
array[0] = 0;
array[1] = 1;
// Fill the array up to the nth value
for (int i = 2; i <= n; i++) {
array[i] = array[i - 1] + array[i - 2];
}
return array[n];
}
}
|
KismetSuS/SunderingShadows
|
d/shadow/room/forest/road4.c
|
<gh_stars>10-100
#include "forest.h"
inherit "/d/shadow/room/forest/road1";
void create(){
::create();
set_terrain(LIGHT_FOREST);
set_travel(DIRT_ROAD);
set_property("light" , 2);
set_property("indoors" , 0);
set_short("%^RESET%^%^GREEN%^On the Quiet %^BOLD%^%^GREEN%^Fo%^RESET%^%^GREEN%^r%^GREEN%^e%^BOLD%^%^GREEN%^s%^RESET%^%^GREEN%^t %^ORANGE%^R%^BOLD%^%^ORANGE%^o%^ORANGE%^a%^ORANGE%^d%^RESET%^");
set_items(([
"road":"A new road through the forest.",
"forest":"A quiet forest, for sure. Only the sounds of birds."
]));
set_exits(([
"northeast" : FORESTDIR+"road3",
"northwest" : FORESTDIR+"road5"
]));
set_smell("default","You smell fresh air and the odor of %^GREEN%^w%^BOLD%^i%^MAGENTA%^l%^RESET%^%^MAGENTA%^d%^BOLD%^fl%^RESET%^%^MAGENTA%^o%^BOLD%^we%^RESET%^%^MAGENTA%^r%^BOLD%^s%^ORANGE%^.");
set_listen("default","You can hear birds singing.");
set_climate(CLIMATE);
}
|
FunctionLab/sleipnir
|
src/annotation.h
|
/*****************************************************************************
* This file is provided under the Creative Commons Attribution 3.0 license.
*
* You are free to share, copy, distribute, transmit, or adapt this work
* PROVIDED THAT you attribute the work to the authors listed below.
* For more information, please see the following web page:
* http://creativecommons.org/licenses/by/3.0/
*
* This file is a component of the Sleipnir library for functional genomics,
* authored by:
* <NAME> (<EMAIL>)
* <NAME>
* <NAME>
* <NAME> (<EMAIL>, primary contact)
*
* If you use this library, the included executable tools, or any related
* code in your work, please cite the following publication:
* <NAME>, <NAME>, <NAME>, and
* <NAME>.
* "The Sleipnir library for computational functional genomics"
*****************************************************************************/
#ifndef ANNOTATION_H
#define ANNOTATION_H
namespace Sleipnir {
/*!
* \brief
* Encapsulates the hypergeometric functional enrichment of a query against one ontology term.
*
* Generated by an IOntology::TermFinder call, each STermFound struct represents the hypergeometric
* enrichment of one ontology term for a set of query genes. Documentation assumes a set of query
* genes Q and a set of term genes T.
*
* \see
* IOntology
*/
struct STermFound {
/*!
* \brief
* ID of the ontology term for this enrichment.
*/
size_t m_iID;
/*!
* \brief
* Hypergeometric p-value of this enrichment.
*/
double m_dP;
/*!
* \brief
* The number of genes in Q intersect T.
*/
size_t m_iHitsTerm;
/*!
* \brief
* The number of genes in T.
*/
size_t m_iSizeTerm;
/*!
* \brief
* The number of genes in Q.
*/
size_t m_iHitsTotal;
/*!
* \brief
* The total number of genes in the background set (genome).
*/
size_t m_iSizeTotal;
/*!
* \brief
* Construct a new structure with the given parameter values.
*
* \param iID
* ID of the ontology term for this enrichment.
*
* \param dP
* Hypergeometric p-value of this enrichment.
*
* \param iHitsTerm
* The number of genes in Q intersect T.
*
* \param iSizeTerm
* The number of genes in T.
*
* \param iHitsTotal
* The number of genes in Q.
*
* \param iSizeTotal
* The total number of genes in the background set (genome).
*/
STermFound(size_t iID, double dP, size_t iHitsTerm, size_t iSizeTerm, size_t iHitsTotal, size_t iSizeTotal) :
m_iID(iID), m_dP(dP), m_iHitsTerm(iHitsTerm), m_iSizeTerm(iSizeTerm), m_iHitsTotal(iHitsTotal),
m_iSizeTotal(iSizeTotal) {}
};
}
#include "annotationi.h"
namespace Sleipnir {
/*!
* \brief
* Encapsulates a functional catalog/hierarchy/ontology such as GO, KEGG, or MIPS.
*
* IOntology provides a uniform interface able to capture ontological functional catalogs such as the
* Gene Ontology, hierarchical catalogs such as MIPS, and (essentially) flag functional groupings such
* as KEGG. IOntology's structure is modeled on GO's, being the most general: each term in the
* functional catalog has zero or more parents (more general terms), zero or more children (more specific
* terms), and zero or more directly annotated genes. A gene annotated to some term is also implicitly
* annotated to that term's ancestors.
*
* \see
* COntologyKEGG | COntologyOBO | COntologyMIPS | COntologyMIPSPhenotypes | CSlim
*/
class IOntology {
public:
/*!
* \brief
* Returns string identifier of the encapsulated ontology.
*
* \returns
* String identifier of the ontology.
*/
virtual const std::string &GetID() const = 0;
/*!
* \brief
* Returns the number of nodes (terms) in the ontology.
*
* \returns
* Number of nodes (terms) in the ontology.
*/
virtual size_t GetNodes() const = 0;
/*!
* \brief
* Returns the ontology-specific ID string of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \returns
* Ontology-specific ID of the requested term (e.g. "GO:0007093").
*/
virtual const std::string &GetID(size_t iTerm) const = 0;
/*!
* \brief
* Returns the ontology-specific description of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \returns
* Ontology-specific description of the requested term (e.g. "mitotic cell cycle").
*/
virtual const std::string &GetGloss(size_t iTerm) const = 0;
/*!
* \brief
* Returns the number of parents of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \returns
* Number of parents of the requested term.
*/
virtual size_t GetParents(size_t iTerm) const = 0;
/*!
* \brief
* Returns the ontology term index of the requested parent of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \param iParent
* Parent to retrieve.
*
* \returns
* Ontology term index of the requested parent.
*
* \remarks
* Requested parent must be less than IOntology::GetParents.
*/
virtual size_t GetParent(size_t iTerm, size_t iParent) const = 0;
/*!
* \brief
* Returns the number of children of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \returns
* Number of children of the requested term.
*/
virtual size_t GetChildren(size_t iTerm) const = 0;
/*!
* \brief
* Returns the ontology term index of the requested child of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \param iChild
* Child to retrieve.
*
* \returns
* Ontology term index of the requested child.
*
* \remarks
* Requested child must be less than IOntology::GetChildren.
*/
virtual size_t GetChild(size_t iTerm, size_t iChild) const = 0;
/*!
* \brief
* Retrieves the parent term IDs of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \param setiParents
* Output set of parent term IDs.
*
* \returns
* True on success, false otherwise.
*
* \remarks
* Operates recursively, returning all nodes between iTerm and the ontology root.
*
* \see
* GetParents | GetParent
*/
virtual bool GetParents(size_t iTerm, std::set <size_t> &setiParents) const = 0;
/*!
* \brief
* Retrieves the descendant term IDs of the requested term.
*
* \param iTerm
* Index of ontology term.
*
* \param setiChildren
* Output set of descendant term IDs.
*
* \returns
* True on success, false otherwise.
*
* \remarks
* Operates recursively, returning all nodes between iTerm and the ontology leaves.
*
* \see
* GetChildren | GetChild
*/
virtual bool GetChildren(size_t iTerm, std::set <size_t> &setiChildren) const = 0;
/*!
* \brief
* Returns the number of genes annotated to or (optionally) below this term.
*
* \param iTerm
* Ontology term index.
*
* \param fRecursive
* If true, include gene annotations to descendant terms.
*
* \returns
* Number of genes annotated to or below this term.
*/
virtual size_t GetGenes(size_t iTerm, bool fRecursive = false) const = 0;
/*!
* \brief
* Returns the requested gene annotated to or below the given term.
*
* \param iTerm
* Ontology term index from which to retrieve gene.
*
* \param iGene
* Index of gene within the requested term.
*
* \returns
* Gene annotation at the requested index.
*
* \remarks
* If iGene is less than the number of genes annotated directly to the term, a direct annotation is
* returned. Otherwise, descendant terms are searched and the requested gene index is retrieved from
* this set. iGene must be less than the value of IOntology::GetGenes.
*/
virtual const CGene &GetGene(size_t iTerm, size_t iGene) const = 0;
/*!
* \brief
* Indicates whether the given gene is annotated to or (optionally) below the given term.
*
* \param iTerm
* Ontology term index.
*
* \param Gene
* Gene for which annotation is tested.
*
* \param fRecursive
* If true, include annotations to descendants of the given term.
*
* \returns
* True if the given gene is annotated to or (optionally) below the given term.
*/
virtual bool IsAnnotated(size_t iTerm, const CGene &Gene, bool fRecursive = true) const = 0;
/*!
* \brief
* Returns the ontology term index corresponding to the given ontology-specific ID string.
*
* \param strID
* Ontology term ID to retrieve (e.g. "GO:0007093").
*
* \returns
* The index of the requested term, -1 if not found.
*/
virtual size_t GetNode(const std::string &strID) const = 0;
/*!
* \brief
* Obtain the primary gene names for all genes in the ontology.
*
* \param vecstrGenes
* Outputs primary gene names for all genes in the ontology.
*/
virtual void GetGeneNames(std::vector <std::string> &vecstrGenes) const = 0;
/*!
* \brief
* Uses the hypergeometric distribution to find functional enrichments of the given gene set.
*
* \param Genes
* Gene set to test for functional enrichments.
*
* \param vecsTerms
* Output statistics for the given genes over all terms in the ontology.
*
* \param fBonferroni
* If true, p-values are Bonferroni corrected.
*
* \param fRecursive
* If true, annotations to descendants are used when calculating term overlaps.
*
* \param fGenome
* If true, use all genes in the genome as background; otherwise, use only genes with at least one
* annotation in the ontology.
*
* \param dPValue
* Only terms significant below this p-value will be recorded.
*
* \param pBackground
* If non-null, use the given gene set as the background.
*
* TermFinder uses the GO::TermFinder technique of <NAME>, et al to calculate ontology
* terms (i.e. functions) enriched in the given gene set. This involves significance testing the
* overlap of the given gene set with annotations to every ontology term using the hypergeometric
* test.
*/
virtual void TermFinder(const CGenes &Genes, std::vector <STermFound> &vecsTerms, bool fBonferroni = true,
bool fRecursive = true, bool fGenome = false, float dPValue = 1,
const CGenes *pBackground = NULL) const = 0;
};
// TODO: These should really be templated instead of duplicated like this...
/*!
* \brief
* Implements IOntology for the KEGG orthology.
*
* COntologyKEGG parses the "ko" file from the Kyoto Encyclopedia of Genes and Genomes to extract
* organism-independent function annotations from KEGG.
*/
class COntologyKEGG : COntologyKEGGImpl, public IOntology {
public:
COntologyKEGG();
bool Open(std::istream &istm, CGenome &Genome, const std::string &strOrganism, bool fSynonyms = false);
void GetGeneNames(std::vector <std::string> &vecstrGenes) const {
COntologyImpl::GetGeneNames(vecstrGenes);
}
void TermFinder(const CGenes &Genes, std::vector <STermFound> &vecsTerms, bool fBonferroni = true,
bool fRecursive = true, bool fGenome = false, float dPValue = 1,
const CGenes *pBackground = NULL) const {
COntologyImpl::TermFinder(Genes, vecsTerms, fBonferroni, fRecursive, fGenome, dPValue,
pBackground);
}
size_t GetNode(const std::string &strID) const {
return COntologyImpl::GetNode(strID);
}
bool IsAnnotated(size_t iTerm, const CGene &Gene, bool fRecursive) const {
return COntologyImpl::IsAnnotated(iTerm, Gene, fRecursive);
}
size_t GetNodes() const {
return COntologyImpl::GetNodes();
}
const std::string &GetID() const {
return COntologyImpl::GetID();
}
const std::string &GetID(size_t iTerm) const {
return COntologyImpl::GetID(iTerm);
}
const std::string &GetGloss(size_t iTerm) const {
return COntologyImpl::GetGloss(iTerm);
}
size_t GetParents(size_t iTerm) const {
return COntologyImpl::GetParents(iTerm);
}
size_t GetParent(size_t iTerm, size_t iParent) const {
return COntologyImpl::GetParent(iTerm, iParent);
}
size_t GetChildren(size_t iTerm) const {
return COntologyImpl::GetChildren(iTerm);
}
size_t GetChild(size_t iTerm, size_t iChild) const {
return COntologyImpl::GetChild(iTerm, iChild);
}
size_t GetGenes(size_t iTerm, bool fRecursive) const {
return COntologyImpl::GetGenes(iTerm, fRecursive);
}
const CGene &GetGene(size_t iTerm, size_t iGene) const {
return COntologyImpl::GetGene(iTerm, iGene);
}
bool GetParents(size_t iTerm, std::set <size_t> &setiParents) const {
return COntologyImpl::GetParents(iTerm, setiParents);
}
bool GetChildren(size_t iTerm, std::set <size_t> &setiChildren) const {
return COntologyImpl::GetChildren(iTerm, setiChildren);
}
};
/*!
* \brief
* Implements IOntology for OBO based Ontologies.
*
* COntologyOBO parses OBO and annotation files to obtain the structure and annotations
* for OBO ontologies.
*/
class COntologyOBO : COntologyOBOImpl, public IOntology {
public:
/*!
* \brief
* Common Gene Ontology aspects/namespaces: biological process.
*/
static const char c_szBiologicalProcess[];
/*!
* \brief
* Common Gene Ontology aspects/namespaces: cellular component.
*/
static const char c_szCellularComponent[];
/*!
* \brief
* Common Gene Ontology aspects/namespaces: molecular function.
*/
static const char c_szMolecularFunction[];
static bool Open(std::istream &istmOntology, std::istream &istmAnnotations, CGenome &Genome,
COntologyOBO &OntoBP, COntologyOBO &OntoMF, COntologyOBO &OntoCC, bool fDatabaseIDs = false,
bool fSynonyms = false);
COntologyOBO();
bool Open(std::istream &istmOntology, std::istream &istmAnnotations, CGenome &Genome,
const char *szNamespace, bool fDatabaseIDs = false, bool fSynonyms = false);
void GetGeneNames(std::vector <std::string> &vecstrGenes) const {
COntologyImpl::GetGeneNames(vecstrGenes);
}
void TermFinder(const CGenes &Genes, std::vector <STermFound> &vecsTerms, bool fBonferroni = true,
bool fRecursive = true, bool fGenome = false, float dPValue = 1,
const CGenes *pBackground = NULL) const {
COntologyImpl::TermFinder(Genes, vecsTerms, fBonferroni, fRecursive, fGenome, dPValue,
pBackground);
}
size_t GetNode(const std::string &strID) const {
return COntologyImpl::GetNode(strID);
}
bool IsAnnotated(size_t iTerm, const CGene &Gene, bool fRecursive) const {
return COntologyImpl::IsAnnotated(iTerm, Gene, fRecursive);
}
size_t GetNodes() const {
return COntologyImpl::GetNodes();
}
const std::string &GetID() const {
return COntologyImpl::GetID();
}
const std::string &GetID(size_t iTerm) const {
return COntologyImpl::GetID(iTerm);
}
const std::string &GetGloss(size_t iTerm) const {
return COntologyImpl::GetGloss(iTerm);
}
size_t GetParents(size_t iTerm) const {
return COntologyImpl::GetParents(iTerm);
}
size_t GetParent(size_t iTerm, size_t iParent) const {
return COntologyImpl::GetParent(iTerm, iParent);
}
size_t GetChildren(size_t iTerm) const {
return COntologyImpl::GetChildren(iTerm);
}
size_t GetChild(size_t iTerm, size_t iChild) const {
return COntologyImpl::GetChild(iTerm, iChild);
}
size_t GetGenes(size_t iTerm, bool fRecursive) const {
return COntologyImpl::GetGenes(iTerm, fRecursive);
}
const CGene &GetGene(size_t iTerm, size_t iGene) const {
return COntologyImpl::GetGene(iTerm, iGene);
}
bool GetParents(size_t iTerm, std::set <size_t> &setiParents) const {
return COntologyImpl::GetParents(iTerm, setiParents);
}
bool GetChildren(size_t iTerm, std::set <size_t> &setiChildren) const {
return COntologyImpl::GetChildren(iTerm, setiChildren);
}
};
/*!
* \brief
* Implements IOntology for the MIPS functional catalog.
*
* COntologyMIPS parses the "funcat" structure and annotation files from the Munich Information center for
* Protein Sequences.
*/
class COntologyMIPS : protected COntologyMIPSImpl, public IOntology {
public:
COntologyMIPS();
bool Open(std::istream &istmOntology, std::istream &istmAnnotations, CGenome &Genome);
void GetGeneNames(std::vector <std::string> &vecstrGenes) const {
return COntologyImpl::GetGeneNames(vecstrGenes);
}
void TermFinder(const CGenes &Genes, std::vector <STermFound> &vecsTerms, bool fBonferroni = true,
bool fRecursive = true, bool fGenome = false, float dPValue = 1,
const CGenes *pBackground = NULL) const {
COntologyImpl::TermFinder(Genes, vecsTerms, fBonferroni, fRecursive, fGenome, dPValue,
pBackground);
}
size_t GetNode(const std::string &strID) const {
return COntologyImpl::GetNode(strID);
}
bool IsAnnotated(size_t iTerm, const CGene &Gene, bool fRecursive) const {
return COntologyImpl::IsAnnotated(iTerm, Gene, fRecursive);
}
size_t GetNodes() const {
return COntologyImpl::GetNodes();
}
const std::string &GetID() const {
return COntologyImpl::GetID();
}
const std::string &GetID(size_t iTerm) const {
return COntologyImpl::GetID(iTerm);
}
const std::string &GetGloss(size_t iTerm) const {
return COntologyImpl::GetGloss(iTerm);
}
size_t GetParents(size_t iTerm) const {
return COntologyImpl::GetParents(iTerm);
}
size_t GetParent(size_t iTerm, size_t iParent) const {
return COntologyImpl::GetParent(iTerm, iParent);
}
size_t GetChildren(size_t iTerm) const {
return COntologyImpl::GetChildren(iTerm);
}
size_t GetChild(size_t iTerm, size_t iChild) const {
return COntologyImpl::GetChild(iTerm, iChild);
}
size_t GetGenes(size_t iTerm, bool fRecursive) const {
return COntologyImpl::GetGenes(iTerm, fRecursive);
}
const CGene &GetGene(size_t iTerm, size_t iGene) const {
return COntologyImpl::GetGene(iTerm, iGene);
}
bool GetParents(size_t iTerm, std::set <size_t> &setiParents) const {
return COntologyImpl::GetParents(iTerm, setiParents);
}
bool GetChildren(size_t iTerm, std::set <size_t> &setiChildren) const {
return COntologyImpl::GetChildren(iTerm, setiChildren);
}
};
/*!
* \brief
* Extends COntologyMIPS to include the (apparently defunct) "phencat" phenotype hierarchy.
*
* COntologyMIPS parses the "phencat" structure and annotation files from the Munich Information center for
* Protein Sequences.
*/
class COntologyMIPSPhenotypes : public COntologyMIPS {
public:
COntologyMIPSPhenotypes();
protected:
/*!
* \brief
* String identifier for the MIPS Phenotype ontology.
*/
static const char c_szMIPSPhen[];
};
/*!
* \brief
* Represents a set of ontology terms.
*
* Modeled after the "GO Slim" sets of GO terms, CSlim represents any set of IOntology terms. A slim is
* generally opened from a text file listing ontology IDs and can be used in a variety of settings, e.g.
* functional enrichment or gold standard generation.
*
* \see
* CDat::Open | CDataPair::Open
*/
class CSlim : CSlimImpl {
public:
bool Open(std::istream &istmSlim, const IOntology *pOntology);
void GetGeneNames(std::vector <std::string> &vecstrGenes) const;
/*!
* \brief
* Returns the gene at the requested index below the requested slim term.
*
* \param iSlim
* Index of the slim term for which a gene are retrieved.
*
* \param iGene
* Index of the gene to retrieve.
*
* \returns
* Gene annotated at the requested index below the requested slim term.
*
* \remarks
* iSlim must be less than CSlim::GetSlims; iGene must be less than CSlim::GetGenes. All genes annotated
* recursively to descendants of the slim terms are considered.
*
* \see
* IOntology::GetGenes
*/
const CGene &GetGene(size_t iSlim, size_t iGene) const {
return *m_vecvecpGenes[iSlim][iGene];
}
/*!
* \brief
* Returns the number of ontology terms in this slim.
*
* \returns
* Number of ontology terms in this slim.
*/
size_t GetSlims() const {
return m_vecstrSlims.size();
}
/*!
* \brief
* Returns the number of genes annotated below the given slim term.
*
* \param iSlim
* Index of the slim term for which genes are retrieved.
*
* \returns
* Number of genes annotated below the given slim term.
*
* \remarks
* iSlim must be less than CSlim::GetSlims.
*/
size_t GetGenes(size_t iSlim) const {
return m_vecvecpGenes[iSlim].size();
}
/*!
* \brief
* Returns the string ID of the ontology term at the given slim index.
*
* \param iSlim
* Index of the slim term to identify.
*
* \returns
* String ID of the requested slim index.
*
* \remarks
* iSlim must be less than CSlim::GetSlims.
*/
const std::string &GetSlim(size_t iSlim) const {
return m_vecstrSlims[iSlim];
}
/*!
* \brief
* Returns the number of ontology terms in the requested slim.
*
* \param iSlim
* ID of slim.
*
* \returns
* Number of ontology terms in the requested slim.
*
* \see
* GetNode
*/
size_t GetNodes(size_t iSlim) const {
return m_vecveciTerms[iSlim].size();
}
/*!
* \brief
* Returns the ontology term ID at the requested index in the requested slim.
*
* \param iSlim
* ID of slim.
*
* \param iTerm
* Index of ontology term to be returned.
*
* \returns
* Ontology term at the requested index within the given slim.
*
* \remarks
* No bounds checking is performed; iSlim must be less than GetSlims, and iTerm must be less than GetNodes.
*
* \see
* GetNodes
*/
size_t GetNode(size_t iSlim, size_t iTerm) const {
return m_vecveciTerms[iSlim][iTerm];
}
};
}
#endif // ANNOTATION_H
|
jsc-masshtab/veil-connect
|
src/vdi_client/vdi_manager.h
|
<reponame>jsc-masshtab/veil-connect
/*
* VeiL Connect
* VeiL VDI Client
* Based on virt-viewer and freerdp
*
* Author: http://mashtab.org/
*/
#ifndef VIRT_VIEWER_VEIL_VDI_MANAGER_H
#define VIRT_VIEWER_VEIL_VDI_MANAGER_H
#include <gtk/gtk.h>
#include "settings_data.h"
#include "vdi_session.h"
#define TYPE_VDI_MANAGER ( vdi_manager_get_type( ) )
#define VDI_MANAGER( obj ) ( G_TYPE_CHECK_INSTANCE_CAST( (obj), TYPE_VDI_MANAGER, VdiManager ) )
#define IS_VDI_MANAGER( obj ) ( G_TYPE_CHECK_INSTANCE_TYPE( (obj), TYPE_VDI_MANAGER ) )
#define VDI_MANAGER_CLASS( klass ) ( G_TYPE_CHECK_CLASS_CAST( (klass), TYPE_VDI_MANAGER, VdiManagerClass ) )
#define IS_VDI_MANAGER_CLASS( klass ) ( G_TYPE_CHECK_CLASS_TYPE( (klass), TYPE_VDI_MANAGER ) )
#define VDI_MANAGER_GET_CLASS( obj ) ( G_TYPE_INSTANCE_GET_CLASS( (obj), TYPE_VDI_MANAGER, VdiManagerClass ) )
typedef struct _VdiManager VdiManager;
typedef struct _VdiManagerClass VdiManagerClass;
struct _VdiManager
{
GObject parent;
GtkBuilder *builder;
GtkWidget *window;
GtkWidget *button_quit;
GtkWidget *button_renew;
GtkWidget *btn_open_user_settings;
GtkWidget *btn_cancel_requests;
GtkWidget *vm_main_box;
GtkWidget *gtk_flow_box;
GtkWidget *status_label;
GtkWidget *vm_prep_progress_bar;
GtkWidget *main_vm_spinner;
GtkWidget *label_is_vdi_online;
GArray *pool_widgets_array;
ConnectionInfo ci;
gulong ws_conn_changed_handle;
gulong ws_cmd_received_handle;
gulong auth_fail_detected_handle;
gulong vm_prep_progress_handle;
int current_vm_request_id;
ConnectSettingsData *p_conn_data;
};
struct _VdiManagerClass
{
GObjectClass parent_class;
/* signals */
};
GType vdi_manager_get_type( void ) G_GNUC_CONST;
RemoteViewerState vdi_manager_dialog(VdiManager *self, ConnectSettingsData *conn_data);
VdiManager *vdi_manager_new(void);
#endif //VIRT_VIEWER_VEIL_VDI_MANAGER_H
|
ybadmus/ASW
|
src/components/BusinessNews/index.js
|
<filename>src/components/BusinessNews/index.js
import React from 'react';
import {Link} from "react-router-dom";
const BusinessNews = ({businessNews, headerHide}) => {
return (
<div className="row">
<div className="col-12">
<div className="businerss_news">
{headerHide ? '' :
<div className="row">
<div className="col-12 align-self-center">
<h2 className="widget-title">Latest News</h2>
</div>
</div>}
<div className="row">
<div className="col-12">
{businessNews.map((item, i) => (
<div key={i} className="single_post post_type3 post_type12 mb30">
<div className="post_img">
<div className="img_wrap">
<Link to="#">
<img src={item.image} alt="thumb"/>
</Link>
</div>
</div>
<div className="single_post_text">
<div className="meta3"><Link to="#">{item.category}</Link>
<Link to="#">{item.date}</Link>
</div>
<h4><Link to={`/post/${item.id}`}>{item.title}</Link></h4>
<div className="space-10"/>
<p className="post-p">{item.description} ...</p>
<div className="space-20"/>
<Link to={`/post/${item.id}`} className="readmore">Read more</Link>
</div>
</div>
))}
</div>
</div>
</div>
</div>
</div>
);
};
export default BusinessNews;
|
maulikjs/hue
|
desktop/core/ext-py/celery-4.2.1/t/unit/worker/test_autoscale.py
|
from __future__ import absolute_import, unicode_literals
import sys
from case import Mock, mock, patch
from celery.concurrency.base import BasePool
from celery.five import monotonic
from celery.utils.objects import Bunch
from celery.worker import autoscale, state
class MockPool(BasePool):
shrink_raises_exception = False
shrink_raises_ValueError = False
def __init__(self, *args, **kwargs):
super(MockPool, self).__init__(*args, **kwargs)
self._pool = Bunch(_processes=self.limit)
def grow(self, n=1):
self._pool._processes += n
def shrink(self, n=1):
if self.shrink_raises_exception:
raise KeyError('foo')
if self.shrink_raises_ValueError:
raise ValueError('foo')
self._pool._processes -= n
@property
def num_processes(self):
return self._pool._processes
class test_WorkerComponent:
def test_register_with_event_loop(self):
parent = Mock(name='parent')
parent.autoscale = True
parent.consumer.on_task_message = set()
w = autoscale.WorkerComponent(parent)
assert parent.autoscaler is None
assert w.enabled
hub = Mock(name='hub')
w.create(parent)
w.register_with_event_loop(parent, hub)
assert (parent.autoscaler.maybe_scale in
parent.consumer.on_task_message)
hub.call_repeatedly.assert_called_with(
parent.autoscaler.keepalive, parent.autoscaler.maybe_scale,
)
parent.hub = hub
hub.on_init = []
w.instantiate = Mock()
w.register_with_event_loop(parent, Mock(name='loop'))
assert parent.consumer.on_task_message
class test_Autoscaler:
def setup(self):
self.pool = MockPool(3)
def test_stop(self):
class Scaler(autoscale.Autoscaler):
alive = True
joined = False
def is_alive(self):
return self.alive
def join(self, timeout=None):
self.joined = True
worker = Mock(name='worker')
x = Scaler(self.pool, 10, 3, worker=worker)
x._is_stopped.set()
x.stop()
assert x.joined
x.joined = False
x.alive = False
x.stop()
assert not x.joined
@mock.sleepdeprived(module=autoscale)
def test_body(self):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
x.body()
assert x.pool.num_processes == 3
_keep = [Mock(name='req{0}'.format(i)) for i in range(20)]
[state.task_reserved(m) for m in _keep]
x.body()
x.body()
assert x.pool.num_processes == 10
worker.consumer._update_prefetch_count.assert_called()
state.reserved_requests.clear()
x.body()
assert x.pool.num_processes == 10
x._last_scale_up = monotonic() - 10000
x.body()
assert x.pool.num_processes == 3
worker.consumer._update_prefetch_count.assert_called()
def test_run(self):
class Scaler(autoscale.Autoscaler):
scale_called = False
def body(self):
self.scale_called = True
self._is_shutdown.set()
worker = Mock(name='worker')
x = Scaler(self.pool, 10, 3, worker=worker)
x.run()
assert x._is_shutdown.isSet()
assert x._is_stopped.isSet()
assert x.scale_called
def test_shrink_raises_exception(self):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
x.scale_up(3)
x.pool.shrink_raises_exception = True
x._shrink(1)
@patch('celery.worker.autoscale.debug')
def test_shrink_raises_ValueError(self, debug):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
x.scale_up(3)
x._last_scale_up = monotonic() - 10000
x.pool.shrink_raises_ValueError = True
x.scale_down(1)
assert debug.call_count
def test_update_and_force(self):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
assert x.processes == 3
x.force_scale_up(5)
assert x.processes == 8
x.update(5, None)
assert x.processes == 5
x.force_scale_down(3)
assert x.processes == 2
x.update(None, 3)
assert x.processes == 3
x.force_scale_down(1000)
assert x.min_concurrency == 0
assert x.processes == 0
x.force_scale_up(1000)
x.min_concurrency = 1
x.force_scale_down(1)
x.update(max=300, min=10)
x.update(max=300, min=2)
x.update(max=None, min=None)
def test_info(self):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
info = x.info()
assert info['max'] == 10
assert info['min'] == 3
assert info['current'] == 3
@patch('os._exit')
def test_thread_crash(self, _exit):
class _Autoscaler(autoscale.Autoscaler):
def body(self):
self._is_shutdown.set()
raise OSError('foo')
worker = Mock(name='worker')
x = _Autoscaler(self.pool, 10, 3, worker=worker)
stderr = Mock()
p, sys.stderr = sys.stderr, stderr
try:
x.run()
finally:
sys.stderr = p
_exit.assert_called_with(1)
stderr.write.assert_called()
@mock.sleepdeprived(module=autoscale)
def test_no_negative_scale(self):
total_num_processes = []
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
x.body() # the body func scales up or down
_keep = [Mock(name='req{0}'.format(i)) for i in range(35)]
for req in _keep:
state.task_reserved(req)
x.body()
total_num_processes.append(self.pool.num_processes)
for req in _keep:
state.task_ready(req)
x.body()
total_num_processes.append(self.pool.num_processes)
assert all(x.min_concurrency <= i <= x.max_concurrency
for i in total_num_processes)
|
zabrewer/batfish
|
projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/routing_policy/communities/CommunityExprVisitor.java
|
package org.batfish.datamodel.routing_policy.communities;
/** A visitor of {@link CommunityExpr} that takes 1 generic argument and returns a generic value. */
public interface CommunityExprVisitor<T, U> {
T visitRouteTargetExtendedCommunityExpr(
RouteTargetExtendedCommunityExpr extendedCommunityTypeGlobalHighLowLocal, U arg);
T visitStandardCommunityHighLowExprs(
StandardCommunityHighLowExprs standardCommunityHighLowExprs, U arg);
}
|
gmenti/authorization-service
|
test/unit/helpers.js
|
const chai = require('chai');
const sinon = require('sinon');
const { expect } = chai;
module.exports = { sinon, expect };
|
danewalton/azure-iot-sdk-python
|
azure-iot-device/azure/iot/device/aio/__init__.py
|
"""Azure IoT Device Library - Asynchronous
This library provides asynchronous clients for communicating with Azure IoT services
from an IoT device.
"""
# Import all exposed items in aio subpackages to expose them via this package
from azure.iot.device.iothub.aio import *
from azure.iot.device.provisioning.aio import *
# Import the subpackages themselves in order to set the __all__
import azure.iot.device.iothub.aio
import azure.iot.device.provisioning.aio
# Import the module to generate missing documentation
from . import patch_documentation
# TODO: remove this chunk of commented code if we truly no longer want to take this approach
# Dynamically patch the clients to add shim implementations for all the inherited methods.
# This is necessary to generate accurate online docs.
# It SHOULD not impact the functionality of the methods themselves in any way.
# NOTE In the event of addition of new methods and generation of accurate documentation
# for those methods we have to append content to "patch_documentation.py" file.
# In order to do so please uncomment the "patch.add_shims" lines below,
# enable logging with level "DEBUG" in a python terminal and do
# "import azure.iot.device". The delta between the newly generated output
# and the existing content of "patch_documentation.py" should be appended to
# the function "execute_patch_for_sync" in "patch_documentation.py".
# Once done please again omment out the "patch.add_shims" lines below.
# patch.add_shims_for_inherited_methods(IoTHubDeviceClient) # noqa: F405
# patch.add_shims_for_inherited_methods(IoTHubModuleClient) # noqa: F405
# patch.add_shims_for_inherited_methods(ProvisioningDeviceClient) # noqa: F405
patch_documentation.execute_patch_for_async()
__all__ = azure.iot.device.iothub.aio.__all__ + azure.iot.device.provisioning.aio.__all__
|
Toromino/chromiumos-platform2
|
power_manager/common/test_main_loop_runner.cc
|
// Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "power_manager/common/test_main_loop_runner.h"
#include <base/check.h>
#include <base/location.h>
#include <base/logging.h>
#include <base/run_loop.h>
#include "power_manager/common/util.h"
namespace power_manager {
TestMainLoopRunner::TestMainLoopRunner() : timed_out_(false) {}
TestMainLoopRunner::~TestMainLoopRunner() {}
bool TestMainLoopRunner::StartLoop(base::TimeDelta timeout_delay) {
CHECK(!runner_.get()) << "Loop is already running";
timed_out_ = false;
timeout_timer_.Start(FROM_HERE, timeout_delay, this,
&TestMainLoopRunner::OnTimeout);
runner_.reset(new base::RunLoop);
runner_->Run();
runner_.reset();
return !timed_out_;
}
void TestMainLoopRunner::StopLoop() {
CHECK(runner_.get()) << "Loop isn't running";
timeout_timer_.Stop();
runner_->Quit();
}
bool TestMainLoopRunner::LoopIsRunning() const {
return runner_.get();
}
void TestMainLoopRunner::OnTimeout() {
CHECK(runner_.get());
timed_out_ = true;
runner_->Quit();
}
} // namespace power_manager
|
antoree/registry-creds
|
vendor/github.com/googleapis/gax-go/path_template_test.go
|
<reponame>antoree/registry-creds
// Copyright 2016, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package gax
import "testing"
func TestPathTemplateMatchRender(t *testing.T) {
testCases := []struct {
message string
template string
path string
values map[string]string
}{
{
"base",
"buckets/*/*/objects/*",
"buckets/f/o/objects/bar",
map[string]string{"$0": "f", "$1": "o", "$2": "bar"},
},
{
"path wildcards",
"bar/**/foo/*",
"bar/foo/foo/foo/bar",
map[string]string{"$0": "foo/foo", "$1": "bar"},
},
{
"named binding",
"buckets/{foo}/objects/*",
"buckets/foo/objects/bar",
map[string]string{"$0": "bar", "foo": "foo"},
},
{
"named binding with colon",
"buckets/{foo}/objects/*",
"buckets/foo:boo/objects/bar",
map[string]string{"$0": "bar", "foo": "foo:boo"},
},
{
"named binding with complex patterns",
"buckets/{foo=x/*/y/**}/objects/*",
"buckets/x/foo/y/bar/baz/objects/quox",
map[string]string{"$0": "quox", "foo": "x/foo/y/bar/baz"},
},
{
"starts with slash",
"/foo/*",
"/foo/bar",
map[string]string{"$0": "bar"},
},
}
for _, testCase := range testCases {
pt, err := NewPathTemplate(testCase.template)
if err != nil {
t.Errorf("[%s] Failed to parse template %s: %v", testCase.message, testCase.template, err)
continue
}
values, err := pt.Match(testCase.path)
if err != nil {
t.Errorf("[%s] PathTemplate '%s' failed to match with '%s', %v", testCase.message, testCase.template, testCase.path, err)
continue
}
for key, expected := range testCase.values {
actual, ok := values[key]
if !ok {
t.Errorf("[%s] The matched data misses the value for %s", testCase.message, key)
continue
}
delete(values, key)
if actual != expected {
t.Errorf("[%s] Failed to match: value for '%s' is expected '%s' but is actually '%s'", testCase.message, key, expected, actual)
}
}
if len(values) != 0 {
t.Errorf("[%s] The matched data has unexpected keys: %v", testCase.message, values)
}
built, err := pt.Render(testCase.values)
if err != nil || built != testCase.path {
t.Errorf("[%s] Built path '%s' is different from the expected '%s', %v", testCase.message, built, testCase.path, err)
}
}
}
func TestPathTemplateMatchFailure(t *testing.T) {
testCases := []struct {
message string
template string
path string
}{
{
"too many paths",
"buckets/*/*/objects/*",
"buckets/f/o/o/objects/bar",
},
{
"missing last path",
"buckets/*/*/objects/*",
"buckets/f/o/objects",
},
{
"too many paths at end",
"buckets/*/*/objects/*",
"buckets/f/o/objects/too/long",
},
}
for _, testCase := range testCases {
pt, err := NewPathTemplate(testCase.template)
if err != nil {
t.Errorf("[%s] Failed to parse path %s: %v", testCase.message, testCase.template, err)
continue
}
if values, err := pt.Match(testCase.path); err == nil {
t.Errorf("[%s] PathTemplate %s doesn't expect to match %s, but succeeded somehow. Match result: %v", testCase.message, testCase.template, testCase.path, values)
}
}
}
func TestPathTemplateRenderTooManyValues(t *testing.T) {
// Test cases where Render() succeeds but Match() doesn't return the same map.
testCases := []struct {
message string
template string
values map[string]string
expected string
}{
{
"too many",
"bar/*/foo/*",
map[string]string{"$0": "_1", "$1": "_2", "$2": "_3"},
"bar/_1/foo/_2",
},
}
for _, testCase := range testCases {
pt, err := NewPathTemplate(testCase.template)
if err != nil {
t.Errorf("[%s] Failed to parse template %s (error %v)", testCase.message, testCase.template, err)
continue
}
if result, err := pt.Render(testCase.values); err != nil || result != testCase.expected {
t.Errorf("[%s] Failed to build the path (expected '%s' but returned '%s'", testCase.message, testCase.expected, result)
}
}
}
func TestPathTemplateParseErrors(t *testing.T) {
testCases := []struct {
message string
template string
}{
{
"multiple path wildcards",
"foo/**/bar/**",
},
{
"recursive named bindings",
"foo/{foo=foo/{bar}/baz/*}/baz/*",
},
{
"complicated multiple path wildcards patterns",
"foo/{foo=foo/**/bar/*}/baz/**",
},
{
"consective slashes",
"foo//bar",
},
{
"invalid variable pattern",
"foo/{foo=foo/*/}bar",
},
{
"same name multiple times",
"foo/{foo}/bar/{foo}",
},
{
"empty string after '='",
"foo/{foo=}/bar",
},
}
for _, testCase := range testCases {
if pt, err := NewPathTemplate(testCase.template); err == nil {
t.Errorf("[%s] Template '%s' should fail to be parsed, but succeeded and returned %+v", testCase.message, testCase.template, pt)
}
}
}
|
tjohnston-softdev/fox-controller-app
|
fox-api/device-params.js
|
// Help functions for Device and Node APIs.
const rioIndex = require("../fox-devices/remote_io/remote-io.index");
// Reads page string request parameter.
function readPagePart(parameterString)
{
var givenType = typeof parameterString;
var readRes = null;
if (givenType === "string" && parameterString.length > 0)
{
readRes = parameterString;
}
return readRes;
}
// Retrieves Remote IO device information.
function retrieveDeviceObject(inpDeviceID, deviceCallback)
{
var apiResult = {};
// Result properties.
apiResult["outcome"] = -1;
apiResult["deviceInfo"] = {};
apiResult["messageText"] = "";
if (inpDeviceID !== null)
{
// Device entered - Query information.
callRemoteIoRead(inpDeviceID, apiResult, deviceCallback);
}
else
{
// Not entered.
apiResult.outcome = -1;
apiResult.messageText = "Device ID Not Entered";
return deviceCallback(apiResult);
}
}
// Queries Remote IO Database.
function callRemoteIoRead(deviceID, resObject, rioCallback)
{
rioIndex.getRemoteIoDevice(deviceID, function (rioErr, retrievedObject)
{
if (rioErr !== null)
{
// Query error.
resObject.outcome = 0;
resObject.messageText = rioErr.message;
}
else
{
// Successful.
resObject.outcome = 1;
resObject.deviceInfo = retrievedObject;
}
return rioCallback(resObject);
});
}
module.exports =
{
readPage: readPagePart,
retrieveDevice: retrieveDeviceObject
};
|
CoOwner/VisualPvP
|
org/sonatype/guice/bean/locators/QualifyingStrategy.java
|
<reponame>CoOwner/VisualPvP
package org.sonatype.guice.bean.locators;
import com.google.inject.Binding;
import com.google.inject.Key;
import com.google.inject.name.Named;
import java.lang.annotation.Annotation;
enum QualifyingStrategy
{
UNRESTRICTED, NAMED, NAMED_WITH_ATTRIBUTES, MARKED, MARKED_WITH_ATTRIBUTES;
private QualifyingStrategy() {}
abstract Annotation qualify(Key<?> paramKey, Binding<?> paramBinding);
}
|
certik/paraview
|
VTK/Rendering/vtkXRenderWindowTclInteractor.cxx
|
/*=========================================================================
Program: Visualization Toolkit
Module: $RCSfile: vtkXRenderWindowTclInteractor.cxx,v $
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkActor.h"
#include "vtkActorCollection.h"
#include "vtkObjectFactory.h"
#include "vtkOldStyleCallbackCommand.h"
#include "vtkPoints.h"
#include "vtkXOpenGLRenderWindow.h"
#include "vtkXRenderWindowTclInteractor.h"
#include <X11/Shell.h>
#include <X11/X.h>
#include <X11/keysym.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <vtkTk.h>
vtkCxxRevisionMacro(vtkXRenderWindowTclInteractor, "$Revision: 1.53 $");
vtkStandardNewMacro(vtkXRenderWindowTclInteractor);
// steal the first three elements of the TkMainInfo stuct
// we don't care about the rest of the elements.
struct TkMainInfo
{
int refCount;
struct TkWindow *winPtr;
Tcl_Interp *interp;
};
#if ((TK_MAJOR_VERSION <= 4)||((TK_MAJOR_VERSION == 8)&&(TK_MINOR_VERSION == 0)))
extern TkMainInfo *tkMainWindowList;
#else
extern "C" {TkMainInfo *TkGetMainInfoList();}
#endif
// returns 1 if done
static int vtkTclEventProc(XtPointer clientData,XEvent *event)
{
Boolean ctd;
vtkXOpenGLRenderWindow *rw;
rw = (vtkXOpenGLRenderWindow *)
(((vtkXRenderWindowTclInteractor *)clientData)->GetRenderWindow());
if (rw->GetWindowId() == (reinterpret_cast<XAnyEvent *>(event))->window)
{
vtkXRenderWindowTclInteractorCallback((Widget)NULL,clientData, event, &ctd);
ctd = 0;
}
else
{
ctd = 1;
}
return !ctd;
}
extern "C"
{
void vtkXTclTimerProc(ClientData clientData)
{
XtIntervalId id;
vtkXRenderWindowTclInteractorTimer((XtPointer)clientData,&id);
}
}
// Construct object so that light follows camera motion.
vtkXRenderWindowTclInteractor::vtkXRenderWindowTclInteractor()
{
this->App = 0;
this->top = 0;
this->TopLevelShell = NULL;
this->BreakLoopFlag = 0;
}
vtkXRenderWindowTclInteractor::~vtkXRenderWindowTclInteractor()
{
if (this->Initialized)
{
Tk_DeleteGenericHandler((Tk_GenericProc *)vtkTclEventProc,
(ClientData)this);
}
}
void vtkXRenderWindowTclInteractor::SetWidget(Widget foo)
{
this->top = foo;
}
// This method will store the top level shell widget for the interactor.
// This method and the method invocation sequence applies for:
// 1 vtkRenderWindow-Interactor pair in a nested widget heirarchy
// multiple vtkRenderWindow-Interactor pairs in the same top level shell
// It is not needed for
// 1 vtkRenderWindow-Interactor pair as the direct child of a top level shell
// multiple vtkRenderWindow-Interactor pairs, each in its own top level shell
//
// The method, along with EnterNotify event, changes the keyboard focus among
// the widgets/vtkRenderWindow(s) so the Interactor(s) can receive the proper
// keyboard events. The following calls need to be made:
// vtkRenderWindow's display ID need to be set to the top level shell's
// display ID.
// vtkXRenderWindowTclInteractor's Widget has to be set to the vtkRenderWindow's
// container widget
// vtkXRenderWindowTclInteractor's TopLevel has to be set to the top level
// shell widget
// note that the procedure for setting up render window in a widget needs to
// be followed. See vtkRenderWindowInteractor's SetWidget method.
//
// If multiple vtkRenderWindow-Interactor pairs in SEPARATE windows are desired,
// do not set the display ID (Interactor will create them as needed. Alternatively,
// create and set distinct DisplayID for each vtkRenderWindow. Using the same
// display ID without setting the parent widgets will cause the display to be
// reinitialized every time an interactor is initialized), do not set the
// widgets (so the render windows would be in their own windows), and do
// not set TopLevelShell (each has its own top level shell already)
void vtkXRenderWindowTclInteractor::SetTopLevelShell(Widget topLevel)
{
this->TopLevelShell = topLevel;
}
static void vtkBreakTclLoop(void *iren)
{
((vtkXRenderWindowTclInteractor*)iren)->SetBreakLoopFlag(1);
}
void vtkXRenderWindowTclInteractor::Start()
{
// Let the compositing handle the event loop if it wants to.
if (this->HasObserver(vtkCommand::StartEvent) && !this->HandleEventLoop)
{
this->InvokeEvent(vtkCommand::StartEvent,NULL);
return;
}
vtkOldStyleCallbackCommand *cbc = vtkOldStyleCallbackCommand::New();
cbc->Callback = vtkBreakTclLoop;
cbc->ClientData = this;
unsigned long ExitTag = this->AddObserver(vtkCommand::ExitEvent,cbc, 0.5);
cbc->Delete();
this->BreakLoopFlag = 0;
while(this->BreakLoopFlag == 0)
{
Tk_DoOneEvent(0);
}
this->RemoveObserver(ExitTag);
}
// Initializes the event handlers
void vtkXRenderWindowTclInteractor::Initialize(XtAppContext app)
{
this->App = app;
this->Initialize();
}
// Begin processing keyboard strokes.
void vtkXRenderWindowTclInteractor::Initialize()
{
vtkXOpenGLRenderWindow *ren;
int *size;
// make sure we have a RenderWindow and camera
if ( ! this->RenderWindow)
{
vtkErrorMacro(<<"No renderer defined!");
return;
}
this->Initialized = 1;
ren = (vtkXOpenGLRenderWindow *)(this->RenderWindow);
// use the same display as tcl/tk
#if ((TK_MAJOR_VERSION <= 4)||((TK_MAJOR_VERSION == 8)&&(TK_MINOR_VERSION == 0)))
ren->SetDisplayId(Tk_Display(tkMainWindowList->winPtr));
#else
ren->SetDisplayId(Tk_Display(TkGetMainInfoList()->winPtr));
#endif
this->DisplayId = ren->GetDisplayId();
// get the info we need from the RenderingWindow
size = ren->GetSize();
size = ren->GetSize();
ren->Start();
this->WindowId = ren->GetWindowId();
size = ren->GetSize();
this->Size[0] = size[0];
this->Size[1] = size[1];
this->Enable();
// Set the event handler
Tk_CreateGenericHandler((Tk_GenericProc *)vtkTclEventProc,(ClientData)this);
}
void vtkXRenderWindowTclInteractor::Enable()
{
// avoid cycles of calling Initialize() and Enable()
if (this->Enabled)
{
return;
}
// Select the events that we want to respond to
// (Multiple calls to XSelectInput overrides the previous settings)
XSelectInput(this->DisplayId, this->WindowId,
KeyPressMask | KeyReleaseMask |
ButtonPressMask | ButtonReleaseMask |
ExposureMask | StructureNotifyMask |
EnterWindowMask | LeaveWindowMask |
PointerMotionMask | PointerMotionMask);
// Setup for capturing the window deletion
this->KillAtom = XInternAtom(this->DisplayId,"WM_DELETE_WINDOW",False);
XSetWMProtocols(this->DisplayId,this->WindowId,&this->KillAtom,1);
this->Enabled = 1;
this->Modified();
}
void vtkXRenderWindowTclInteractor::Disable()
{
if (!this->Enabled)
{
return;
}
// Remove the all the events that we registered for EXCEPT for
// StructureNotifyMask event since we need to keep track of the window
// size (we will not render if we are disabled, we simply track the window
// size changes for a possible Enable()). Expose events are disabled.
// (Multiple calls to XSelectInput overrides the previous settings)
XSelectInput(this->DisplayId,this->WindowId,
StructureNotifyMask );
this->Enabled = 0;
this->Modified();
}
void vtkXRenderWindowTclInteractor::PrintSelf(ostream& os, vtkIndent indent)
{
this->Superclass::PrintSelf(os,indent);
if (this->App)
{
os << indent << "App: " << this->App << "\n";
}
else
{
os << indent << "App: (none)\n";
}
os << indent << "Break Loop Flag: "
<< (this->BreakLoopFlag ? "On\n" : "Off\n");
}
void vtkXRenderWindowTclInteractor::UpdateSize(int x,int y)
{
// if the size changed send this on to the RenderWindow
if ((x != this->Size[0])||(y != this->Size[1]))
{
this->Size[0] = x;
this->Size[1] = y;
this->RenderWindow->SetSize(x,y);
}
}
void vtkXRenderWindowTclInteractorCallback(Widget vtkNotUsed(w),
XtPointer client_data,
XEvent *event,
Boolean *vtkNotUsed(ctd))
{
vtkXRenderWindowTclInteractor *me;
me = (vtkXRenderWindowTclInteractor *)client_data;
int xp, yp;
switch (event->type)
{
case Expose:
{
if (!me->Enabled)
{
return;
}
XEvent result;
while (XCheckTypedWindowEvent(me->DisplayId,
me->WindowId,
Expose,
&result))
{
// just getting the expose configure event
event = &result;
}
int width = (reinterpret_cast<XConfigureEvent *>(event))->width;
int height = (reinterpret_cast<XConfigureEvent *>(event))->height;
me->SetEventSize(width, height);
xp = (reinterpret_cast<XButtonEvent*>(event))->x;
yp = (reinterpret_cast<XButtonEvent*>(event))->y;
yp = me->Size[1] - xp - 1;
me->SetEventPosition(xp, yp);
// only render if we are currently accepting events
if (me->Enabled)
{
me->InvokeEvent(vtkCommand::ExposeEvent,NULL);
me->Render();
}
}
break;
case MapNotify:
{
// only render if we are currently accepting events
if (me->Enabled && me->GetRenderWindow()->GetNeverRendered())
{
me->Render();
}
}
break;
case ConfigureNotify:
{
XEvent result;
while (XCheckTypedWindowEvent(me->DisplayId,
me->WindowId,
ConfigureNotify,
&result))
{
// just getting the last configure event
event = &result;
}
int width = (reinterpret_cast<XConfigureEvent *>(event))->width;
int height = (reinterpret_cast<XConfigureEvent *>(event))->height;
if (width != me->Size[0] || height != me->Size[1])
{
me->UpdateSize(width, height);
xp = (reinterpret_cast<XButtonEvent*>(event))->x;
yp = (reinterpret_cast<XButtonEvent*>(event))->y;
me->SetEventPosition(xp, me->Size[1] - yp - 1);
// only render if we are currently accepting events
if (me->Enabled)
{
me->InvokeEvent(vtkCommand::ConfigureEvent,NULL);
me->Render();
}
}
}
break;
case ButtonPress:
{
if (!me->Enabled)
{
return;
}
int ctrl =
(reinterpret_cast<XButtonEvent *>(event))->state & ControlMask ? 1 : 0;
int shift =
(reinterpret_cast<XButtonEvent *>(event))->state & ShiftMask ? 1 : 0;
int alt =
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0;
xp = (reinterpret_cast<XButtonEvent*>(event))->x;
yp = (reinterpret_cast<XButtonEvent*>(event))->y;
me->SetEventInformationFlipY(xp,
yp,
ctrl,
shift);
me->SetAltKey(alt);
switch ((reinterpret_cast<XButtonEvent *>(event))->button)
{
case Button1:
me->InvokeEvent(vtkCommand::LeftButtonPressEvent, NULL);
break;
case Button2:
me->InvokeEvent(vtkCommand::MiddleButtonPressEvent, NULL);
break;
case Button3:
me->InvokeEvent(vtkCommand::RightButtonPressEvent, NULL);
break;
case Button4:
me->InvokeEvent(vtkCommand::MouseWheelForwardEvent, NULL);
break;
case Button5:
me->InvokeEvent(vtkCommand::MouseWheelBackwardEvent, NULL);
break;
}
}
break;
case ButtonRelease:
{
if (!me->Enabled)
{
return;
}
int ctrl =
(reinterpret_cast<XButtonEvent *>(event))->state & ControlMask ? 1 : 0;
int shift =
(reinterpret_cast<XButtonEvent *>(event))->state & ShiftMask ? 1 : 0;
int alt =
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0;
xp = (reinterpret_cast<XButtonEvent*>(event))->x;
yp = (reinterpret_cast<XButtonEvent*>(event))->y;
// check for double click
static int MousePressTime = 0;
int repeat = 0;
// 400 ms threshold by default is probably good to start
if((reinterpret_cast<XButtonEvent*>(event)->time - MousePressTime) < 400)
{
MousePressTime -= 2000; // no double click next time
repeat = 1;
}
else
{
MousePressTime = reinterpret_cast<XButtonEvent*>(event)->time;
}
me->SetEventInformationFlipY(xp,
yp,
ctrl,
shift,
0,
repeat);
me->SetAltKey(alt);
switch ((reinterpret_cast<XButtonEvent *>(event))->button)
{
case Button1:
me->InvokeEvent(vtkCommand::LeftButtonReleaseEvent, NULL);
break;
case Button2:
me->InvokeEvent(vtkCommand::MiddleButtonReleaseEvent, NULL);
break;
case Button3:
me->InvokeEvent(vtkCommand::RightButtonReleaseEvent, NULL);
break;
}
}
break;
case EnterNotify:
{
// Force the keyboard focus to be this render window
if (me->TopLevelShell != NULL)
{
XtSetKeyboardFocus(me->TopLevelShell, me->top);
}
if (me->Enabled)
{
XEnterWindowEvent *e = reinterpret_cast<XEnterWindowEvent *>(event);
me->SetEventInformationFlipY(e->x,
e->y,
(e->state & ControlMask) != 0,
(e->state & ShiftMask) != 0);
me->SetAltKey(
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0);
me->InvokeEvent(vtkCommand::EnterEvent, NULL);
}
}
break;
case LeaveNotify:
{
if (me->Enabled)
{
XLeaveWindowEvent *e = reinterpret_cast<XLeaveWindowEvent *>(event);
me->SetEventInformationFlipY(e->x,
e->y,
(e->state & ControlMask) != 0,
(e->state & ShiftMask) != 0);
me->SetAltKey(
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0);
me->InvokeEvent(vtkCommand::LeaveEvent, NULL);
}
}
break;
case KeyPress:
{
if (!me->Enabled)
{
return;
}
int ctrl =
(reinterpret_cast<XButtonEvent *>(event))->state & ControlMask ? 1 : 0;
int shift =
(reinterpret_cast<XButtonEvent *>(event))->state & ShiftMask ? 1 : 0;
int alt =
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0;
KeySym ks;
static char buffer[20];
buffer[0] = '\0';
XLookupString(reinterpret_cast<XKeyEvent *>(event),buffer, 20, &ks,NULL);
xp = (reinterpret_cast<XKeyEvent*>(event))->x;
yp = (reinterpret_cast<XKeyEvent*>(event))->y;
me->SetEventInformationFlipY(xp,
yp,
ctrl,
shift,
buffer[0],
1,
XKeysymToString(ks));
me->SetAltKey(alt);
me->InvokeEvent(vtkCommand::KeyPressEvent, NULL);
me->InvokeEvent(vtkCommand::CharEvent, NULL);
}
break;
case KeyRelease:
{
if (!me->Enabled)
{
return;
}
int ctrl =
(reinterpret_cast<XButtonEvent *>(event))->state & ControlMask ? 1 : 0;
int shift =
(reinterpret_cast<XButtonEvent *>(event))->state & ShiftMask ? 1 : 0;
int alt =
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0;
KeySym ks;
static char buffer[20];
buffer[0] = '\0';
XLookupString(reinterpret_cast<XKeyEvent *>(event),buffer, 20, &ks,NULL);
xp = (reinterpret_cast<XKeyEvent *>(event))->x;
yp = (reinterpret_cast<XKeyEvent *>(event))->y;
me->SetEventInformationFlipY(xp,
yp,
ctrl,
shift,
buffer[0],
1,
XKeysymToString(ks));
me->SetAltKey(alt);
me->InvokeEvent(vtkCommand::KeyReleaseEvent, NULL);
}
break;
case MotionNotify:
{
if (!me->Enabled)
{
return;
}
int ctrl =
(reinterpret_cast<XButtonEvent *>(event))->state & ControlMask ? 1 : 0;
int shift =
(reinterpret_cast<XButtonEvent *>(event))->state & ShiftMask ? 1 : 0;
int alt =
(reinterpret_cast<XButtonEvent *>(event))->state & Mod1Mask ? 1 : 0;
xp = (reinterpret_cast<XMotionEvent*>(event))->x;
yp = (reinterpret_cast<XMotionEvent*>(event))->y;
me->SetEventInformationFlipY(xp,
yp,
ctrl,
shift);
me->SetAltKey(alt);
me->InvokeEvent(vtkCommand::MouseMoveEvent, NULL);
}
break;
case ClientMessage:
{
if( static_cast<Atom>(event->xclient.data.l[0]) == me->KillAtom )
{
me->InvokeEvent(vtkCommand::ExitEvent, NULL);
}
}
break;
}
}
void vtkXRenderWindowTclInteractorTimer(XtPointer client_data,
XtIntervalId *vtkNotUsed(id))
{
vtkXRenderWindowTclInteractor *me;
me = (vtkXRenderWindowTclInteractor *)client_data;
Window root,child;
int root_x,root_y;
int x,y;
unsigned int keys;
// get the pointer position
XQueryPointer(me->DisplayId,
me->WindowId,
&root,
&child,
&root_x,
&root_y,
&x,
&y,
&keys);
if (!me->Enabled)
{
return;
}
me->SetEventInformationFlipY(x,
y,
0,
0);
me->InvokeEvent(vtkCommand::TimerEvent, NULL);
}
int vtkXRenderWindowTclInteractor::CreateTimer(int vtkNotUsed(timertype))
{
Tk_CreateTimerHandler(this->TimerDuration,vtkXTclTimerProc,(ClientData)this);
return 1;
}
int vtkXRenderWindowTclInteractor::DestroyTimer(void)
{
// timers automatically expire in X windows
return 1;
}
void vtkXRenderWindowTclInteractor::TerminateApp(void)
{
#if ((TK_MAJOR_VERSION <= 4)||((TK_MAJOR_VERSION == 8)&&(TK_MINOR_VERSION == 0)))
Tcl_Interp* interp = tkMainWindowList->interp;
#else
Tcl_Interp* interp = TkGetMainInfoList()->interp;
#endif
#if TCL_MAJOR_VERSION == 8 && TCL_MINOR_VERSION <= 2
char es[12];
strcpy(es,"exit");
Tcl_GlobalEval(interp, es);
#else
Tcl_EvalEx(interp, "exit", -1, TCL_EVAL_GLOBAL);
#endif
}
|
the-trav/CalendarSwing-EventPlanning
|
src/mypi/calendarPanel/CalendarDayButton.java
|
<reponame>the-trav/CalendarSwing-EventPlanning
package mypi.calendarPanel;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Calendar;
import java.util.GregorianCalendar;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.SwingConstants;
import mypi.eventForm.CreateEventForm;
import mypi.pojo.Events;
/**
*
* @author trav
*
*/
public class CalendarDayButton extends JButton {
private JLabel dayNumber;
private JTextArea eventDescription;
private GregorianCalendar tempCalendar;
private JTabbedPane theAppTabs;
public CalendarDayButton(int dayNum,GregorianCalendar tempCalendar,JTabbedPane theAppTabs) {
this.tempCalendar = tempCalendar;
this.theAppTabs = theAppTabs;
dayNumber = new JLabel(Integer.toString(dayNum));
setUpEventDescription();
setUpButton();
}
/**
* method is called to highlight the currentDay as white
*/
public void setCurrentDay(){
this.setBackground(Color.WHITE);
eventDescription.setBackground(Color.WHITE);
}
/**
* <eventString> is used to handle multiple events planned in a day
* eventString adds what is currently already planned in that day copy it and
* add the @param eventToPlan
* finally it will be set
*/
public void setEventDescription(String eventToPlan){
StringBuilder eventString = new StringBuilder();
eventString.append(eventDescription.getText() );
eventString.append("\n");
eventString.append(eventToPlan);
eventDescription.setText(eventString.toString());
}
/**
* helper method initilizes the dayButtons textArea location to where the
* planned event will be displayed "if any event is planned"
*/
private void setUpEventDescription(){
eventDescription = new JTextArea();
eventDescription.setEditable(false);
eventDescription.setBackground(Color.lightGray);
eventDescription.setLineWrap(true);
eventDescription.setWrapStyleWord(true);
}
private void setUpButton(){
this.setBackground(Color.lightGray);
this.setHorizontalAlignment(SwingConstants.LEFT);
this.setVerticalAlignment(SwingConstants.TOP);
this.setLayout(new BorderLayout());
this.setSize(100, 100);
this.addActionListener(new DayListener());
this.add(BorderLayout.NORTH, dayNumber);
this.add(BorderLayout.SOUTH, eventDescription);
}
/**
* <DayListener> handles anytime a day on the calendar is clicked once
* clicked the user will be brought to <CreateNewEventPanel>.class the user
* will only be able to transition out of the new panel until he/she clicks
* okay or cancel
*/
private class DayListener implements ActionListener {
/**
*
* @param m is from an integer value of the month
* @return 0=january , 11=december ect.
*/
private String getMonth(int m) {
String holder = Integer.toString(m);
switch (m) {
case (0):
holder = "January";
break;
case (1):
holder = "February";
break;
case (2):
holder = "March";
break;
case (3):
holder = "April";
break;
case (4):
holder = "May";
break;
case (5):
holder = "June";
break;
case (6):
holder = "July";
break;
case (7):
holder = "August";
break;
case (8):
holder = "September";
break;
case (9):
holder = "October";
break;
case (10):
holder = "November";
break;
case (11):
holder = "December";
break;
}
return holder;
}
@Override
public void actionPerformed(ActionEvent e) {
if (CalendarDayButton.this == e.getSource()){
Events eventPojo = new Events(dayNumber.getText() , getMonth(tempCalendar.get(Calendar.MONTH)), tempCalendar.get(Calendar.YEAR));
CreateEventForm createAEvent = new CreateEventForm(eventPojo, theAppTabs);
theAppTabs.add("Creating an Event", createAEvent);
theAppTabs.setSelectedComponent(createAEvent);
//making tabs calendar events un-clickable until the creating even is complete
theAppTabs.setEnabledAt(0, false);
theAppTabs.setEnabledAt(1, false);
}
}//end of action
}//end of dayListener
}
|
Kelvin19891223/TipEx
|
frontend/admin/routes/components/dataEntry/Cascader/CustomTrigger.js
|
<reponame>Kelvin19891223/TipEx
import React, {Component} from "react";
import {Card, Cascader} from "antd";
const options = [{
value: 'menu',
label: 'Menu',
children: [{
value: 'jumbo',
label: 'jquery',
children: [{
value: 'opstion',
label: 'West Lake',
}],
}],
}, {
value: 'menu',
label: 'Menu 1',
children: [{
value: 'opstion 1',
label: 'jumbo',
children: [{
value: 'opstion 2',
label: 'react',
}],
}],
}];
class CustomTrigger extends Component {
state = {
text: 'Unselect',
};
onChange = (value, selectedOptions) => {
this.setState({
text: selectedOptions.map(o => o.label).join(', '),
});
};
render() {
return (
<Card className="gx-card" title="Custom Trigger">
{this.state.text}
<Cascader options={options} onChange={this.onChange}>
<span className="gx-link">Change city</span>
</Cascader>
</Card>
);
}
}
export default CustomTrigger;
|
merveealpay/python-exercises
|
OOP/inher.py
|
class X:
def met(self):
print("X OK")
class Y:
def met(self):
print("Y OK")
class Z(X, Y):
def met(self):
super(X, self).met()
class A(Z):
def met(self):
super(A, self).met()
object = A()
object.met()
#Y OK
|
1847123212/YoC-open
|
components/chip_ch2601/include/es7210.h
|
/*
* Copyright (C) 2017-2020 Alibaba Group Holding Limited
*/
/******************************************************************************
* @file es7210.h
* @brief
* @version
* @date 2020-07-09
******************************************************************************/
#ifndef _ES7210_H_
#define _ES7210_H_
#include <drv/common.h>
#ifdef __cplusplus
extern "C" {
#endif
#define ES7210_RESET_CTL_REG00 0x00
#define ES7210_CLK_ON_OFF_REG01 0x01
#define ES7210_MCLK_CTL_REG02 0x02
#define ES7210_MST_CLK_CTL_REG03 0x03
#define ES7210_MST_LRCDIVH_REG04 0x04
#define ES7210_MST_LRCDIVL_REG05 0x05
#define ES7210_DIGITAL_PDN_REG06 0x06
#define ES7210_ADC_OSR_REG07 0x07
#define ES7210_MODE_CFG_REG08 0x08
#define ES7210_TCT0_CHPINI_REG09 0x09
#define ES7210_TCT1_CHPINI_REG0A 0x0A
#define ES7210_CHIP_STA_REG0B 0x0B
#define ES7210_IRQ_CTL_REG0C 0x0C
#define ES7210_MISC_CTL_REG0D 0x0D
#define ES7210_DMIC_CTL_REG10 0x10
#define ES7210_SDP_CFG1_REG11 0x11
#define ES7210_SDP_CFG2_REG12 0x12
#define ES7210_ADC_AUTOMUTE_REG13 0x13
#define ES7210_ADC34_MUTE_REG14 0x14
#define ES7210_ADC12_MUTE_REG15 0x15
#define ES7210_ALC_SEL_REG16 0x16
#define ES7210_ALC_COM_CFG1_REG17 0x17
#define ES7210_ALC34_LVL_REG18 0x18
#define ES7210_ALC12_LVL_REG19 0x19
#define ES7210_ALC_COM_CFG2_REG1A 0x1A
#define ES7210_ADC4_MAX_GAIN_REG1B 0x1B
#define ES7210_ADC3_MAX_GAIN_REG1C 0x1C
#define ES7210_ADC2_MAX_GAIN_REG1D 0x1D
#define ES7210_ADC1_MAX_GAIN_REG1E 0x1E
#define ES7210_ADC34_HPF2_REG20 0x20
#define ES7210_ADC34_HPF1_REG21 0x21
#define ES7210_ADC12_HPF2_REG22 0x22
#define ES7210_ADC12_HPF1_REG23 0x23
#define ES7210_CHP_ID1_REG3D 0x3D
#define ES7210_CHP_ID0_REG3E 0x3E
#define ES7210_CHP_VER_REG3F 0x3F
#define ES7210_ANALOG_SYS_REG40 0x40
#define ES7210_MICBIAS12_REG41 0x41
#define ES7210_MICBIAS34_REG42 0x42
#define ES7210_MIC1_GAIN_REG43 0x43
#define ES7210_MIC2_GAIN_REG44 0x44
#define ES7210_MIC3_GAIN_REG45 0x45
#define ES7210_MIC4_GAIN_REG46 0x46
#define ES7210_MIC1_LP_REG47 0x47
#define ES7210_MIC2_LP_REG48 0x48
#define ES7210_MIC3_LP_REG49 0x49
#define ES7210_MIC4_LP_REG4A 0x4A
#define ES7210_MIC12_PDN_REG4B 0x4B
#define ES7210_MIC34_PDN_REG4C 0x4C
typedef enum {
ES7210_I2S_SAMPLE_RATE_8000 = 8000U,
ES7210_I2S_SAMPLE_RATE_11025 = 11025U,
ES7210_I2S_SAMPLE_RATE_12000 = 12000U,
ES7210_I2S_SAMPLE_RATE_16000 = 16000U,
ES7210_I2S_SAMPLE_RATE_22050 = 22050U,
ES7210_I2S_SAMPLE_RATE_24000 = 24000U,
ES7210_I2S_SAMPLE_RATE_32000 = 32000U,
ES7210_I2S_SAMPLE_RATE_44100 = 44100U,
ES7210_I2S_SAMPLE_RATE_48000 = 48000U,
ES7210_I2S_SAMPLE_RATE_96000 = 96000U,
ES7210_I2S_SAMPLE_RATE_192000 = 192000U,
ES7210_I2S_SAMPLE_RATE_256000 = 256000U,
} es7210_i2s_sample_rate_t;
typedef enum {
ES7210_I2S_MODE_MASTER,
ES7210_I2S_MODE_SLAVE,
} es7210_i2s_mode_t;
typedef enum {
ES7210_NORMAL_I2S,
ES7210_NORMAL_LSB_JUSTIFIED,
} es7210_protocol_t;
typedef enum {
ES7210_16BIT_LENGTH,
ES7210_18BIT_LENGTH,
ES7210_20BIT_LENGTH,
ES7210_24BIT_LENGTH,
ES7210_32BIT_LENGTH,
} es7210_data_len_t;
typedef enum {
ES7210_I2S_SCLK_16FS = 16U,
ES7210_I2S_SCLK_32FS = 32U,
ES7210_I2S_SCLK_48FS = 48U,
ES7210_I2S_SCLK_64FS = 64U,
} es7210_i2s_sclk_freq_t;
typedef enum {
ES7210_MCLK_256FS = 256U,
ES7210_MCLK_384FS = 384U,
} es7210_mclk_freq_t;
typedef struct {
es7210_i2s_sample_rate_t i2s_rate; ///< es7210 i2s rate
es7210_i2s_mode_t i2s_mode; ///< es7210 i2s work mode
es7210_protocol_t i2s_protocol; ///< es7210 i2s protocol
es7210_data_len_t data_len; ///< es7210 data len
es7210_i2s_sclk_freq_t i2s_sclk_freq; ///< es7210 lrck divider
es7210_mclk_freq_t mclk_freq; ///< input mclk freq
} es7210_config_t;
typedef struct {
int8_t es7210_addr; ///< es7210 iic addr
int32_t (*es7210_iic_write)(uint8_t addr, uint8_t *data, uint32_t date_len);
int32_t (*es7210_iic_read)(uint8_t addr, uint8_t *data, uint32_t date_len);
} es7210_dev_t;
int32_t es7210_init(es7210_dev_t *dev, es7210_config_t *es7210_config);
int32_t es7210_uninit(es7210_dev_t *dev);
int32_t es7210_mic1_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_mic2_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_mic3_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_mic4_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_mic1_set_mute(es7210_dev_t *dev, bool en);
int32_t es7210_mic2_set_mute(es7210_dev_t *dev, bool en);
int32_t es7210_mic3_set_mute(es7210_dev_t *dev, bool en);
int32_t es7210_mic4_set_mute(es7210_dev_t *dev, bool en);
int32_t es7210_adc1_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_adc2_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_adc3_set_gain(es7210_dev_t *dev, unsigned char gain);
int32_t es7210_adc4_set_gain(es7210_dev_t *dev, unsigned char gain);
void es7210_read_all_register(es7210_dev_t *dev, uint8_t *buf);
#ifdef __cplusplus
}
#endif
#endif /* _ES7210_H_*/
|
seakers/ExtUtils
|
dakota-6.3.0.Windows.x86/include/DesignMultiSet.hpp.inl
|
/*
================================================================================
PROJECT:
<NAME> Genetic Algorithms (JEGA)
CONTENTS:
Inline methods of class DesignMultiSet.
NOTES:
See notes of DesignMultiSet.hpp.
PROGRAMMERS:
<NAME> (<EMAIL>) (JE)
ORGANIZATION:
Sandia National Laboratories
COPYRIGHT:
See the LICENSE file in the top level JEGA directory.
VERSION:
1.0.0
CHANGES:
Wed Apr 07 18:21:54 2004 - Original Version (JE)
================================================================================
*/
/*
================================================================================
Document This File
================================================================================
*/
/** \file
* \brief Contains the inline methods of the DesignMultiSet class.
*/
/*
================================================================================
Includes
================================================================================
*/
/*
================================================================================
Begin Namespace
================================================================================
*/
namespace JEGA {
namespace Utilities {
/*
================================================================================
Static Member Data Definitions
================================================================================
*/
template <typename Pred>
const std::size_t DesignMultiSet<Pred>::MARK = 7;
/*
================================================================================
Inline Mutators
================================================================================
*/
/*
================================================================================
Inline Accessors
================================================================================
*/
/*
================================================================================
Inline Public Methods
================================================================================
*/
template <typename Pred>
inline
typename DesignMultiSet<Pred>::const_reference
DesignMultiSet<Pred>::back(
) const
{
EDDY_FUNC_DEBUGSCOPE
return *this->rbegin();
} // DesignMultiSet::back
template <typename Pred>
inline
typename DesignMultiSet<Pred>::const_reference
DesignMultiSet<Pred>::front(
) const
{
EDDY_FUNC_DEBUGSCOPE
return *this->begin();
} // DesignMultiSet::front
template <typename Pred>
inline
typename DesignMultiSet<Pred>::reference
DesignMultiSet<Pred>::back(
)
{
EDDY_FUNC_DEBUGSCOPE
// workaround for compilation on IBM with MIPSpro 7.4.2m
return const_cast<reference>(static_cast<const_reference>(*this->rbegin()));
} // DesignMultiSet::back
template <typename Pred>
inline
typename DesignMultiSet<Pred>::reference
DesignMultiSet<Pred>::front(
)
{
EDDY_FUNC_DEBUGSCOPE
// workaround for compilation on IBM with MIPSpro 7.4.2m
return const_cast<reference>(static_cast<const_reference>(*this->begin()));
} // DesignMultiSet::front
template <typename Pred>
typename DesignMultiSet<Pred>::const_iterator
DesignMultiSet<Pred>::find_exact(
const key_type key
) const
{
EDDY_FUNC_DEBUGSCOPE
// start by bounding the search region.
const_iterator_pair bounds(this->equal_range(key));
// now, look at every member in that range until key is found.
for(; bounds.first!=bounds.second; ++bounds.first)
if(*(bounds.first) == key) return bounds.first;
// if we make it here, we didn't find it.
return this->end();
} // DesignMultiSet::find_exact
template <typename Pred>
typename DesignMultiSet<Pred>::const_iterator
DesignMultiSet<Pred>::find_not_exact(
const key_type key
) const
{
EDDY_FUNC_DEBUGSCOPE
// start by bounding the search region.
const_iterator_pair bounds(this->equal_range(key));
// now, look at every member in that range until
// a non-match to key is found.
for(; bounds.first!=bounds.second; ++bounds.first)
if(*bounds.first != key) return bounds.first;
// if we make it here, we didn't find it.
return this->end();
} // DesignMultiSet::find_not_exact
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::count_non_unique(
) const
{
EDDY_FUNC_DEBUGSCOPE
// check for the trivial abort conditions
if(this->size() < 2) return 0;
// prepare to store the duplicate count.
size_type ndups = 0;
// prepare some iterators for use in the following loops.
const_iterator iit, jit, e(this->end());
// Get the predicate for repeated use.
Pred predicate(this->key_comp());
// Start with the first Design and check each one after it until a
// non-duplicate Design is found. Then start again where we left
// off and repeat the process until the entire container has been checked.
for(iit=this->begin(); iit!=e; iit=jit)
{
jit = iit;
for(++jit; jit!=e; ++jit)
{
if(!predicate(*iit, *jit)) ++ndups;
else break;
}
}
// return the number of duplicates counted.
return ndups;
} // DesignMultiSet::count_non_unique
template <typename Pred>
std::ostream&
DesignMultiSet<Pred>::stream_out(
std::ostream& stream
) const
{
EDDY_FUNC_DEBUGSCOPE
// check for the trivial abort conditions
if(this->empty()) return stream;
// prepare some iterators for use below.
// cannot use --end() on some platforms.
const_iterator it(this->begin());
const_iterator e(this->end());
// stream out each design
for(--e; it!=e; ++it) this->stream_out(*it, stream) << '\n';
// do the last one separately without a newline
return this->stream_out(*e, stream);
} // DesignMultiSet::stream_out
template <typename Pred>
std::ostream&
DesignMultiSet<Pred>::stream_out(
const key_type des,
std::ostream& stream
)
{
EDDY_FUNC_DEBUGSCOPE
stream.precision(12);
const std::size_t ndv = des->GetNDV();
if(ndv > 0)
{
// Print the design variable values no matter what so long as there are
// some. Start with the first ndv-1 each followed by a tab.
for(std::size_t i=0; i<(ndv-1); ++i)
stream << des->GetVariableValue(i) << '\t';
// now put out the last design variable without a tab
// so that if the responses don't get written, there is
// no hanging tab.
stream << des->GetVariableValue(ndv-1);
}
// only print out responses if the Design has been
// evaluated and is well conditioned.
if(des->IsEvaluated() && !des->IsIllconditioned())
{
// these must be signed because we do subtraction.
const std::size_t nof = des->GetNOF();
const std::size_t ncn = des->GetNCN();
// put out each objective after a tab character
for(std::size_t j=0; j<nof; ++j)
stream << '\t' << des->GetObjective(j);
// now do the same for any constraints.
for(std::size_t j=0; j<ncn; ++j)
stream << '\t' << des->GetConstraint(j);
}
return stream;
} // DesignMultiSet::stream_out
template <typename Pred>
typename DesignMultiSet<Pred>::const_iterator
DesignMultiSet<Pred>::test_for_clone(
const key_type key
) const
{
EDDY_FUNC_DEBUGSCOPE
const_iterator clone = this->find_not_exact(key);
if(clone != this->end()) Design::TagAsClones(*key, **clone);
return clone;
} // DesignMultiSet::test_for_clone
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::mark_non_unique(
std::size_t mark
) const
{
EDDY_FUNC_DEBUGSCOPE
// check for the trivial abort conditions
if(this->size() < 2) return 0;
// prepare to store the duplicate count.
size_type nmarked = 0;
// Store the iterator to the last design of interest for repeated use.
// The last Design is the last one in the set which is --end();
const const_iterator e(this->end());
for(const_iterator curr(this->begin()); curr!=e;)
{
// mark the first of all non-unique with the false tag.
(*curr)->ModifyAttribute(mark, false);
// store the Design associated with curr for use below.
Design* currDes = *curr;
// if curr is the last one, we don't have to do anything further.
if(++curr == e) break;
// Otherwise, iterate all duplicates and mark them true.
for(const_iterator last(this->upper_bound(currDes));
curr!=last; ++curr)
{
(*curr)->ModifyAttribute(mark, true);
++nmarked;
}
}
// return the number of duplicates marked as non-unique.
return nmarked;
} // DesignMultiSet::mark_non_unique
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::test_within_list_for_clones(
) const
{
EDDY_FUNC_DEBUGSCOPE
// check for the trivial abort conditions
if(this->size() < 2) return 0;
// prepare to store the duplicate count.
size_type nclones = 0;
// prepare some iterators for use in the following loops.
const_iterator jit, e(this->end());
// Get the predicate for repeated use.
Pred predicate(this->key_comp());
// Start with the first Design and check each one after it until a
// non-duplicate Design is found. Then start again where we left
// off and repeat the process until the entire container has been checked.
for(const_iterator iit(this->begin()); iit!=e; iit=jit)
{
jit = iit;
for(++jit; jit!=e; ++jit)
{
if(!predicate(*iit, *jit))
nclones += Design::TagAsClones(**jit, **iit);
else break;
}
}
// return the number of duplicates counted.
return nclones;
} // DesignMultiSet::test_within_list_for_clones
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::mark(
const key_type key,
std::size_t mark
)
{
EDDY_FUNC_DEBUGSCOPE
// prepare to count the number marked.
size_type nmarked = 0;
// start by bounding the search region.
iterator_pair bounds(this->equal_range(key));
// unmark all those before the range
for(iterator it(this->begin()); it!=bounds.first; ++it)
(*it)->ModifyAttribute(mark, false);
// now mark all those in the range. Also track our number marked.
for(; bounds.first!=bounds.second; ++bounds.first)
{
(*bounds.first)->ModifyAttribute(mark, true);
++nmarked;
}
// now unmark all those after the range.
const const_iterator e(this->end());
for(; bounds.second!=e; ++bounds.second)
(*bounds.second)->ModifyAttribute(mark, false);
return nmarked;
}
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::mark_not_exact(
const key_type key,
std::size_t mark
)
{
EDDY_FUNC_DEBUGSCOPE
// prepare to count the number marked.
size_type nmarked = 0;
// start by bounding the search region.
iterator_pair bounds(this->equal_range(key));
// unmark all those before the range
for(iterator it(this->begin()); it!=bounds.first; ++it)
(*it)->ModifyAttribute(mark, false);
// now mark all those in the range that are not the same exact
// object as key. Also track our number marked.
for(; bounds.first!=bounds.second; ++bounds.first)
{
bool mark = *bounds.first != key;
(*bounds.first)->ModifyAttribute(mark, mark);
if(mark) ++nmarked;
}
// now unmark all those after the range.
const const_iterator e(this->end());
for(; bounds.second!=e; ++bounds.second)
(*bounds.second)->ModifyAttribute(mark, false);
return nmarked;
}
template <typename Pred>
inline
void
DesignMultiSet<Pred>::pop_front(
)
{
EDDY_FUNC_DEBUGSCOPE
EDDY_ASSERT(!this->empty())
this->base_type::erase(this->base_type::begin());
} // DesignMultiSet::pop_front
template <typename Pred>
inline
void
DesignMultiSet<Pred>::pop_back(
)
{
EDDY_FUNC_DEBUGSCOPE
EDDY_ASSERT(!this->empty())
// cannot use --end() on some platforms.
iterator it(this->base_type::end());
this->base_type::erase(--it);
} // DesignMultiSet::pop_back
template <typename Pred>
inline
typename DesignMultiSet<Pred>::iterator
DesignMultiSet<Pred>::replace(
iterator where,
const key_type key
)
{
EDDY_FUNC_DEBUGSCOPE
EDDY_ASSERT(where != this->end())
this->base_type::erase(where);
return this->base_type::insert(key);
} // DesignMultiSet::replace
template <typename Pred>
typename DesignMultiSet<Pred>::size_type
DesignMultiSet<Pred>::erase_exacts(
const key_type key
)
{
EDDY_FUNC_DEBUGSCOPE
// store the initial size so we can easily return the number erased.
size_type isize = this->size();
// start by bounding the search region.
iterator_pair bounds(this->equal_range(key));
// now, look at every member in that range.
// when any exact matches to key are found, erase them.
for(; bounds.first!=bounds.second;)
{
if(*bounds.first == key) this->erase(bounds.first++);
else ++bounds.first;
}
// return the difference in size from the beginning.
return isize - this->size();
} // DesignMultiSet::erase_exacts
template <typename Pred>
void
DesignMultiSet<Pred>::flush(
)
{
EDDY_FUNC_DEBUGSCOPE
// iterate the set and destroy all designs.
const_iterator e(this->end());
for(const_iterator it(this->begin()); it!=e; ++it) delete (*it);
// now clear the set.
this->base_type::clear();
} // DesignMultiSet::flush
template <typename Pred>
typename DesignMultiSet<Pred>::iterator
DesignMultiSet<Pred>::find_exact(
const key_type key
)
{
EDDY_FUNC_DEBUGSCOPE
// start by bounding the search region.
iterator_pair bounds(this->base_type::equal_range(key));
// now, look at every member in that range until key is found.
for(; bounds.first!=bounds.second; ++bounds.first)
if(*(bounds.first) == key) return bounds.first;
// if we make it here, we didn't find it.
return this->base_type::end();
} // DesignMultiSet::find_exact
template <typename Pred>
typename DesignMultiSet<Pred>::iterator
DesignMultiSet<Pred>::find_not_exact(
const key_type key
)
{
EDDY_FUNC_DEBUGSCOPE
// start by bounding the search region.
iterator_pair bounds(this->base_type::equal_range(key));
// now, look at every member in that range until
// a non-match to key is found.
for(; bounds.first!=bounds.second; ++bounds.first)
if(*bounds.first != key) return bounds.first;
// if we make it here, we didn't find it.
return this->base_type::end();
} // DesignMultiSet::find_not_exact
/*
================================================================================
Inline Subclass Visible Methods
================================================================================
*/
/*
================================================================================
Inline Private Methods
================================================================================
*/
/*
================================================================================
Inline Structors
================================================================================
*/
template <typename Pred>
inline
DesignMultiSet<Pred>::DesignMultiSet(
const key_compare& pred
) :
base_type(pred)
{
} // DesignMultiSet::DesignMultiSet
template <typename Pred>
inline
DesignMultiSet<Pred>::DesignMultiSet(
const my_type& copy
) :
base_type(copy)
{
} // DesignMultiSet::DesignMultiSet
/*
================================================================================
End Namespace
================================================================================
*/
} // namespace Utilities
} // namespace JEGA
|
gitter-badger/ZeloEngine
|
Sandbox/Craft/cube.cpp
|
<reponame>gitter-badger/ZeloEngine
#include <math.h>
#include "cube.h"
#include "item.h"
#include "matrix.h"
#include "util.h"
void make_cube_faces(
float *data, float ao[6][4], float light[6][4],
int left, int right, int top, int bottom, int front, int back,
int wleft, int wright, int wtop, int wbottom, int wfront, int wback,
float x, float y, float z, float n) {
static const float positions[6][4][3] = {
{{-1, -1, -1}, {-1, -1, +1}, {-1, +1, -1}, {-1, +1, +1}},
{{+1, -1, -1}, {+1, -1, +1}, {+1, +1, -1}, {+1, +1, +1}},
{{-1, +1, -1}, {-1, +1, +1}, {+1, +1, -1}, {+1, +1, +1}},
{{-1, -1, -1}, {-1, -1, +1}, {+1, -1, -1}, {+1, -1, +1}},
{{-1, -1, -1}, {-1, +1, -1}, {+1, -1, -1}, {+1, +1, -1}},
{{-1, -1, +1}, {-1, +1, +1}, {+1, -1, +1}, {+1, +1, +1}}
};
static const float normals[6][3] = {
{-1, 0, 0},
{+1, 0, 0},
{0, +1, 0},
{0, -1, 0},
{0, 0, -1},
{0, 0, +1}
};
static const float uvs[6][4][2] = {
{{0, 0}, {1, 0}, {0, 1}, {1, 1}},
{{1, 0}, {0, 0}, {1, 1}, {0, 1}},
{{0, 1}, {0, 0}, {1, 1}, {1, 0}},
{{0, 0}, {0, 1}, {1, 0}, {1, 1}},
{{0, 0}, {0, 1}, {1, 0}, {1, 1}},
{{1, 0}, {1, 1}, {0, 0}, {0, 1}}
};
static const float indices[6][6] = {
{0, 3, 2, 0, 1, 3},
{0, 3, 1, 0, 2, 3},
{0, 3, 2, 0, 1, 3},
{0, 3, 1, 0, 2, 3},
{0, 3, 2, 0, 1, 3},
{0, 3, 1, 0, 2, 3}
};
static const float flipped[6][6] = {
{0, 1, 2, 1, 3, 2},
{0, 2, 1, 2, 3, 1},
{0, 1, 2, 1, 3, 2},
{0, 2, 1, 2, 3, 1},
{0, 1, 2, 1, 3, 2},
{0, 2, 1, 2, 3, 1}
};
float *d = data;
float s = 0.0625;
float a = 0 + 1 / 2048.0;
float b = s - 1 / 2048.0;
int faces[6] = {left, right, top, bottom, front, back};
int tiles[6] = {wleft, wright, wtop, wbottom, wfront, wback};
for (int i = 0; i < 6; i++) {
if (faces[i] == 0) {
continue;
}
float du = (tiles[i] % 16) * s;
float dv = (tiles[i] / 16) * s;
int flip = ao[i][0] + ao[i][3] > ao[i][1] + ao[i][2];
for (int v = 0; v < 6; v++) {
int j = flip ? flipped[i][v] : indices[i][v];
*(d++) = x + n * positions[i][j][0];
*(d++) = y + n * positions[i][j][1];
*(d++) = z + n * positions[i][j][2];
*(d++) = normals[i][0];
*(d++) = normals[i][1];
*(d++) = normals[i][2];
*(d++) = du + (uvs[i][j][0] ? b : a);
*(d++) = dv + (uvs[i][j][1] ? b : a);
*(d++) = ao[i][j];
*(d++) = light[i][j];
}
}
}
void make_cube(
float *data, float ao[6][4], float light[6][4],
int left, int right, int top, int bottom, int front, int back,
float x, float y, float z, float n, int w) {
int wleft = blocks[w][0];
int wright = blocks[w][1];
int wtop = blocks[w][2];
int wbottom = blocks[w][3];
int wfront = blocks[w][4];
int wback = blocks[w][5];
make_cube_faces(
data, ao, light,
left, right, top, bottom, front, back,
wleft, wright, wtop, wbottom, wfront, wback,
x, y, z, n);
}
void make_plant(
float *data, float ao, float light,
float px, float py, float pz, float n, int w, float rotation) {
static const float positions[4][4][3] = {
{{0, -1, -1}, {0, -1, +1}, {0, +1, -1}, {0, +1, +1}},
{{0, -1, -1}, {0, -1, +1}, {0, +1, -1}, {0, +1, +1}},
{{-1, -1, 0}, {-1, +1, 0}, {+1, -1, 0}, {+1, +1, 0}},
{{-1, -1, 0}, {-1, +1, 0}, {+1, -1, 0}, {+1, +1, 0}}
};
static const float normals[4][3] = {
{-1, 0, 0},
{+1, 0, 0},
{0, 0, -1},
{0, 0, +1}
};
static const float uvs[4][4][2] = {
{{0, 0}, {1, 0}, {0, 1}, {1, 1}},
{{1, 0}, {0, 0}, {1, 1}, {0, 1}},
{{0, 0}, {0, 1}, {1, 0}, {1, 1}},
{{1, 0}, {1, 1}, {0, 0}, {0, 1}}
};
static const float indices[4][6] = {
{0, 3, 2, 0, 1, 3},
{0, 3, 1, 0, 2, 3},
{0, 3, 2, 0, 1, 3},
{0, 3, 1, 0, 2, 3}
};
float *d = data;
float s = 0.0625;
float a = 0;
float b = s;
float du = (plants[w] % 16) * s;
float dv = (plants[w] / 16) * s;
for (int i = 0; i < 4; i++) {
for (int v = 0; v < 6; v++) {
int j = indices[i][v];
*(d++) = n * positions[i][j][0];
*(d++) = n * positions[i][j][1];
*(d++) = n * positions[i][j][2];
*(d++) = normals[i][0];
*(d++) = normals[i][1];
*(d++) = normals[i][2];
*(d++) = du + (uvs[i][j][0] ? b : a);
*(d++) = dv + (uvs[i][j][1] ? b : a);
*(d++) = ao;
*(d++) = light;
}
}
float ma[16];
float mb[16];
mat_identity(ma);
mat_rotate(mb, 0, 1, 0, RADIANS(rotation));
mat_multiply(ma, mb, ma);
mat_apply(data, ma, 24, 3, 10);
mat_translate(mb, px, py, pz);
mat_multiply(ma, mb, ma);
mat_apply(data, ma, 24, 0, 10);
}
void make_player(
float *data,
float x, float y, float z, float rx, float ry) {
float ao[6][4] = {0};
float light[6][4] = {
{0.8, 0.8, 0.8, 0.8},
{0.8, 0.8, 0.8, 0.8},
{0.8, 0.8, 0.8, 0.8},
{0.8, 0.8, 0.8, 0.8},
{0.8, 0.8, 0.8, 0.8},
{0.8, 0.8, 0.8, 0.8}
};
make_cube_faces(
data, ao, light,
1, 1, 1, 1, 1, 1,
226, 224, 241, 209, 225, 227,
0, 0, 0, 0.4);
float ma[16];
float mb[16];
mat_identity(ma);
mat_rotate(mb, 0, 1, 0, rx);
mat_multiply(ma, mb, ma);
mat_rotate(mb, cosf(rx), 0, sinf(rx), -ry);
mat_multiply(ma, mb, ma);
mat_apply(data, ma, 36, 3, 10);
mat_translate(mb, x, y, z);
mat_multiply(ma, mb, ma);
mat_apply(data, ma, 36, 0, 10);
}
void make_cube_wireframe(float *data, float x, float y, float z, float n) {
static const float positions[8][3] = {
{-1, -1, -1},
{-1, -1, +1},
{-1, +1, -1},
{-1, +1, +1},
{+1, -1, -1},
{+1, -1, +1},
{+1, +1, -1},
{+1, +1, +1}
};
static const int indices[24] = {
0, 1, 0, 2, 0, 4, 1, 3,
1, 5, 2, 3, 2, 6, 3, 7,
4, 5, 4, 6, 5, 7, 6, 7
};
float *d = data;
for (int i = 0; i < 24; i++) {
int j = indices[i];
*(d++) = x + n * positions[j][0];
*(d++) = y + n * positions[j][1];
*(d++) = z + n * positions[j][2];
}
}
void make_character(
float *data,
float x, float y, float n, float m, char c) {
float *d = data;
float s = 0.0625;
float a = s;
float b = s * 2;
int w = c - 32;
float du = (w % 16) * a;
float dv = 1 - (w / 16) * b - b;
*(d++) = x - n;
*(d++) = y - m;
*(d++) = du + 0;
*(d++) = dv;
*(d++) = x + n;
*(d++) = y - m;
*(d++) = du + a;
*(d++) = dv;
*(d++) = x + n;
*(d++) = y + m;
*(d++) = du + a;
*(d++) = dv + b;
*(d++) = x - n;
*(d++) = y - m;
*(d++) = du + 0;
*(d++) = dv;
*(d++) = x + n;
*(d++) = y + m;
*(d++) = du + a;
*(d++) = dv + b;
*(d++) = x - n;
*(d++) = y + m;
*(d++) = du + 0;
*(d++) = dv + b;
}
void make_character_3d(
float *data, float x, float y, float z, float n, int face, char c) {
static const float positions[8][6][3] = {
{{0, -2, -1}, {0, +2, +1}, {0, +2, -1},
{0, -2, -1}, {0, -2, +1}, {0, +2, +1}},
{{0, -2, -1}, {0, +2, +1}, {0, -2, +1},
{0, -2, -1}, {0, +2, -1}, {0, +2, +1}},
{{-1, -2, 0}, {+1, +2, 0}, {+1, -2, 0},
{-1, -2, 0}, {-1, +2, 0}, {+1, +2, 0}},
{{-1, -2, 0}, {+1, -2, 0}, {+1, +2, 0},
{-1, -2, 0}, {+1, +2, 0}, {-1, +2, 0}},
{{-1, 0, +2}, {+1, 0, +2}, {+1, 0, -2},
{-1, 0, +2}, {+1, 0, -2}, {-1, 0, -2}},
{{-2, 0, +1}, {+2, 0, -1}, {-2, 0, -1},
{-2, 0, +1}, {+2, 0, +1}, {+2, 0, -1}},
{{+1, 0, +2}, {-1, 0, -2}, {-1, 0, +2},
{+1, 0, +2}, {+1, 0, -2}, {-1, 0, -2}},
{{+2, 0, -1}, {-2, 0, +1}, {+2, 0, +1},
{+2, 0, -1}, {-2, 0, -1}, {-2, 0, +1}}
};
static const float uvs[8][6][2] = {
{{0, 0}, {1, 1}, {0, 1}, {0, 0}, {1, 0}, {1, 1}},
{{1, 0}, {0, 1}, {0, 0}, {1, 0}, {1, 1}, {0, 1}},
{{1, 0}, {0, 1}, {0, 0}, {1, 0}, {1, 1}, {0, 1}},
{{0, 0}, {1, 0}, {1, 1}, {0, 0}, {1, 1}, {0, 1}},
{{0, 0}, {1, 0}, {1, 1}, {0, 0}, {1, 1}, {0, 1}},
{{0, 1}, {1, 0}, {1, 1}, {0, 1}, {0, 0}, {1, 0}},
{{0, 1}, {1, 0}, {1, 1}, {0, 1}, {0, 0}, {1, 0}},
{{0, 1}, {1, 0}, {1, 1}, {0, 1}, {0, 0}, {1, 0}}
};
static const float offsets[8][3] = {
{-1, 0, 0},
{+1, 0, 0},
{0, 0, -1},
{0, 0, +1},
{0, +1, 0},
{0, +1, 0},
{0, +1, 0},
{0, +1, 0},
};
float *d = data;
float s = 0.0625;
float pu = s / 5;
float pv = s / 2.5;
float u1 = pu;
float v1 = pv;
float u2 = s - pu;
float v2 = s * 2 - pv;
float p = 0.5;
int w = c - 32;
float du = (w % 16) * s;
float dv = 1 - (w / 16 + 1) * s * 2;
x += p * offsets[face][0];
y += p * offsets[face][1];
z += p * offsets[face][2];
for (int i = 0; i < 6; i++) {
*(d++) = x + n * positions[face][i][0];
*(d++) = y + n * positions[face][i][1];
*(d++) = z + n * positions[face][i][2];
*(d++) = du + (uvs[face][i][0] ? u2 : u1);
*(d++) = dv + (uvs[face][i][1] ? v2 : v1);
}
}
int _make_sphere(
float *data, float r, int detail,
float *a, float *b, float *c,
float *ta, float *tb, float *tc) {
if (detail == 0) {
float *d = data;
*(d++) = a[0] * r;
*(d++) = a[1] * r;
*(d++) = a[2] * r;
*(d++) = a[0];
*(d++) = a[1];
*(d++) = a[2];
*(d++) = ta[0];
*(d++) = ta[1];
*(d++) = b[0] * r;
*(d++) = b[1] * r;
*(d++) = b[2] * r;
*(d++) = b[0];
*(d++) = b[1];
*(d++) = b[2];
*(d++) = tb[0];
*(d++) = tb[1];
*(d++) = c[0] * r;
*(d++) = c[1] * r;
*(d++) = c[2] * r;
*(d++) = c[0];
*(d++) = c[1];
*(d++) = c[2];
*(d++) = tc[0];
*(d++) = tc[1];
return 1;
} else {
float ab[3], ac[3], bc[3];
for (int i = 0; i < 3; i++) {
ab[i] = (a[i] + b[i]) / 2;
ac[i] = (a[i] + c[i]) / 2;
bc[i] = (b[i] + c[i]) / 2;
}
normalize(ab + 0, ab + 1, ab + 2);
normalize(ac + 0, ac + 1, ac + 2);
normalize(bc + 0, bc + 1, bc + 2);
float tab[2], tac[2], tbc[2];
tab[0] = 0;
tab[1] = 1 - acosf(ab[1]) / PI;
tac[0] = 0;
tac[1] = 1 - acosf(ac[1]) / PI;
tbc[0] = 0;
tbc[1] = 1 - acosf(bc[1]) / PI;
int total = 0;
int n;
n = _make_sphere(data, r, detail - 1, a, ab, ac, ta, tab, tac);
total += n;
data += n * 24;
n = _make_sphere(data, r, detail - 1, b, bc, ab, tb, tbc, tab);
total += n;
data += n * 24;
n = _make_sphere(data, r, detail - 1, c, ac, bc, tc, tac, tbc);
total += n;
data += n * 24;
n = _make_sphere(data, r, detail - 1, ab, bc, ac, tab, tbc, tac);
total += n;
data += n * 24;
return total;
}
}
void make_sphere(float *data, float r, int detail) {
// detail, triangles, floats
// 0, 8, 192
// 1, 32, 768
// 2, 128, 3072
// 3, 512, 12288
// 4, 2048, 49152
// 5, 8192, 196608
// 6, 32768, 786432
// 7, 131072, 3145728
static int indices[8][3] = {
{4, 3, 0},
{1, 4, 0},
{3, 4, 5},
{4, 1, 5},
{0, 3, 2},
{0, 2, 1},
{5, 2, 3},
{5, 1, 2}
};
static float positions[6][3] = {
{0, 0, -1},
{1, 0, 0},
{0, -1, 0},
{-1, 0, 0},
{0, 1, 0},
{0, 0, 1}
};
static float uvs[6][3] = {
{0, 0.5},
{0, 0.5},
{0, 0},
{0, 0.5},
{0, 1},
{0, 0.5}
};
int total = 0;
for (int i = 0; i < 8; i++) {
int n = _make_sphere(
data, r, detail,
positions[indices[i][0]],
positions[indices[i][1]],
positions[indices[i][2]],
uvs[indices[i][0]],
uvs[indices[i][1]],
uvs[indices[i][2]]);
total += n;
data += n * 24;
}
}
|
Yoon-SeokJin/PlatformerGame
|
PlatformerGame/BulletBlock.cpp
|
#include "BulletBlock.hpp"
BulletBlock::BulletBlock(const Vec2<double>& pos, int rotate) : Block(pos) {
bullet_rotate = rotate;
sprite_info.sprite_index = SpriteIndex::bullet_block;
sprite_info.image_angle = rotate;
if (rotate == 0) dpos = { 10, 0 };
else if (rotate == 1) dpos = { -1, 10 };
else if (rotate == 2) dpos = { -10, -1 };
else if (rotate == 3) dpos = { 0, -10 };
}
void BulletBlock::step() {
if (timer++ >= 120) {
room->add_instance(new Bullet(pos + dpos, bullet_rotate));
timer = 0;
}
}
|
Bhanditz/Winds
|
api/src/routes/share.js
|
import Share from '../controllers/share';
module.exports = api => {
api.route('/shares').get(Share.list);
api.route('/shares/:shareId').get(Share.get);
api.route('/shares').post(Share.post);
api.route('/shares/:shareId').put(Share.put);
api.route('/shares/:shareId').delete(Share.delete);
};
|
FrankKwok/Oreo
|
android/net/NetworkCapabilities.java
|
<gh_stars>1-10
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.net;
import android.os.Parcel;
import android.os.Parcelable;
import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.util.BitUtils;
import java.util.Objects;
/**
* This class represents the capabilities of a network. This is used both to specify
* needs to {@link ConnectivityManager} and when inspecting a network.
*
* Note that this replaces the old {@link ConnectivityManager#TYPE_MOBILE} method
* of network selection. Rather than indicate a need for Wi-Fi because an application
* needs high bandwidth and risk obsolescence when a new, fast network appears (like LTE),
* the application should specify it needs high bandwidth. Similarly if an application
* needs an unmetered network for a bulk transfer it can specify that rather than assuming
* all cellular based connections are metered and all Wi-Fi based connections are not.
*/
public final class NetworkCapabilities implements Parcelable {
private static final String TAG = "NetworkCapabilities";
/**
* @hide
*/
public NetworkCapabilities() {
clearAll();
mNetworkCapabilities = DEFAULT_CAPABILITIES;
}
public NetworkCapabilities(NetworkCapabilities nc) {
if (nc != null) {
mNetworkCapabilities = nc.mNetworkCapabilities;
mTransportTypes = nc.mTransportTypes;
mLinkUpBandwidthKbps = nc.mLinkUpBandwidthKbps;
mLinkDownBandwidthKbps = nc.mLinkDownBandwidthKbps;
mNetworkSpecifier = nc.mNetworkSpecifier;
mSignalStrength = nc.mSignalStrength;
}
}
/**
* Completely clears the contents of this object, removing even the capabilities that are set
* by default when the object is constructed.
* @hide
*/
public void clearAll() {
mNetworkCapabilities = mTransportTypes = 0;
mLinkUpBandwidthKbps = mLinkDownBandwidthKbps = 0;
mNetworkSpecifier = null;
mSignalStrength = SIGNAL_STRENGTH_UNSPECIFIED;
}
/**
* Represents the network's capabilities. If any are specified they will be satisfied
* by any Network that matches all of them.
*/
private long mNetworkCapabilities;
/**
* Indicates this is a network that has the ability to reach the
* carrier's MMSC for sending and receiving MMS messages.
*/
public static final int NET_CAPABILITY_MMS = 0;
/**
* Indicates this is a network that has the ability to reach the carrier's
* SUPL server, used to retrieve GPS information.
*/
public static final int NET_CAPABILITY_SUPL = 1;
/**
* Indicates this is a network that has the ability to reach the carrier's
* DUN or tethering gateway.
*/
public static final int NET_CAPABILITY_DUN = 2;
/**
* Indicates this is a network that has the ability to reach the carrier's
* FOTA portal, used for over the air updates.
*/
public static final int NET_CAPABILITY_FOTA = 3;
/**
* Indicates this is a network that has the ability to reach the carrier's
* IMS servers, used for network registration and signaling.
*/
public static final int NET_CAPABILITY_IMS = 4;
/**
* Indicates this is a network that has the ability to reach the carrier's
* CBS servers, used for carrier specific services.
*/
public static final int NET_CAPABILITY_CBS = 5;
/**
* Indicates this is a network that has the ability to reach a Wi-Fi direct
* peer.
*/
public static final int NET_CAPABILITY_WIFI_P2P = 6;
/**
* Indicates this is a network that has the ability to reach a carrier's
* Initial Attach servers.
*/
public static final int NET_CAPABILITY_IA = 7;
/**
* Indicates this is a network that has the ability to reach a carrier's
* RCS servers, used for Rich Communication Services.
*/
public static final int NET_CAPABILITY_RCS = 8;
/**
* Indicates this is a network that has the ability to reach a carrier's
* XCAP servers, used for configuration and control.
*/
public static final int NET_CAPABILITY_XCAP = 9;
/**
* Indicates this is a network that has the ability to reach a carrier's
* Emergency IMS servers or other services, used for network signaling
* during emergency calls.
*/
public static final int NET_CAPABILITY_EIMS = 10;
/**
* Indicates that this network is unmetered.
*/
public static final int NET_CAPABILITY_NOT_METERED = 11;
/**
* Indicates that this network should be able to reach the internet.
*/
public static final int NET_CAPABILITY_INTERNET = 12;
/**
* Indicates that this network is available for general use. If this is not set
* applications should not attempt to communicate on this network. Note that this
* is simply informative and not enforcement - enforcement is handled via other means.
* Set by default.
*/
public static final int NET_CAPABILITY_NOT_RESTRICTED = 13;
/**
* Indicates that the user has indicated implicit trust of this network. This
* generally means it's a sim-selected carrier, a plugged in ethernet, a paired
* BT device or a wifi the user asked to connect to. Untrusted networks
* are probably limited to unknown wifi AP. Set by default.
*/
public static final int NET_CAPABILITY_TRUSTED = 14;
/**
* Indicates that this network is not a VPN. This capability is set by default and should be
* explicitly cleared for VPN networks.
*/
public static final int NET_CAPABILITY_NOT_VPN = 15;
/**
* Indicates that connectivity on this network was successfully validated. For example, for a
* network with NET_CAPABILITY_INTERNET, it means that Internet connectivity was successfully
* detected.
*/
public static final int NET_CAPABILITY_VALIDATED = 16;
/**
* Indicates that this network was found to have a captive portal in place last time it was
* probed.
*/
public static final int NET_CAPABILITY_CAPTIVE_PORTAL = 17;
/**
* Indicates that this network is available for use by apps, and not a network that is being
* kept up in the background to facilitate fast network switching.
* @hide
*/
public static final int NET_CAPABILITY_FOREGROUND = 18;
private static final int MIN_NET_CAPABILITY = NET_CAPABILITY_MMS;
private static final int MAX_NET_CAPABILITY = NET_CAPABILITY_FOREGROUND;
/**
* Network capabilities that are expected to be mutable, i.e., can change while a particular
* network is connected.
*/
private static final long MUTABLE_CAPABILITIES =
// TRUSTED can change when user explicitly connects to an untrusted network in Settings.
// http://b/18206275
(1 << NET_CAPABILITY_TRUSTED) |
(1 << NET_CAPABILITY_VALIDATED) |
(1 << NET_CAPABILITY_CAPTIVE_PORTAL) |
(1 << NET_CAPABILITY_FOREGROUND);
/**
* Network capabilities that are not allowed in NetworkRequests. This exists because the
* NetworkFactory / NetworkAgent model does not deal well with the situation where a
* capability's presence cannot be known in advance. If such a capability is requested, then we
* can get into a cycle where the NetworkFactory endlessly churns out NetworkAgents that then
* get immediately torn down because they do not have the requested capability.
*/
private static final long NON_REQUESTABLE_CAPABILITIES =
MUTABLE_CAPABILITIES & ~(1 << NET_CAPABILITY_TRUSTED);
/**
* Capabilities that are set by default when the object is constructed.
*/
private static final long DEFAULT_CAPABILITIES =
(1 << NET_CAPABILITY_NOT_RESTRICTED) |
(1 << NET_CAPABILITY_TRUSTED) |
(1 << NET_CAPABILITY_NOT_VPN);
/**
* Capabilities that suggest that a network is restricted.
* {@see #maybeMarkCapabilitiesRestricted}.
*/
@VisibleForTesting
/* package */ static final long RESTRICTED_CAPABILITIES =
(1 << NET_CAPABILITY_CBS) |
(1 << NET_CAPABILITY_DUN) |
(1 << NET_CAPABILITY_EIMS) |
(1 << NET_CAPABILITY_FOTA) |
(1 << NET_CAPABILITY_IA) |
(1 << NET_CAPABILITY_IMS) |
(1 << NET_CAPABILITY_RCS) |
(1 << NET_CAPABILITY_XCAP);
/**
* Capabilities that suggest that a network is unrestricted.
* {@see #maybeMarkCapabilitiesRestricted}.
*/
@VisibleForTesting
/* package */ static final long UNRESTRICTED_CAPABILITIES =
(1 << NET_CAPABILITY_INTERNET) |
(1 << NET_CAPABILITY_MMS) |
(1 << NET_CAPABILITY_SUPL) |
(1 << NET_CAPABILITY_WIFI_P2P);
/**
* Adds the given capability to this {@code NetworkCapability} instance.
* Multiple capabilities may be applied sequentially. Note that when searching
* for a network to satisfy a request, all capabilities requested must be satisfied.
*
* @param capability the {@code NetworkCapabilities.NET_CAPABILITY_*} to be added.
* @return This NetworkCapabilities instance, to facilitate chaining.
* @hide
*/
public NetworkCapabilities addCapability(int capability) {
if (capability < MIN_NET_CAPABILITY || capability > MAX_NET_CAPABILITY) {
throw new IllegalArgumentException("NetworkCapability out of range");
}
mNetworkCapabilities |= 1 << capability;
return this;
}
/**
* Removes (if found) the given capability from this {@code NetworkCapability} instance.
*
* @param capability the {@code NetworkCapabilities.NET_CAPABILTIY_*} to be removed.
* @return This NetworkCapabilities instance, to facilitate chaining.
* @hide
*/
public NetworkCapabilities removeCapability(int capability) {
if (capability < MIN_NET_CAPABILITY || capability > MAX_NET_CAPABILITY) {
throw new IllegalArgumentException("NetworkCapability out of range");
}
mNetworkCapabilities &= ~(1 << capability);
return this;
}
/**
* Gets all the capabilities set on this {@code NetworkCapability} instance.
*
* @return an array of {@code NetworkCapabilities.NET_CAPABILITY_*} values
* for this instance.
* @hide
*/
public int[] getCapabilities() {
return BitUtils.unpackBits(mNetworkCapabilities);
}
/**
* Tests for the presence of a capabilitity on this instance.
*
* @param capability the {@code NetworkCapabilities.NET_CAPABILITY_*} to be tested for.
* @return {@code true} if set on this instance.
*/
public boolean hasCapability(int capability) {
if (capability < MIN_NET_CAPABILITY || capability > MAX_NET_CAPABILITY) {
return false;
}
return ((mNetworkCapabilities & (1 << capability)) != 0);
}
private void combineNetCapabilities(NetworkCapabilities nc) {
this.mNetworkCapabilities |= nc.mNetworkCapabilities;
}
/**
* Convenience function that returns a human-readable description of the first mutable
* capability we find. Used to present an error message to apps that request mutable
* capabilities.
*
* @hide
*/
public String describeFirstNonRequestableCapability() {
if (hasCapability(NET_CAPABILITY_VALIDATED)) return "NET_CAPABILITY_VALIDATED";
if (hasCapability(NET_CAPABILITY_CAPTIVE_PORTAL)) return "NET_CAPABILITY_CAPTIVE_PORTAL";
if (hasCapability(NET_CAPABILITY_FOREGROUND)) return "NET_CAPABILITY_FOREGROUND";
// This cannot happen unless the preceding checks are incomplete.
if ((mNetworkCapabilities & NON_REQUESTABLE_CAPABILITIES) != 0) {
return "unknown non-requestable capabilities " + Long.toHexString(mNetworkCapabilities);
}
if (mLinkUpBandwidthKbps != 0 || mLinkDownBandwidthKbps != 0) return "link bandwidth";
if (hasSignalStrength()) return "signalStrength";
return null;
}
private boolean satisfiedByNetCapabilities(NetworkCapabilities nc, boolean onlyImmutable) {
long networkCapabilities = this.mNetworkCapabilities;
if (onlyImmutable) {
networkCapabilities = networkCapabilities & ~MUTABLE_CAPABILITIES;
}
return ((nc.mNetworkCapabilities & networkCapabilities) == networkCapabilities);
}
/** @hide */
public boolean equalsNetCapabilities(NetworkCapabilities nc) {
return (nc.mNetworkCapabilities == this.mNetworkCapabilities);
}
private boolean equalsNetCapabilitiesImmutable(NetworkCapabilities that) {
return ((this.mNetworkCapabilities & ~MUTABLE_CAPABILITIES) ==
(that.mNetworkCapabilities & ~MUTABLE_CAPABILITIES));
}
private boolean equalsNetCapabilitiesRequestable(NetworkCapabilities that) {
return ((this.mNetworkCapabilities & ~NON_REQUESTABLE_CAPABILITIES) ==
(that.mNetworkCapabilities & ~NON_REQUESTABLE_CAPABILITIES));
}
/**
* Removes the NET_CAPABILITY_NOT_RESTRICTED capability if all the capabilities it provides are
* typically provided by restricted networks.
*
* TODO: consider:
* - Renaming it to guessRestrictedCapability and make it set the
* restricted capability bit in addition to clearing it.
* @hide
*/
public void maybeMarkCapabilitiesRestricted() {
// Verify there aren't any unrestricted capabilities. If there are we say
// the whole thing is unrestricted.
final boolean hasUnrestrictedCapabilities =
((mNetworkCapabilities & UNRESTRICTED_CAPABILITIES) != 0);
// Must have at least some restricted capabilities.
final boolean hasRestrictedCapabilities =
((mNetworkCapabilities & RESTRICTED_CAPABILITIES) != 0);
if (hasRestrictedCapabilities && !hasUnrestrictedCapabilities) {
removeCapability(NET_CAPABILITY_NOT_RESTRICTED);
}
}
/**
* Representing the transport type. Apps should generally not care about transport. A
* request for a fast internet connection could be satisfied by a number of different
* transports. If any are specified here it will be satisfied a Network that matches
* any of them. If a caller doesn't care about the transport it should not specify any.
*/
private long mTransportTypes;
/**
* Indicates this network uses a Cellular transport.
*/
public static final int TRANSPORT_CELLULAR = 0;
/**
* Indicates this network uses a Wi-Fi transport.
*/
public static final int TRANSPORT_WIFI = 1;
/**
* Indicates this network uses a Bluetooth transport.
*/
public static final int TRANSPORT_BLUETOOTH = 2;
/**
* Indicates this network uses an Ethernet transport.
*/
public static final int TRANSPORT_ETHERNET = 3;
/**
* Indicates this network uses a VPN transport.
*/
public static final int TRANSPORT_VPN = 4;
/**
* Indicates this network uses a Wi-Fi Aware transport.
*/
public static final int TRANSPORT_WIFI_AWARE = 5;
/** @hide */
public static final int MIN_TRANSPORT = TRANSPORT_CELLULAR;
/** @hide */
public static final int MAX_TRANSPORT = TRANSPORT_WIFI_AWARE;
private static final String[] TRANSPORT_NAMES = {
"CELLULAR",
"WIFI",
"BLUETOOTH",
"ETHERNET",
"VPN",
"WIFI_AWARE"
};
/**
* Adds the given transport type to this {@code NetworkCapability} instance.
* Multiple transports may be applied sequentially. Note that when searching
* for a network to satisfy a request, any listed in the request will satisfy the request.
* For example {@code TRANSPORT_WIFI} and {@code TRANSPORT_ETHERNET} added to a
* {@code NetworkCapabilities} would cause either a Wi-Fi network or an Ethernet network
* to be selected. This is logically different than
* {@code NetworkCapabilities.NET_CAPABILITY_*} listed above.
*
* @param transportType the {@code NetworkCapabilities.TRANSPORT_*} to be added.
* @return This NetworkCapabilities instance, to facilitate chaining.
* @hide
*/
public NetworkCapabilities addTransportType(int transportType) {
if (transportType < MIN_TRANSPORT || transportType > MAX_TRANSPORT) {
throw new IllegalArgumentException("TransportType out of range");
}
mTransportTypes |= 1 << transportType;
setNetworkSpecifier(mNetworkSpecifier); // used for exception checking
return this;
}
/**
* Removes (if found) the given transport from this {@code NetworkCapability} instance.
*
* @param transportType the {@code NetworkCapabilities.TRANSPORT_*} to be removed.
* @return This NetworkCapabilities instance, to facilitate chaining.
* @hide
*/
public NetworkCapabilities removeTransportType(int transportType) {
if (transportType < MIN_TRANSPORT || transportType > MAX_TRANSPORT) {
throw new IllegalArgumentException("TransportType out of range");
}
mTransportTypes &= ~(1 << transportType);
setNetworkSpecifier(mNetworkSpecifier); // used for exception checking
return this;
}
/**
* Gets all the transports set on this {@code NetworkCapability} instance.
*
* @return an array of {@code NetworkCapabilities.TRANSPORT_*} values
* for this instance.
* @hide
*/
public int[] getTransportTypes() {
return BitUtils.unpackBits(mTransportTypes);
}
/**
* Tests for the presence of a transport on this instance.
*
* @param transportType the {@code NetworkCapabilities.TRANSPORT_*} to be tested for.
* @return {@code true} if set on this instance.
*/
public boolean hasTransport(int transportType) {
if (transportType < MIN_TRANSPORT || transportType > MAX_TRANSPORT) {
return false;
}
return ((mTransportTypes & (1 << transportType)) != 0);
}
private void combineTransportTypes(NetworkCapabilities nc) {
this.mTransportTypes |= nc.mTransportTypes;
}
private boolean satisfiedByTransportTypes(NetworkCapabilities nc) {
return ((this.mTransportTypes == 0) ||
((this.mTransportTypes & nc.mTransportTypes) != 0));
}
/** @hide */
public boolean equalsTransportTypes(NetworkCapabilities nc) {
return (nc.mTransportTypes == this.mTransportTypes);
}
/**
* Passive link bandwidth. This is a rough guide of the expected peak bandwidth
* for the first hop on the given transport. It is not measured, but may take into account
* link parameters (Radio technology, allocated channels, etc).
*/
private int mLinkUpBandwidthKbps;
private int mLinkDownBandwidthKbps;
/**
* Sets the upstream bandwidth for this network in Kbps. This always only refers to
* the estimated first hop transport bandwidth.
* <p>
* Note that when used to request a network, this specifies the minimum acceptable.
* When received as the state of an existing network this specifies the typical
* first hop bandwidth expected. This is never measured, but rather is inferred
* from technology type and other link parameters. It could be used to differentiate
* between very slow 1xRTT cellular links and other faster networks or even between
* 802.11b vs 802.11AC wifi technologies. It should not be used to differentiate between
* fast backhauls and slow backhauls.
*
* @param upKbps the estimated first hop upstream (device to network) bandwidth.
* @hide
*/
public void setLinkUpstreamBandwidthKbps(int upKbps) {
mLinkUpBandwidthKbps = upKbps;
}
/**
* Retrieves the upstream bandwidth for this network in Kbps. This always only refers to
* the estimated first hop transport bandwidth.
*
* @return The estimated first hop upstream (device to network) bandwidth.
*/
public int getLinkUpstreamBandwidthKbps() {
return mLinkUpBandwidthKbps;
}
/**
* Sets the downstream bandwidth for this network in Kbps. This always only refers to
* the estimated first hop transport bandwidth.
* <p>
* Note that when used to request a network, this specifies the minimum acceptable.
* When received as the state of an existing network this specifies the typical
* first hop bandwidth expected. This is never measured, but rather is inferred
* from technology type and other link parameters. It could be used to differentiate
* between very slow 1xRTT cellular links and other faster networks or even between
* 802.11b vs 802.11AC wifi technologies. It should not be used to differentiate between
* fast backhauls and slow backhauls.
*
* @param downKbps the estimated first hop downstream (network to device) bandwidth.
* @hide
*/
public void setLinkDownstreamBandwidthKbps(int downKbps) {
mLinkDownBandwidthKbps = downKbps;
}
/**
* Retrieves the downstream bandwidth for this network in Kbps. This always only refers to
* the estimated first hop transport bandwidth.
*
* @return The estimated first hop downstream (network to device) bandwidth.
*/
public int getLinkDownstreamBandwidthKbps() {
return mLinkDownBandwidthKbps;
}
private void combineLinkBandwidths(NetworkCapabilities nc) {
this.mLinkUpBandwidthKbps =
Math.max(this.mLinkUpBandwidthKbps, nc.mLinkUpBandwidthKbps);
this.mLinkDownBandwidthKbps =
Math.max(this.mLinkDownBandwidthKbps, nc.mLinkDownBandwidthKbps);
}
private boolean satisfiedByLinkBandwidths(NetworkCapabilities nc) {
return !(this.mLinkUpBandwidthKbps > nc.mLinkUpBandwidthKbps ||
this.mLinkDownBandwidthKbps > nc.mLinkDownBandwidthKbps);
}
private boolean equalsLinkBandwidths(NetworkCapabilities nc) {
return (this.mLinkUpBandwidthKbps == nc.mLinkUpBandwidthKbps &&
this.mLinkDownBandwidthKbps == nc.mLinkDownBandwidthKbps);
}
private NetworkSpecifier mNetworkSpecifier = null;
/**
* Sets the optional bearer specific network specifier.
* This has no meaning if a single transport is also not specified, so calling
* this without a single transport set will generate an exception, as will
* subsequently adding or removing transports after this is set.
* </p>
*
* @param networkSpecifier A concrete, parcelable framework class that extends
* NetworkSpecifier.
* @return This NetworkCapabilities instance, to facilitate chaining.
* @hide
*/
public NetworkCapabilities setNetworkSpecifier(NetworkSpecifier networkSpecifier) {
if (networkSpecifier != null && Long.bitCount(mTransportTypes) != 1) {
throw new IllegalStateException("Must have a single transport specified to use " +
"setNetworkSpecifier");
}
mNetworkSpecifier = networkSpecifier;
return this;
}
/**
* Gets the optional bearer specific network specifier.
*
* @return The optional {@link NetworkSpecifier} specifying the bearer specific network
* specifier. See {@link #setNetworkSpecifier}.
* @hide
*/
public NetworkSpecifier getNetworkSpecifier() {
return mNetworkSpecifier;
}
private void combineSpecifiers(NetworkCapabilities nc) {
if (mNetworkSpecifier != null && !mNetworkSpecifier.equals(nc.mNetworkSpecifier)) {
throw new IllegalStateException("Can't combine two networkSpecifiers");
}
setNetworkSpecifier(nc.mNetworkSpecifier);
}
private boolean satisfiedBySpecifier(NetworkCapabilities nc) {
return mNetworkSpecifier == null || mNetworkSpecifier.satisfiedBy(nc.mNetworkSpecifier)
|| nc.mNetworkSpecifier instanceof MatchAllNetworkSpecifier;
}
private boolean equalsSpecifier(NetworkCapabilities nc) {
return Objects.equals(mNetworkSpecifier, nc.mNetworkSpecifier);
}
/**
* Magic value that indicates no signal strength provided. A request specifying this value is
* always satisfied.
*
* @hide
*/
public static final int SIGNAL_STRENGTH_UNSPECIFIED = Integer.MIN_VALUE;
/**
* Signal strength. This is a signed integer, and higher values indicate better signal.
* The exact units are bearer-dependent. For example, Wi-Fi uses RSSI.
*/
private int mSignalStrength;
/**
* Sets the signal strength. This is a signed integer, with higher values indicating a stronger
* signal. The exact units are bearer-dependent. For example, Wi-Fi uses the same RSSI units
* reported by WifiManager.
* <p>
* Note that when used to register a network callback, this specifies the minimum acceptable
* signal strength. When received as the state of an existing network it specifies the current
* value. A value of code SIGNAL_STRENGTH_UNSPECIFIED} means no value when received and has no
* effect when requesting a callback.
*
* @param signalStrength the bearer-specific signal strength.
* @hide
*/
public void setSignalStrength(int signalStrength) {
mSignalStrength = signalStrength;
}
/**
* Returns {@code true} if this object specifies a signal strength.
*
* @hide
*/
public boolean hasSignalStrength() {
return mSignalStrength > SIGNAL_STRENGTH_UNSPECIFIED;
}
/**
* Retrieves the signal strength.
*
* @return The bearer-specific signal strength.
* @hide
*/
public int getSignalStrength() {
return mSignalStrength;
}
private void combineSignalStrength(NetworkCapabilities nc) {
this.mSignalStrength = Math.max(this.mSignalStrength, nc.mSignalStrength);
}
private boolean satisfiedBySignalStrength(NetworkCapabilities nc) {
return this.mSignalStrength <= nc.mSignalStrength;
}
private boolean equalsSignalStrength(NetworkCapabilities nc) {
return this.mSignalStrength == nc.mSignalStrength;
}
/**
* Combine a set of Capabilities to this one. Useful for coming up with the complete set
* @hide
*/
public void combineCapabilities(NetworkCapabilities nc) {
combineNetCapabilities(nc);
combineTransportTypes(nc);
combineLinkBandwidths(nc);
combineSpecifiers(nc);
combineSignalStrength(nc);
}
/**
* Check if our requirements are satisfied by the given {@code NetworkCapabilities}.
*
* @param nc the {@code NetworkCapabilities} that may or may not satisfy our requirements.
* @param onlyImmutable if {@code true}, do not consider mutable requirements such as link
* bandwidth, signal strength, or validation / captive portal status.
*
* @hide
*/
private boolean satisfiedByNetworkCapabilities(NetworkCapabilities nc, boolean onlyImmutable) {
return (nc != null &&
satisfiedByNetCapabilities(nc, onlyImmutable) &&
satisfiedByTransportTypes(nc) &&
(onlyImmutable || satisfiedByLinkBandwidths(nc)) &&
satisfiedBySpecifier(nc) &&
(onlyImmutable || satisfiedBySignalStrength(nc)));
}
/**
* Check if our requirements are satisfied by the given {@code NetworkCapabilities}.
*
* @param nc the {@code NetworkCapabilities} that may or may not satisfy our requirements.
*
* @hide
*/
public boolean satisfiedByNetworkCapabilities(NetworkCapabilities nc) {
return satisfiedByNetworkCapabilities(nc, false);
}
/**
* Check if our immutable requirements are satisfied by the given {@code NetworkCapabilities}.
*
* @param nc the {@code NetworkCapabilities} that may or may not satisfy our requirements.
*
* @hide
*/
public boolean satisfiedByImmutableNetworkCapabilities(NetworkCapabilities nc) {
return satisfiedByNetworkCapabilities(nc, true);
}
/**
* Checks that our immutable capabilities are the same as those of the given
* {@code NetworkCapabilities}.
*
* @hide
*/
public boolean equalImmutableCapabilities(NetworkCapabilities nc) {
if (nc == null) return false;
return (equalsNetCapabilitiesImmutable(nc) &&
equalsTransportTypes(nc) &&
equalsSpecifier(nc));
}
/**
* Checks that our requestable capabilities are the same as those of the given
* {@code NetworkCapabilities}.
*
* @hide
*/
public boolean equalRequestableCapabilities(NetworkCapabilities nc) {
if (nc == null) return false;
return (equalsNetCapabilitiesRequestable(nc) &&
equalsTransportTypes(nc) &&
equalsSpecifier(nc));
}
@Override
public boolean equals(Object obj) {
if (obj == null || (obj instanceof NetworkCapabilities == false)) return false;
NetworkCapabilities that = (NetworkCapabilities)obj;
return (equalsNetCapabilities(that) &&
equalsTransportTypes(that) &&
equalsLinkBandwidths(that) &&
equalsSignalStrength(that) &&
equalsSpecifier(that));
}
@Override
public int hashCode() {
return ((int)(mNetworkCapabilities & 0xFFFFFFFF) +
((int)(mNetworkCapabilities >> 32) * 3) +
((int)(mTransportTypes & 0xFFFFFFFF) * 5) +
((int)(mTransportTypes >> 32) * 7) +
(mLinkUpBandwidthKbps * 11) +
(mLinkDownBandwidthKbps * 13) +
Objects.hashCode(mNetworkSpecifier) * 17 +
(mSignalStrength * 19));
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(mNetworkCapabilities);
dest.writeLong(mTransportTypes);
dest.writeInt(mLinkUpBandwidthKbps);
dest.writeInt(mLinkDownBandwidthKbps);
dest.writeParcelable((Parcelable) mNetworkSpecifier, flags);
dest.writeInt(mSignalStrength);
}
public static final Creator<NetworkCapabilities> CREATOR =
new Creator<NetworkCapabilities>() {
@Override
public NetworkCapabilities createFromParcel(Parcel in) {
NetworkCapabilities netCap = new NetworkCapabilities();
netCap.mNetworkCapabilities = in.readLong();
netCap.mTransportTypes = in.readLong();
netCap.mLinkUpBandwidthKbps = in.readInt();
netCap.mLinkDownBandwidthKbps = in.readInt();
netCap.mNetworkSpecifier = in.readParcelable(null);
netCap.mSignalStrength = in.readInt();
return netCap;
}
@Override
public NetworkCapabilities[] newArray(int size) {
return new NetworkCapabilities[size];
}
};
@Override
public String toString() {
int[] types = getTransportTypes();
String transports = (types.length > 0) ? " Transports: " + transportNamesOf(types) : "";
types = getCapabilities();
String capabilities = (types.length > 0 ? " Capabilities: " : "");
for (int i = 0; i < types.length; ) {
switch (types[i]) {
case NET_CAPABILITY_MMS: capabilities += "MMS"; break;
case NET_CAPABILITY_SUPL: capabilities += "SUPL"; break;
case NET_CAPABILITY_DUN: capabilities += "DUN"; break;
case NET_CAPABILITY_FOTA: capabilities += "FOTA"; break;
case NET_CAPABILITY_IMS: capabilities += "IMS"; break;
case NET_CAPABILITY_CBS: capabilities += "CBS"; break;
case NET_CAPABILITY_WIFI_P2P: capabilities += "WIFI_P2P"; break;
case NET_CAPABILITY_IA: capabilities += "IA"; break;
case NET_CAPABILITY_RCS: capabilities += "RCS"; break;
case NET_CAPABILITY_XCAP: capabilities += "XCAP"; break;
case NET_CAPABILITY_EIMS: capabilities += "EIMS"; break;
case NET_CAPABILITY_NOT_METERED: capabilities += "NOT_METERED"; break;
case NET_CAPABILITY_INTERNET: capabilities += "INTERNET"; break;
case NET_CAPABILITY_NOT_RESTRICTED: capabilities += "NOT_RESTRICTED"; break;
case NET_CAPABILITY_TRUSTED: capabilities += "TRUSTED"; break;
case NET_CAPABILITY_NOT_VPN: capabilities += "NOT_VPN"; break;
case NET_CAPABILITY_VALIDATED: capabilities += "VALIDATED"; break;
case NET_CAPABILITY_CAPTIVE_PORTAL: capabilities += "CAPTIVE_PORTAL"; break;
case NET_CAPABILITY_FOREGROUND: capabilities += "FOREGROUND"; break;
}
if (++i < types.length) capabilities += "&";
}
String upBand = ((mLinkUpBandwidthKbps > 0) ? " LinkUpBandwidth>=" +
mLinkUpBandwidthKbps + "Kbps" : "");
String dnBand = ((mLinkDownBandwidthKbps > 0) ? " LinkDnBandwidth>=" +
mLinkDownBandwidthKbps + "Kbps" : "");
String specifier = (mNetworkSpecifier == null ?
"" : " Specifier: <" + mNetworkSpecifier + ">");
String signalStrength = (hasSignalStrength() ? " SignalStrength: " + mSignalStrength : "");
return "[" + transports + capabilities + upBand + dnBand + specifier + signalStrength + "]";
}
/**
* @hide
*/
public static String transportNamesOf(int[] types) {
if (types == null || types.length == 0) {
return "";
}
StringBuilder transports = new StringBuilder();
for (int t : types) {
transports.append("|").append(transportNameOf(t));
}
return transports.substring(1);
}
/**
* @hide
*/
public static String transportNameOf(int transport) {
if (transport < 0 || TRANSPORT_NAMES.length <= transport) {
return "UNKNOWN";
}
return TRANSPORT_NAMES[transport];
}
}
|
anhle1476/restaurant-manager-react-client
|
src/components/Cashier/TableAndArea/AddTableModal/AddTableModal.js
|
import React, { useState } from "react";
import { Modal, ModalBody, ModalFooter, Form, Button } from "reactstrap";
import tableApi from "../../../../api/tableApi";
import { toastError, toastSuccess } from "../../../../utils/toastUtils";
import ModalHeaderWithCloseBtn from "../../../ModalHeaderWithCloseBtn/ModalHeaderWithCloseBtn";
import CustomInputGroup from "../../../CustomInputGroup/CustomInputGroup";
const ADD_SCHEMA = { name: "" };
const FEEDBACK_SCHEMA = { name: "", area: "" };
const AddAreaModal = ({ show, toggle, currentArea, handleAddTable }) => {
const [data, setData] = useState(ADD_SCHEMA);
const [feedback, setFeedback] = useState(FEEDBACK_SCHEMA);
const handleChange = ({ target }) => {
setData({ ...data, [target.name]: target.value });
};
const handleSubmit = async (e) => {
e.preventDefault();
try {
setFeedback(FEEDBACK_SCHEMA);
const res = await tableApi.create({ ...data, area: currentArea });
handleAddTable(res.data);
toastSuccess("Tạo bàn thành công");
toggle();
setData(ADD_SCHEMA);
} catch (ex) {
setFeedback(ex.response.data);
toastError("Tạo bàn thất bại");
}
};
return (
<Modal isOpen={show} toggle={toggle}>
<Form onSubmit={handleSubmit}>
<ModalHeaderWithCloseBtn toggle={toggle}>
Thêm bàn
</ModalHeaderWithCloseBtn>
<ModalBody className="bg-white">
<CustomInputGroup
required
onChange={handleChange}
label="Tên bàn"
name="name"
value={data.name}
feedback={feedback.name}
/>
<CustomInputGroup
disabled
required
onChange={(e) => e.preventDefault()}
label="Khu vực"
name="area"
value={currentArea.name}
feedback={feedback.area}
/>
</ModalBody>
<ModalFooter>
<Button color="light" onClick={toggle}>
Hủy
</Button>{" "}
<Button color="warning" type="submit">
Lưu
</Button>
</ModalFooter>
</Form>
</Modal>
);
};
export default AddAreaModal;
|
mattshirtliffe/Axelrod
|
axelrod/result_set.py
|
import csv
import tqdm
from collections import namedtuple
from numpy import mean, nanmedian, std
from . import eigen
from .game import Game
import axelrod.interaction_utils as iu
def update_progress_bar(method):
"""A decorator to update a progress bar if it exists"""
def wrapper(*args):
"""Run the method and update the progress bar if it exists"""
output = method(*args)
try:
args[0].progress_bar.update(1)
except AttributeError:
pass
return output
return wrapper
class ResultSet(object):
"""A class to hold the results of a tournament."""
def __init__(self, players, interactions, progress_bar=True, game=None):
"""
Parameters
----------
players : list
a list of player objects.
interactions : list
a list of dictionaries mapping tuples of player indices to
interactions (1 for each repetition)
progress_bar : bool
Whether or not to create a progress bar which will be updated
"""
if game is None:
self.game = Game()
else:
self.game = game
self.players = players
self.nplayers = len(players)
self.interactions = interactions
self.nrepetitions = max(
[len(rep) for rep in list(interactions.values())])
if progress_bar:
self.progress_bar = tqdm.tqdm(total=19, desc="Analysing results")
else:
self.progress_bar = False
# Calculate all attributes:
self.build_all()
def build_all(self):
"""Build all the results. In a seperate method to make inheritance more
straightforward"""
self.wins = self.build_wins()
self.match_lengths = self.build_match_lengths()
self.scores = self.build_scores()
self.normalised_scores = self.build_normalised_scores()
self.ranking = self.build_ranking()
self.ranked_names = self.build_ranked_names()
self.payoffs = self.build_payoffs()
self.payoff_matrix = self.build_payoff_matrix()
self.payoff_stddevs = self.build_payoff_stddevs()
self.score_diffs = self.build_score_diffs()
self.payoff_diffs_means = self.build_payoff_diffs_means()
self.cooperation = self.build_cooperation()
self.normalised_cooperation = self.build_normalised_cooperation()
self.vengeful_cooperation = self.build_vengeful_cooperation()
self.cooperating_rating = self.build_cooperating_rating()
self.good_partner_matrix = self.build_good_partner_matrix()
self.good_partner_rating = self.build_good_partner_rating()
self.eigenmoses_rating = self.build_eigenmoses_rating()
self.eigenjesus_rating = self.build_eigenjesus_rating()
try:
self.progress_bar.close()
except AttributeError:
pass
@property
def _null_results_matrix(self):
"""
Returns:
--------
A null matrix (i.e. fully populated with zero values) using
lists of the form required for the results dictionary.
i.e. one row per player, containing one element per opponent (in
order of player index) which lists values for each repetition.
"""
plist = list(range(self.nplayers))
replist = list(range(self.nrepetitions))
return [[[0 for j in plist] for i in plist] for r in replist]
@update_progress_bar
def build_match_lengths(self):
"""
Returns:
--------
The match lengths. List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of repetitions and MLi is a list of the form:
[Pli1, PLi2, Pli3, ..., Plim]
Where m is the number of players and Plij is of the form:
[aij1, aij2, aij3, ..., aijk]
Where k is the number of players and aijk is the length of the match
between player j and k in repetition i.
"""
match_lengths = self._null_results_matrix
for index_pair, repetitions in self.interactions.items():
for repetition, interaction in enumerate(repetitions):
player, opponent = index_pair
match_lengths[repetition][player][opponent] = len(interaction)
if player != opponent: # Match lengths are symmetric
match_lengths[repetition][opponent][player] = len(interaction)
return match_lengths
@update_progress_bar
def build_scores(self):
"""
Returns:
--------
The total scores per player for each repetition lengths.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of repetitions and pij is the total score
obtained by each player in repetition j.
In Axelrod's original tournament, there were no self-interactions
(e.g. player 1 versus player 1) and so these are also ignored.
"""
scores = [[0 for rep in range(self.nrepetitions)] for _ in
range(self.nplayers)]
for index_pair, repetitions in self.interactions.items():
if index_pair[0] != index_pair[1]: # Ignoring self interactions
for repetition, interaction in enumerate(repetitions):
final_scores = iu.compute_final_score(interaction,
self.game)
for player in range(2):
player_index = index_pair[player]
player_score = final_scores[player]
scores[player_index][repetition] += player_score
return scores
@update_progress_bar
def build_ranked_names(self):
"""
Returns:
--------
Returns the ranked names. A list of names as calculated by
self.ranking.
"""
return [str(self.players[i]) for i in self.ranking]
@update_progress_bar
def build_wins(self):
"""
Returns:
--------
The total wins per player for each repetition lengths.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of repetitions and pij is the total wins
obtained by each player in repetition j.
In Axelrod's original tournament, there were no self-interactions
(e.g. player 1 versus player 1) and so these are also ignored.
"""
wins = [[0 for rep in range(self.nrepetitions)] for _ in
range(self.nplayers)]
for index_pair, repetitions in self.interactions.items():
if index_pair[0] != index_pair[1]: # Ignore self interactions
for player in range(2):
player_index = index_pair[player]
for rep, interaction in enumerate(repetitions):
winner_index = iu.compute_winner_index(interaction,
self.game)
if winner_index is not False and player == winner_index:
wins[player_index][rep] += 1
return wins
@update_progress_bar
def build_normalised_scores(self):
"""
Returns:
--------
The total mean scores per turn per layer for each repetition
lengths. List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of repetitions and pij is the mean scores per
turn obtained by each player in repetition j.
In Axelrod's original tournament, there were no self-interactions
(e.g. player 1 versus player 1) and so these are also ignored.
"""
normalised_scores = [
[[] for rep in range(self.nrepetitions)] for _ in
range(self.nplayers)]
# Getting list of all per turn scores for each player for each rep
for index_pair, repetitions in self.interactions.items():
for repetition, interaction in enumerate(repetitions):
if index_pair[0] != index_pair[1]: # Ignore self interactions
scores_per_turn = iu.compute_final_score_per_turn(
interaction,
self.game)
for player in range(2):
player_index = index_pair[player]
score_per_turn = scores_per_turn[player]
normalised_scores[player_index][repetition].append(score_per_turn)
# Obtaining mean scores and overwriting corresponding entry in
# normalised scores
for i, rep in enumerate(normalised_scores):
for j, player_scores in enumerate(rep):
normalised_scores[i][j] = mean(player_scores)
return normalised_scores
@update_progress_bar
def build_ranking(self):
"""
Returns:
--------
The ranking. List of the form:
[R1, R2, R3..., Rn]
Where n is the number of players and Rj is the rank of the jth player
(based on median normalised score).
"""
return sorted(range(self.nplayers),
key=lambda i: -nanmedian(self.normalised_scores[i]))
@update_progress_bar
def build_payoffs(self):
"""
Returns:
--------
The list of per turn payoffs.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of players and pij is a list of the form:
[uij1, uij2, ..., uijk]
Where k is the number of repetitions and uijk is the list of
utilities obtained by player i against player j in each repetition.
"""
plist = list(range(self.nplayers))
payoffs = [[[] for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
utilities = []
for index_pair, repetitions in self.interactions.items():
if (player, opponent) == index_pair:
for interaction in repetitions:
utilities.append(iu.compute_final_score_per_turn(
interaction,
self.game)[0])
elif (opponent, player) == index_pair:
for interaction in repetitions:
utilities.append(iu.compute_final_score_per_turn(
interaction,
self.game)[1])
payoffs[player][opponent] = utilities
return payoffs
@update_progress_bar
def build_payoff_matrix(self):
"""
Returns:
--------
The mean of per turn payoffs.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of players and pij is a list of the form:
[uij1, uij2, ..., uijk]
Where k is the number of repetitions and u is the mean utility (over
all repetitions) obtained by player i against player j.
"""
plist = list(range(self.nplayers))
payoff_matrix = [[[] for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
utilities = self.payoffs[player][opponent]
if utilities:
payoff_matrix[player][opponent] = mean(utilities)
else:
payoff_matrix[player][opponent] = 0
return payoff_matrix
@update_progress_bar
def build_payoff_stddevs(self):
"""
Returns:
--------
The mean of per turn payoffs.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of players and pij is a list of the form:
[uij1, uij2, ..., uijk]
Where k is the number of repetitions and u is the standard
deviation of the utility (over all repetitions) obtained by player
i against player j.
"""
plist = list(range(self.nplayers))
payoff_stddevs = [[[0] for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
utilities = self.payoffs[player][opponent]
if utilities:
payoff_stddevs[player][opponent] = std(utilities)
else:
payoff_stddevs[player][opponent] = 0
return payoff_stddevs
@update_progress_bar
def build_score_diffs(self):
"""
Returns:
--------
Returns the score differences between players.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where m is the number of players and pij is a list of the form:
[uij1, uij2, ..., uijk]
Where k is the number of repetitions and uijm is the difference of the
scores per turn between player i and j in repetition m.
"""
plist = list(range(self.nplayers))
score_diffs = [[[0] * self.nrepetitions for opponent in plist]
for player in plist]
for player in plist:
for opponent in plist:
if (player, opponent) in self.interactions:
for repetition, interaction in enumerate(self.interactions[(player, opponent)]):
scores = iu.compute_final_score_per_turn(interaction,
self.game)
diff = (scores[0] - scores[1])
score_diffs[player][opponent][repetition] = diff
if (opponent, player) in self.interactions:
for repetition, interaction in enumerate(self.interactions[(opponent, player)]):
scores = iu.compute_final_score_per_turn(interaction,
self.game)
diff = (scores[1] - scores[0])
score_diffs[player][opponent][repetition] = diff
return score_diffs
@update_progress_bar
def build_payoff_diffs_means(self):
"""
Returns:
--------
The score differences between players.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where pij is the mean difference of the
scores per turn between player i and j in repetition m.
"""
payoff_diffs_means = [[mean(diff) for diff in player]
for player in self.score_diffs]
return payoff_diffs_means
@update_progress_bar
def build_cooperation(self):
"""
Returns:
--------
The list of cooperation counts.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where pij is the total number of cooperations over all repetitions
played by player i against player j.
"""
plist = list(range(self.nplayers))
cooperations = [[0 for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
if player != opponent:
for index_pair, repetitions in self.interactions.items():
coop_count = 0
if (player, opponent) == index_pair:
for interaction in repetitions:
coop_count += iu.compute_cooperations(interaction)[0]
elif (opponent, player) == index_pair:
for interaction in repetitions:
coop_count += iu.compute_cooperations(interaction)[1]
cooperations[player][opponent] += coop_count
return cooperations
@update_progress_bar
def build_normalised_cooperation(self):
"""
Returns:
--------
The list of per turn cooperation counts.
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pin]
Where pij is the mean number of
cooperations per turn played by player i against player j in each
repetition.
"""
plist = list(range(self.nplayers))
normalised_cooperations = [[0 for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
coop_counts = []
if (player, opponent) in self.interactions:
repetitions = self.interactions[(player, opponent)]
for interaction in repetitions:
coop_counts.append(iu.compute_normalised_cooperation(interaction)[0])
if (opponent, player) in self.interactions:
repetitions = self.interactions[(opponent, player)]
for interaction in repetitions:
coop_counts.append(iu.compute_normalised_cooperation(interaction)[1])
if ((player, opponent) not in self.interactions) and ((opponent, player) not in self.interactions):
coop_counts.append(0)
# Mean over all reps:
normalised_cooperations[player][opponent] = mean(coop_counts)
return normalised_cooperations
@update_progress_bar
def build_vengeful_cooperation(self):
"""
Returns:
--------
The vengeful cooperation matrix derived from the
normalised cooperation matrix:
Dij = 2(Cij - 0.5)
"""
return [[2 * (element - 0.5) for element in row]
for row in self.normalised_cooperation]
@update_progress_bar
def build_cooperating_rating(self):
"""
Returns:
--------
The list of cooperation ratings
List of the form:
[ML1, ML2, ML3..., MLn]
Where n is the number of players and MLi is a list of the form:
[pi1, pi2, pi3, ..., pim]
Where pij is the total number of cooperations divided by the total
number of turns over all repetitions played by player i against
player j.
"""
plist = list(range(self.nplayers))
total_length_v_opponent = [zip(*[rep[player_index] for
rep in self.match_lengths])
for player_index in plist]
lengths = [[sum(e) for j, e in enumerate(row) if i != j] for i, row in
enumerate(total_length_v_opponent)]
# Max is to deal with edge cases of matches that have no turns
return [sum(cs) / max(1, float(sum(ls))) for cs, ls
in zip(self.cooperation, lengths)]
@update_progress_bar
def build_good_partner_matrix(self):
"""
Returns:
--------
An n by n matrix of good partner ratings for n players i.e. an n by
n matrix where n is the number of players. Each row (i) and column
(j) represents an individual player and the value Pij is the sum of
the number of repetitions where player i cooperated as often or
more than opponent j.
"""
plist = list(range(self.nplayers))
good_partner_matrix = [[0 for opponent in plist] for player in plist]
for player in plist:
for opponent in plist:
if player != opponent:
for index_pair, repetitions in self.interactions.items():
if (player, opponent) == index_pair:
for interaction in repetitions:
coops = iu.compute_cooperations(interaction)
if coops[0] >= coops[1]:
good_partner_matrix[player][opponent] += 1
elif (opponent, player) == index_pair:
for interaction in repetitions:
coops = iu.compute_cooperations(interaction)
if coops[0] <= coops[1]:
good_partner_matrix[player][opponent] += 1
return good_partner_matrix
@update_progress_bar
def build_good_partner_rating(self):
"""
Returns:
--------
A list of good partner ratings ordered by player index.
"""
plist = list(range(self.nplayers))
good_partner_rating = []
for player_index in plist:
total_interactions = 0
for index_pair, repetitions in self.interactions.items():
if player_index in index_pair and index_pair[0] != index_pair[1]:
total_interactions += len(repetitions)
# Max is to deal with edge case of matchs with no turns
rating = sum(self.good_partner_matrix[player_index]) / max(1, float(total_interactions))
good_partner_rating.append(rating)
return good_partner_rating
@update_progress_bar
def build_eigenjesus_rating(self):
"""
Returns:
--------
The eigenjesus rating as defined in:
http://www.scottaaronson.com/morality.pdf
"""
eigenvector, eigenvalue = eigen.principal_eigenvector(
self.normalised_cooperation)
return eigenvector.tolist()
@update_progress_bar
def build_eigenmoses_rating(self):
"""
Returns:
--------
The eigenmoses rating as defined in:
http://www.scottaaronson.com/morality.pdf
"""
eigenvector, eigenvalue = eigen.principal_eigenvector(
self.vengeful_cooperation)
return eigenvector.tolist()
def __eq__(self, other):
return all([self.wins == other.wins,
self.match_lengths == other.match_lengths,
self.scores == other.scores,
self.normalised_scores == other.normalised_scores,
self.ranking == other.ranking,
self.ranked_names == other.ranked_names,
self.payoffs == other.payoffs,
self.payoff_matrix == other.payoff_matrix,
self.payoff_stddevs == other.payoff_stddevs,
self.score_diffs == other.score_diffs,
self.payoff_diffs_means == other.payoff_diffs_means,
self.cooperation == other.cooperation,
self.normalised_cooperation == other.normalised_cooperation,
self.vengeful_cooperation == other.vengeful_cooperation,
self.cooperating_rating == other.cooperating_rating,
self.good_partner_matrix == other.good_partner_matrix,
self.good_partner_rating == other.good_partner_rating,
self.eigenmoses_rating == other.eigenmoses_rating,
self.eigenjesus_rating == other.eigenjesus_rating])
def __ne__(self, other):
return not self.__eq__(other)
def summarise(self):
"""
Obtain summary of performance of each strategy:
ordered by rank, including median normalised score and cooperation
rating.
Output
------
A list of the form:
[[player name, median score, cooperation_rating],...]
"""
median_scores = map(nanmedian, self.normalised_scores)
median_wins = map(nanmedian, self.wins)
self.player = namedtuple("Player", ["Rank", "Name", "Median_score",
"Cooperation_rating", "Wins"])
summary_data = [perf for perf in zip(self.players,
median_scores,
self.cooperating_rating,
median_wins)]
summary_data = [self.player(rank, *summary_data[i]) for
rank, i in enumerate(self.ranking)]
return summary_data
def write_summary(self, filename):
"""
Write a csv file containing summary data of the results of the form:
"Rank", "Name", "Median-score-per-turn", "Cooperation-rating"
Parameters
----------
filename : a filepath to which to write the data
"""
summary_data = self.summarise()
with open(filename, 'w') as csvfile:
writer = csv.writer(csvfile, lineterminator='\n')
writer.writerow(self.player._fields)
for player in summary_data:
writer.writerow(player)
class ResultSetFromFile(ResultSet):
"""
A class to hold the results of a tournament. Reads in a CSV file produced
by the tournament class.
"""
def __init__(self, filename, progress_bar=True,
num_interactions=False, players=False, nrepetitions=False,
game=None, keep_interactions=False):
"""
Parameters
----------
filename : string
the file from which to read the interactions
progress_bar : bool
Whether or not to create a progress bar which will be updated
num_interactions : int
The number of interactions in the file. Used for the progress
bar. If not known but progress_bar is true, will be efficiently
read from file.
players : list
A list of the names of players. If not known will be efficiently
read from file.
nrepetitions : int
The number of repetitions of each match. If not know will be
efficiently read from file.
game : axelrod.Game
The particular game that should be used to calculate the scores.
keep_interactions : bool
Whether or not to load the interactions in to memory. WARNING:
for large tournaments this drastically increases the memory
required.
"""
if game is None:
self.game = Game()
else:
self.game = game
self.filename = filename
self.num_interactions = num_interactions
if not players and not nrepetitions:
self.players, self.nrepetitions = self._read_players_and_repetition_numbers(progress_bar=progress_bar)
else:
self.players, self.nrepetitions = players, nrepetitions
self.nplayers = len(self.players)
self._build_empty_metrics(keep_interactions=keep_interactions)
self._build_score_related_metrics(progress_bar=progress_bar,
keep_interactions=keep_interactions)
def create_progress_bar(self, desc=None):
"""
Create a progress bar for a read through of the data file.
Parameters
----------
desc : string
A description.
"""
if not self.num_interactions:
with open(self.filename) as f:
self.num_interactions = sum(1 for line in f)
return tqdm.tqdm(total=self.num_interactions, desc=desc)
def _read_players_and_repetition_numbers(self, progress_bar=False):
"""
Read the players and the repetitions numbers
Parameters
----------
progress_bar : bool
Whether or not to display a progress bar
"""
if progress_bar:
progress_bar = self.create_progress_bar(desc="Counting")
self.players_d = {}
self.repetitions_d = {}
with open(self.filename, 'r') as f:
for row in csv.reader(f):
index_pair = (int(row[0]), int(row[1]))
players = (row[2], row[3])
self._update_repetitions(index_pair)
self._update_players(index_pair, players)
if progress_bar:
progress_bar.update()
if progress_bar:
progress_bar.close()
nrepetitions = self._build_nrepetitions()
players = self._build_players()
return players, nrepetitions
def _update_players(self, index_pair, players):
"""
During a read of the data, update the internal players dictionary
Parameters
----------
index_pair : tuple
A tuple of player indices
players : tuple
A tuple of player names
"""
for index, player in zip(index_pair, players):
if index not in self.players_d:
self.players_d[index] = player
def _update_repetitions(self, index_pair):
"""
During a read of the data, update the internal repetitions dictionary
Parameters
----------
index_pair : tuple
A tuple of player indices
"""
try:
self.repetitions_d[index_pair] += 1
except KeyError:
self.repetitions_d[index_pair] = 1
def _build_nrepetitions(self):
"""
Count the number of repetitions
Returns
-------
nrepetitions : int
The number of repetitions
"""
nrepetitions = max(self.repetitions_d.values())
del self.repetitions_d # Manual garbage collection
return nrepetitions
def _build_players(self):
"""
List the players
Returns
-------
players : list
An ordered list of players
"""
players = []
for i in range(len(self.players_d)):
players.append(self.players_d[i])
del self.players_d # Manual garbage collection
return players
def read_match_chunks(self, progress_bar=False):
"""
A generator to return a given repetitions of matches
Parameters
----------
progress_bar : bool
whether or not to display a progress bar
Yields
------
repetitions : list
A list of lists include index pairs, player pairs and
repetitions. All repetitions for a given pair are yielded
together.
"""
if progress_bar:
progress_bar = self.create_progress_bar(desc="Analysing")
with open(self.filename, 'r') as f:
csv_reader = csv.reader(f)
repetitions = []
count = 0
for row in csv_reader:
repetitions.append(row)
count += 1
if progress_bar:
progress_bar.update()
if count == self.nrepetitions:
yield repetitions
repetitions = []
count = 0
if progress_bar:
progress_bar.close()
def _build_empty_metrics(self, keep_interactions=False):
"""
Creates the various empty metrics ready to be updated as the data is
read.
Parameters
----------
keep_interactions : bool
Whether or not to load the interactions in to memory
"""
plist = range(self.nplayers)
replist = range(self.nrepetitions)
self.match_lengths = [[[0 for opponent in plist] for player in plist]
for _ in replist]
self.wins = [[0 for _ in replist] for player in plist]
self.scores = [[0 for _ in replist] for player in plist]
self.normalised_scores = [[[] for _ in replist] for player in plist]
self.payoffs = [[[] for opponent in plist] for player in plist]
self.score_diffs = [[[0] * self.nrepetitions for opponent in plist]
for player in plist]
self.cooperation = [[0 for opponent in plist] for player in plist]
self.normalised_cooperation = [[[] for opponent in plist]
for player in plist]
self.good_partner_matrix = [[0 for opponent in plist]
for player in plist]
self.total_interactions = [0 for player in plist]
self.good_partner_rating = [0 for player in plist]
if keep_interactions:
self.interactions = {}
def _update_match_lengths(self, repetition, p1, p2, interaction):
self.match_lengths[repetition][p1][p2] = len(interaction)
def _update_payoffs(self, p1, p2, scores_per_turn):
self.payoffs[p1][p2].append(scores_per_turn[0])
if p1 != p2:
self.payoffs[p2][p1].append(scores_per_turn[1])
def _update_score_diffs(self, repetition, p1, p2, scores_per_turn):
diff = scores_per_turn[0] - scores_per_turn[1]
self.score_diffs[p1][p2][repetition] = diff
self.score_diffs[p2][p1][repetition] = -diff
def _update_normalised_cooperation(self, p1, p2, interaction):
normalised_cooperations = iu.compute_normalised_cooperation(interaction)
self.normalised_cooperation[p1][p2].append(normalised_cooperations[0])
self.normalised_cooperation[p2][p1].append(normalised_cooperations[1])
def _update_wins(self, repetition, p1, p2, interaction):
match_winner_index = iu.compute_winner_index(interaction,
game=self.game)
index_pair = [p1, p2]
if match_winner_index is not False:
winner_index = index_pair[match_winner_index]
self.wins[winner_index][repetition] += 1
def _update_scores(self, repetition, p1, p2, interaction):
final_scores = iu.compute_final_score(interaction, game=self.game)
for index, player in enumerate([p1, p2]):
player_score = final_scores[index]
self.scores[player][repetition] += player_score
def _update_normalised_scores(self, repetition, p1, p2, scores_per_turn):
for index, player in enumerate([p1, p2]):
score_per_turn = scores_per_turn[index]
self.normalised_scores[player][repetition].append(score_per_turn)
def _update_cooperation(self, p1, p2, cooperations):
self.cooperation[p1][p2] += cooperations[0]
self.cooperation[p2][p1] += cooperations[1]
def _update_good_partner_matrix(self, p1, p2, cooperations):
if cooperations[0] >= cooperations[1]:
self.good_partner_matrix[p1][p2] += 1
if cooperations[1] >= cooperations[0]:
self.good_partner_matrix[p2][p1] += 1
def _summarise_normalised_scores(self):
for i, rep in enumerate(self.normalised_scores):
for j, player_scores in enumerate(rep):
if player_scores != []:
self.normalised_scores[i][j] = mean(player_scores)
else:
self.normalised_scores[i][j] = 0
try:
self.progress_bar.update()
except AttributeError:
pass
def _summarise_normalised_cooperation(self):
for i, rep in enumerate(self.normalised_cooperation):
for j, cooperation in enumerate(rep):
if cooperation != []:
self.normalised_cooperation[i][j] = mean(cooperation)
else:
self.normalised_cooperation[i][j] = 0
try:
self.progress_bar.update()
except AttributeError:
pass
@update_progress_bar
def build_good_partner_rating(self):
return [sum(self.good_partner_matrix[player]) /
max(1, float(self.total_interactions[player]))
for player in range(self.nplayers)]
def _build_score_related_metrics(self, progress_bar=False,
keep_interactions=False):
"""
Read the data and carry out all relevant calculations.
Parameters
----------
progress_bar : bool
Whether or not to display a progress bar
keep_interactions : bool
Whether or not to lad the interactions in to memory
"""
match_chunks = self.read_match_chunks(progress_bar)
for match in match_chunks:
p1, p2 = int(match[0][0]), int(match[0][1])
for repetition, record in enumerate(match):
interaction = list(zip(record[4], record[5]))
if keep_interactions:
try:
self.interactions[(p1, p2)].append(interaction)
except KeyError:
self.interactions[(p1, p2)] = [interaction]
scores_per_turn = iu.compute_final_score_per_turn(interaction,
game=self.game)
cooperations = iu.compute_cooperations(interaction)
self._update_match_lengths(repetition, p1, p2, interaction)
self._update_payoffs(p1, p2, scores_per_turn)
self._update_score_diffs(repetition, p1, p2, scores_per_turn)
self._update_normalised_cooperation(p1, p2, interaction)
if p1 != p2: # Anything that ignores self interactions
for player in [p1, p2]:
self.total_interactions[player] += 1
self._update_match_lengths(repetition, p2, p1, interaction)
self._update_wins(repetition, p1, p2, interaction)
self._update_scores(repetition, p1, p2, interaction)
self._update_normalised_scores(repetition, p1, p2,
scores_per_turn)
self._update_cooperation(p1, p2, cooperations)
self._update_good_partner_matrix(p1, p2, cooperations)
if progress_bar:
self.progress_bar = tqdm.tqdm(total=10 + 2 * self.nplayers,
desc="Finishing")
self._summarise_normalised_scores()
self._summarise_normalised_cooperation()
self.ranking = self.build_ranking()
self.ranked_names = self.build_ranked_names()
self.payoff_matrix = self.build_payoff_matrix()
self.payoff_stddevs = self.build_payoff_stddevs()
self.payoff_diffs_means = self.build_payoff_diffs_means()
self.vengeful_cooperation = self.build_vengeful_cooperation()
self.cooperating_rating = self.build_cooperating_rating()
self.good_partner_rating = self.build_good_partner_rating()
self.eigenjesus_rating = self.build_eigenjesus_rating()
self.eigenmoses_rating = self.build_eigenmoses_rating()
if progress_bar:
self.progress_bar.close()
|
GetODK/central-frontend
|
src/util/router.js
|
<reponame>GetODK/central-frontend
/*
Copyright 2019 ODK Central Developers
See the NOTICE file at the top-level directory of this distribution and at
https://github.com/getodk/central-frontend/blob/master/NOTICE.
This file is part of ODK Central. It is subject to the license terms in
the LICENSE file found in the top-level directory of this distribution and at
https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
including this file, may be copied, modified, propagated, or distributed
except according to the terms contained in the LICENSE file.
*/
import { START_LOCATION } from 'vue-router';
import { nextTick } from 'vue';
// Returns the props for a route component.
export const routeProps = (route, props) => {
if (props == null || props === false) return {};
if (props === true) return route.params;
if (typeof props === 'function') return props(route);
// Object mode
return props;
};
// TODO/vue3. Add tests.
export const unlessFailure = (callback) => (to, from, failure) => {
if (failure == null) callback(to, from);
};
/*
afterNextNavigation() provides a way to run a callback after a navigation has
been confirmed but before the next DOM update. That is mostly only needed when
response data will be updated as part of the navigation.
Because navigation is asynchronous, if response data is updated before the
navigation is confirmed, the current route component or other processes may use
the updated data. In some cases, that can lead to unexpected behavior. For
example, if the updated data violates a validateData condition, the user may be
sent to / instead.
Because router.push() and router.replace() will return a promise that resolves
after the navigation is confirmed, another approach could be to update the
response data in a then() callback. The DOM will be updated after the navigation
is confirmed: usually that will involve the old route component being unmounted
and the new route component being mounted. However, any then() callback will be
run after the DOM is updated. That means that if the response data is updated in
a then() callback, the new route component could use outdated data when it is
first set up and mounted.
afterNextNavigation() can provide an answer to some of these subtle timing
issues, allowing the response data to be updated after the navigation has been
confirmed, but before the DOM has been updated.
*/
export const afterNextNavigation = (router, callback) => {
const removeHook = router.afterEach((to, from, failure) => {
if (failure == null) callback(to, from);
// It looks like we can't remove an afterEach hook while Vue Router is
// iterating over the afterEach hooks: if we synchronously removed this
// hook, the next afterEach hook for the navigation would be skipped.
// (Though this is probably the last hook.)
nextTick(removeHook);
});
};
export const forceReplace = ({ router, unsavedChanges }, location) => {
unsavedChanges.zero();
return router.replace(location);
};
////////////////////////////////////////////////////////////////////////////////
// RESPONSE DATA
/*
preservesData() returns `true` if the data for `key` should not be cleared when
the route changes from `from` to `to`. Otherwise it returns `false`.
- key. A request key or '*'.
- to. A Route object.
- from. A Route object.
*/
export const preservesData = (key, to, from) => {
if (from === START_LOCATION) return true;
const forKey = to.meta.preserveData[key];
if (forKey == null) return false;
const params = forKey[from.name];
if (params == null) return false;
return params.every(param => to.params[param] === from.params[param]);
};
/*
canRoute() returns `false` if response data exists that violates a validateData
condition specified for the `to` route. Otherwise it returns `true`.
- to. A Route object.
- from. A Route object.
- store. The Vuex store.
*/
export const canRoute = (to, from, store) => {
for (const [key, validator] of to.meta.validateData) {
// If the data for the request key will be cleared after the navigation is
// confirmed, we do not need to validate it.
if (preservesData('*', to, from) || preservesData(key, to, from)) {
const value = store.state.request.data[key];
if (value != null && !validator(value)) return false;
}
}
return true;
};
|
andrewstellman/pbprdf
|
src/main/scala/com/stellmangreene/pbprdf/PlayByPlay.scala
|
<filename>src/main/scala/com/stellmangreene/pbprdf/PlayByPlay.scala
package com.stellmangreene.pbprdf
import org.joda.time.DateTime
import org.eclipse.rdf4j.model.IRI
import org.eclipse.rdf4j.model.vocabulary.RDF
import org.eclipse.rdf4j.model.vocabulary.RDFS
import org.eclipse.rdf4j.repository.Repository
import com.stellmangreene.pbprdf.model.EntityIriFactory
import com.stellmangreene.pbprdf.model.Ontology
import com.stellmangreene.pbprdf.plays.EnterPlay
import com.stellmangreene.pbprdf.util.RdfOperations._
import better.files._
import com.typesafe.scalalogging.LazyLogging
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.format.DateTimeFormat
import javax.xml.datatype.DatatypeFactory
//TODO: Add triples for the box score, test against official box scores
//TODO: Add triples for the players on the court for each possession
/**
* Play by play that can generate RDF and contents of a text file
*
* @author andrewstellman
*/
abstract class PlayByPlay extends LazyLogging {
/** Events from the play-by-play */
val events: Seq[Event]
/** IRI of this game */
val gameIri: IRI
/** Name of the home team */
val homeTeam: String
/** Final score for the home team */
val homeScore: String
/** Name of the away team */
val awayTeam: String
/** Final score for the away team */
val awayScore: String
/** Game location */
val gameLocation: Option[String]
/** Game time */
val gameTime: DateTime
/** Game source (eg. filename) */
val gameSource: String
/** Game period information */
val gamePeriodInfo: GamePeriodInfo
/** returns the league (e.g. Some("WNBA")) based on GamePeriodInfo, None if unrecognized */
def league = {
gamePeriodInfo match {
case GamePeriodInfo.WNBAPeriodInfo => Some("WNBA")
case GamePeriodInfo.NBAPeriodInfo => Some("NBA")
case GamePeriodInfo.NCAAWPeriodInfo => Some("NCAAW")
case GamePeriodInfo.NCAAMPeriodInfo => Some("NCAAM")
case _ => {
logger.warn("Unrecognized league")
None
}
}
}
/**
* Add the events to an RDF repository
*
* @param rep
* rdf4j repository to add the events to
*/
def addRdf(rep: Repository) = {
rep.addTriple(gameIri, RDF.TYPE, Ontology.GAME)
gameLocation.foreach(location =>
rep.addTriple(gameIri, Ontology.GAME_LOCATION, rep.getValueFactory.createLiteral(location)))
rep.addTriple(gameIri, RDFS.LABEL, rep.getValueFactory.createLiteral(this.toString))
val gregorianGameTime = DatatypeFactory.newInstance().newXMLGregorianCalendar(gameTime.toGregorianCalendar())
rep.addTriple(gameIri, Ontology.GAME_TIME, rep.getValueFactory.createLiteral(gregorianGameTime))
events.foreach(_.addRdf(rep))
Event.addPreviousAndNextTriples(rep, events)
addRosterBnodes(rep)
}
/**
* Use the "player enters" events to build the home and away team rosters and
* add a bnode for each roster
*
* @param rep
* rdf4j repository to add the events to
*/
protected def addRosterBnodes(rep: Repository) = {
val homeTeamRosterBnode = rep.getValueFactory.createBNode
val awayTeamRosterBnode = rep.getValueFactory.createBNode
rep.addTriple(EntityIriFactory.getTeamIri(homeTeam), RDF.TYPE, Ontology.TEAM)
rep.addTriple(gameIri, Ontology.HOME_TEAM, EntityIriFactory.getTeamIri(homeTeam))
rep.addTriple(gameIri, Ontology.HAS_HOME_TEAM_ROSTER, homeTeamRosterBnode)
rep.addTriple(homeTeamRosterBnode, RDF.TYPE, Ontology.ROSTER)
rep.addTriple(homeTeamRosterBnode, Ontology.ROSTER_TEAM, EntityIriFactory.getTeamIri(homeTeam))
rep.addTriple(homeTeamRosterBnode, RDFS.LABEL, rep.getValueFactory.createLiteral(homeTeam))
rep.addTriple(EntityIriFactory.getTeamIri(awayTeam), RDF.TYPE, Ontology.TEAM)
rep.addTriple(gameIri, Ontology.AWAY_TEAM, EntityIriFactory.getTeamIri(awayTeam))
rep.addTriple(gameIri, Ontology.HAS_AWAY_TEAM_ROSTER, awayTeamRosterBnode)
rep.addTriple(awayTeamRosterBnode, RDF.TYPE, Ontology.ROSTER)
rep.addTriple(awayTeamRosterBnode, Ontology.ROSTER_TEAM, EntityIriFactory.getTeamIri(awayTeam))
rep.addTriple(awayTeamRosterBnode, RDFS.LABEL, rep.getValueFactory.createLiteral(awayTeam))
val playerTeamMap: Map[String, String] = events
.filter(_.isInstanceOf[EnterPlay])
.map(_.asInstanceOf[EnterPlay])
.filter(_.playerEntering.isDefined)
.map(enterPlay => enterPlay.playerEntering.get -> enterPlay.getTeam)
.toMap
val teams = playerTeamMap.values.toSeq.distinct
if (teams.size != 2)
logger.warn(s"Found entry plays with invalid number of teams ${teams.size} for game <${gameIri}> in ${gameSource}")
val players = playerTeamMap.keys.toSeq.distinct
players.foreach(player => {
rep.addTriple(EntityIriFactory.getPlayerIri(player), RDFS.LABEL, rep.getValueFactory.createLiteral(player.trim))
val playerTeam = playerTeamMap.get(player).get
val playerIri = EntityIriFactory.getPlayerIri(player)
rep.addTriple(playerIri, RDF.TYPE, Ontology.PLAYER)
if (playerTeam == homeTeam) {
rep.addTriple(homeTeamRosterBnode, Ontology.HAS_PLAYER, playerIri)
} else if (playerTeam == awayTeam) {
rep.addTriple(awayTeamRosterBnode, Ontology.HAS_PLAYER, playerIri)
} else {
logger.warn(s"Entry plays contain team ${playerTeam} that does match home team ${homeTeam} or away team ${awayTeam} in ${gameSource}")
}
})
}
/**
* returns the contents of a text file representation of this play-by-play, or None if the play can't be rendered correctly
*/
def textFileContents: Option[Seq[String]] = {
val header = Seq(
toString,
s"${gameLocation.getOrElse("Unknown Location")}\t${ISODateTimeFormat.dateTime().print(gameTime)}")
val eventLines = events.map(_.getText)
Some(header ++ eventLines)
}
override def toString: String = {
val fmt = DateTimeFormat.forPattern("YYYY-MM-dd")
val s = s"${awayTeam} (${awayScore}) at ${homeTeam} (${homeScore}) on ${fmt.print(gameTime)}"
if (events.isEmpty) s"Empty Game: $s"
else {
s"${league.getOrElse("Unrecognized league")} game: $s - ${events.size} events"
}
}
}
|
Nyior/django-rest-paystack
|
paystack/serializers/__init__.py
|
from .customer import *
from .transaction import *
|
Nik6198/Data-Structures
|
sorting/heap.cpp
|
<gh_stars>0
qxc
#include<iostream>
using namespace std;
int main()
{
int n;
int v,temp,i;
cout<<"How many elements you want in array:";
cin>>n;
int a[n+1];
cout<<"Enter array elements:";
for(i=1;i<=n;i++)
{
cin>>a[i];
}
cout<<"sorted array elements are:";
for(i=1;i<=n;i++)
{
v=n;
//create min heap
while(v!=0)
{
i=v/2;
if(a[i]>a[v])
{
temp=a[i];
a[i]=a[v];
a[v]=temp;
}
v--;
}
//swap root with last element
temp=a[0];
a[0]=a[n];
a[n]=temp;
cout<<a[n]<<" ";
n--;
}
return 0;
}
|
GabrielePrestifilippo/EST-WA-Javascript
|
src/worldwind_old copy/navigate/Navigator.js
|
<reponame>GabrielePrestifilippo/EST-WA-Javascript<filename>src/worldwind_old copy/navigate/Navigator.js
define([
'../error/ArgumentError',
'./Camera',
'../util/Logger',
'../geom/Matrix',
'./NavigatorState'
], function (ArgumentError,
Camera,
Logger,
Matrix,
NavigatorState) {
/**
* Current state of the eye point in the same form as for Camera.
* @param worldWindow {WorldWindow} World Window to which this navigator is associated.
* @constructor
*/
var Navigator = function (worldWindow) {
this.scratchCamera = new Camera();
/**
* The {@link WorldWindow} associated with this navigator.
* @type {WorldWindow}
* @readonly
*/
this.worldWindow = worldWindow;
/**
* Field of view of the camera in degrees.
* @type {number}
*/
this.fieldOfView = 45;
/**
* This navigator's heading, in degrees clockwise from north.
* @type {Number}
* @default 0
*/
this.heading = 0;
/**
* This navigator's tilt, in degrees.
* @type {Number}
* @default 0
*/
this.tilt = 0;
/**
* Starting latitude of the navigator
* @type {Number}
*/
this.latitude = 30;
/**
* Starting longitude of the navigator
* @type {Number}
*/
this.longitude = -110;
/**
* Starting altitude of the navigator
* @type {Number}
*/
this.altitude = 10000000;
/**
* This navigator's roll, in degrees.
* @type {Number}
* @default 0
*/
this.roll = 0;
};
Object.defineProperties(Navigator.prototype, {
/**
* Latitude of the virtual camera. Degrees north or south of the Equator (0 degrees). Values range from -90
* degrees to 90 degrees.
* @memberof Navigator.prototype
* @readonly
* @type {Number}
*/
latitude: {
get: function () {
return this._latitude;
},
set: function (latitude) {
this._latitude = latitude;
}
},
/**
* Longitude of the virtual camera (eye point). Angular distance in degrees, relative to the Prime Meridian.
* Values west of the Meridian range from +-180 to 0 degrees. Values east of the Meridian range from 0
* to 180 degrees.
* @memberof Navigator.prototype
* @readonly
* @type {Number}
*/
longitude: {
get: function () {
return this._longitude;
},
set: function (longitude) {
this._longitude = longitude;
}
},
/**
* Distance of the camera from the earth's surface, in meters. Interpreted according to the Camera's
* <altitudeMode> or <gx:altitudeMode>.
* @memberOf Navigator.prototype
* @readonly
* @type {Number}
*/
altitude: {
get: function () {
return this._altitude;
},
set: function (altitude) {
this._altitude = altitude;
}
},
/**
* Direction (azimuth) of the camera, in degrees. Default=0 (true North). (See diagram.) Values range from
* 0 to 360 degrees.
* @memberof Camera.prototype
* @readonly
* @type {Number}
*/
heading: {
get: function () {
return this._heading;
},
set: function (heading) {
this._heading = heading;
}
},
/**
* Rotation, in degrees, of the camera around the X axis. A value of 0 indicates that the view is aimed
* straight down toward the earth (the most common case). A value for 90 for <tilt> indicates that the
* view
* is aimed toward the horizon. Values greater than 90 indicate that the view is pointed up into the sky.
* Values for <tilt> are clamped at +180 degrees.
* @memberof Camera.prototype
* @readonly
* @type {Number}
*/
tilt: {
get: function () {
return this._tilt;
},
set: function (tilt) {
this._tilt = tilt;
}
},
/**
* Rotation, in degrees, of the camera around the Z axis. Values range from -180 to +180 degrees.
* @memberof Camera.prototype
* @readonly
* @type {String}
*/
roll: {
get: function () {
return this._roll;
},
set: function (roll) {
this._roll = roll;
}
}
});
/**
* It retrieves properties of this navigator in the form of camera.
* @param globe {Globe} Unused here
* @param result {Camera} Camera which will be returned with current properties of navigator imprinted on it.
* @returns {Camera} result
*/
Navigator.prototype.getAsCamera = function (globe, result) {
if (!result) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "getAsCamera", "missing result")
);
}
result.latitude = this.latitude;
result.longitude = this.longitude;
result.altitude = this.altitude;
result.altitudeMode = WorldWind.ABSOLUTE;
result.heading = this.heading;
result.tilt = this.tilt;
result.roll = this.roll;
return result;
};
/**
* It sets properties of this navigator based on the supplied Camera.
* @param globe {Globe} Actually unused in this computation.
* @param camera {Camera} Camera representation of the properties.
* @returns {Navigator} this
*/
Navigator.prototype.setAsCamera = function (globe, camera) {
if (!camera) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "setAsCamera", "missing camera")
);
}
this.latitude = camera.latitude;
this.longitude = camera.longitude;
this.altitude = camera.altitude; // TODO: Interpret altitude based on the altitude mode.
this.heading = camera.heading;
this.tilt = camera.tilt;
this.roll = camera.roll;
return this;
};
/**
* It retrieve properties of this navigator in the form of LookAt
* @param globe {Globe} Globe used to do the computations.
* @param result {LookAt} Current navigator properties represented as LookAt
* @returns {LookAt} Updated result.
*/
Navigator.prototype.getAsLookAt = function (globe, result) {
if (!globe) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "getAsLookAt", "missing globe")
);
}
if (!result) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "getAsLookAt", "missing result")
);
}
this.getAsCamera(globe, this.scratchCamera); // get this navigator's properties as a Camera
globe.cameraToLookAt(this.scratchCamera, result); // convert the Camera to a LookAt
return result;
};
/**
* It sets properties of this navigator based on the information supplies as LookAt.
* @param globe {Globe} Globe used to do certain computations.
* @param lookAt {LookAt} LookAt representation of the properties.
* @returns {Navigator} this
*/
Navigator.prototype.setAsLookAt = function (globe, lookAt) {
if (!globe) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "setAsLookAt", "missing globe")
);
}
if (!lookAt) {
throw new ArgumentError(
Logger.logMessage(Logger.LEVEL_SEVERE, "Navigator", "setAsLookAt", "missing lookAt")
);
}
globe.lookAtToCamera(lookAt, this.scratchCamera);
this.setAsCamera(globe, this.scratchCamera);
return this;
};
/**
* It transforms the properties of current navigator into the Navigator State. This means computing modelview matrix,
* projection matrix and others documented in the NavigatorState.
* @return {NavigatorState}
*/
Navigator.prototype.currentState = function(globe) {
var camera = this.getAsCamera(globe, this.scratchCamera),
modelview = Matrix.fromIdentity(),
projection = Matrix.fromIdentity(),
infiniteProjection = Matrix.fromIdentity(),
viewport = this.worldWindow.viewport;
this.worldWindow.computeViewingTransform(projection, modelview); // This dependency makes it very difficult to test.
infiniteProjection.setToInfiniteProjection(viewport.width, viewport.height, this.fieldOfView, 1.0);
infiniteProjection.multiplyMatrix(modelview);
return new NavigatorState(modelview, projection, infiniteProjection, viewport, camera.heading, camera.tilt);
};
return Navigator;
});
|
IKATS/ikats-datamodel
|
TemporalDataManagerWebApp/src/main/java/fr/cs/ikats/temporaldata/application/ApplicationLabels.java
|
/**
* Copyright 2018-2019 CS Systèmes d'Information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.cs.ikats.temporaldata.application;
import java.text.MessageFormat;
import java.util.ResourceBundle;
/**
* Labels of the application
*/
public class ApplicationLabels {
static {
instance = new ApplicationLabels();
}
/**
* resource bundles
*/
private ResourceBundle bundle;
/**
* singleton instance
*/
private static ApplicationLabels instance;
/**
* private contructor
*/
private ApplicationLabels() {
bundle = ResourceBundle.getBundle("ApplicationLabels");
}
/**
* singleton instance getter
*
* @return instance
*/
public static ApplicationLabels getInstance() {
return instance;
}
/**
* get message with label key.
*
* @param label the label key
* @return the label
*/
public String getLabel(String label) {
return bundle.getString(label);
}
/**
* get Formated message with label key and params
*
* @param label key of the label
* @param params list of parameters to use to format the message
* @return the formatted string.
*/
public String getLabel(String label, Object... params) {
String pattern = bundle.getString(label);
return MessageFormat.format(pattern, params);
}
}
|
deeplearninc/relaax
|
relaax/common/algorithms/lib/episode.py
|
<filename>relaax/common/algorithms/lib/episode.py
from __future__ import absolute_import
from builtins import object
import numpy as np
from . import experience
class Episode(object):
def __init__(self, *args):
self.keys = args
self.experience = None
def begin(self):
assert self.experience is None
self.experience = experience.Experience(*self.keys)
def step(self, **kwargs):
assert self.experience is not None
self.experience.push_record(**kwargs)
def extend(self, **kwargs):
assert self.experience is not None
self.keys = self.experience.push_records(**kwargs)
def end(self):
assert self.experience is not None
experience = self.experience
self.experience = None
return experience
@property
def size(self):
return len(self.experience)
class ReplayBuffer(Episode):
def __init__(self, keys, buffer_size=1, seed=None):
super(ReplayBuffer, self).__init__(*keys)
assert buffer_size > 0, 'You have to provide positive buffer size'
self.buffer_size = buffer_size
if seed is not None:
np.random.seed(seed)
def step(self, **kwargs):
assert self.experience is not None
self.experience.push_record(**kwargs)
if self.size > self.buffer_size:
self.experience.del_record(self.size - self.buffer_size)
def sample(self, batch_size=1):
idx = np.random.choice(self.size, batch_size, replace=False)
return _fill(self.experience, idx)
class Dataset(Episode):
def __init__(self, keys, shuffle=True):
super(Dataset, self).__init__(*keys)
self.do_shuffle = shuffle
self._next_id = 0
def subset(self, elements=1, stochastic=True, keys=None):
assert self.size > 0, 'Source dataset is empty'
cur_keys = list(self.keys)
if keys is not None:
cur_keys = keys
d = Dataset(cur_keys, shuffle=stochastic)
d.begin()
# elements could be
# - integer: [:elements] or random subset with the same number of elements if stochastic=True
# - tuple of integers: elements=(begin_index, end_index)
# - 1-D numpy array with indices: elements=array([1, 4, 2, 0, 3])
d.experience._lists = _fill(self.experience, elements, keys=cur_keys)
return d
def shuffle(self):
indices = np.random.choice(self.size, self.size, replace=False)
self.experience._lists = _fill(self.experience, indices)
self._next_id = 0
def next_batch(self, batch_size):
if self._next_id >= self.size and self.do_shuffle:
self.shuffle()
cur_id = self._next_id
cur_batch_size = min(batch_size, self.size - self._next_id)
self._next_id += cur_batch_size
return _fill(self.experience, (cur_id, cur_id+cur_batch_size))
def iterate_once(self, batch_size):
if self.do_shuffle:
self.shuffle()
while True:
yield self.next_batch(batch_size)
if self._next_id >= self.size:
break
self._next_id = 0
def _fill(src, idx, keys=None):
if keys is None:
dst = {k: [] for k in src._lists}
else:
dst = {k: [] for k in keys}
if type(idx) is int:
assert idx <= len(src), 'Requesting size of the new data is larger than the current one'
for k, v in dst.items():
v.extend(src[k][:idx])
elif type(idx) is tuple:
assert idx[1] <= len(src), 'Requesting size of the new data is larger than the current one'
for k, v in dst.items():
v.extend(src[k][idx[0]:idx[1]])
else:
assert len(idx) <= len(src), 'Requesting size of the new data is larger than the current one'
for k, v in dst.items():
v.extend(np.asarray(src[k])[idx])
return dst
|
mateuszrzeszutek/tracing-examples
|
signalfx-tracing/signalfx-nodejs-tracing/mysql/deedScheduler/router.js
|
const tracer = require('./tracer');
const BodyParser = require('koa-bodyparser');
const Router = require('koa-router');
const scheduler = require('./scheduler');
const router = new Router();
async function addItem(ctx) {
const span = tracer.scope().active();
const deed = ctx.request.body.deed;
const note = ctx.request.body.note;
const day = ctx.request.body.day;
span.setTag('added', deed);
await scheduler.addItem(deed, note, day)
.then((response) => {
ctx.body = {
message: `You just added '${deed}' on ${day} to your deedScheduler!
Number of rows added: ${response}`,
};
})
.catch((error) => {
ctx.body = {message: `ERROR: ${error}`};
});
}
async function deleteDeed(ctx) {
const span = tracer.scope().active();
const day = ctx.query.day;
const deed = ctx.params.deed;
if (day === '__ALL__') {
span.setTag(deed, 'all');
} else {
span.setTag(deed, day);
}
await scheduler.deleteDeed(deed, day)
.then((response) => {
if (response > 0) {
if (day === '__ALL__') {
ctx.body = {message: `'${deed}' is no longer in your deedScheduler!
Number of rows affected: ${response}`};
} else {
ctx.body = {message:
`${deed} on ${day} is no longer in your deedScheduler!
Number of rows affected: ${response}`};
}
} else {
if (day === '__ALL__') {
ctx.body = {
message:
`You have no ${deed} in your deedScheduler.`,
};
} else {
ctx.body = {
message:
`You have no ${deed} on ${day} in your deedScheduler.`,
};
}
}
})
.catch((error) => {
ctx.body = {message: `ERROR: ${error}`};
});
}
async function listDeeds(ctx) {
const span = tracer.scope().active();
const day = ctx.query.day;
span.setTag('deedsList', true);
await scheduler.listDeeds(day)
.then((response) => {
if (response[0]) {
ctx.body = response;
} else {
ctx.body = {message: `No entry was found to match your query`};
}
})
.catch((error) => {
ctx.body = {message: `ERROR: ${error}`};
});
}
async function updateDeed(ctx) {
const span = tracer.scope().active();
const deed = ctx.params.deed;
const day = ctx.query.day;
const status = ctx.request.body.status;
await scheduler.updateDeed(deed, day, status)
.then((response) => {
if (response.affectedRows > 0) {
span.setTag('updated_deed', deed);
ctx.body = {message:
`You have successfully updated ${deed}.
Number of rows changed: ${response.affectedRows}`};
} else {
ctx.body = {message:
`No entry matching ${deed} on ${day} was found to update`};
}
span.setTag('rows affected', response.affectedRows);
})
.catch((error) => {
ctx.body = {message: `ERROR: ${error}`};
});
}
async function viewDeed(ctx) {
const span = tracer.scope().active();
const deed = ctx.params.deed;
const day = ctx.query.day;
const status = ctx.query.status;
await scheduler.viewDeed(deed, day, status)
.then((response) => {
if (response[0]) {
ctx.body = response;
span.setTag('viewed', deed);
} else {
ctx.body = {message: `No entry was found to match your query`};
}
})
.catch((error) => {
ctx.body = {message: `ERROR: ${error}`};
});
}
router
.use(BodyParser())
.get('/deeds', listDeeds)
.get('/deeds/deed/:deed', viewDeed)
.post('/add', addItem)
.put('/deeds/:deed', updateDeed)
.delete('/deeds/:deed', deleteDeed);
module.exports = router;
|
evmanio/evman
|
app/services/form_services/form_submission_form.rb
|
module FormServices
class FormSubmissionForm
include ActiveModel::Model
def model_name
ActiveModel::Name.new(self, nil, "FormSubmissionForm")
end
delegate :persisted, :form_id, :associated_object_id, :associated_object_type, to: :submission
attr_reader :submission, :form, :params, :current_user
validate do |form|
fields = form.fields
fields.each do |field|
field.validate
form.errors.add :base, :question_blank, question: field.label unless field.valid?
end
submission = form.submission
form.errors.add :base, :form_structure_changed if submission.form_structure_changed?
submission.valid?
submission.errors.full_messages.each do |message|
form.errors.add :base, message
end
end
def initialize submission, current_user = nil, params={}
@submission, @current_user = submission, current_user
@form = submission.form
@params = params || {}
end
def fields
@fields ||= form.data['fields'].collect do |f|
Field.new(f, submission, params)
end
end
def serialize_fields
fields.collect do |field|
{
id: field.id,
value: field.value,
label: field.label,
type: field.type
}
end
end
def title
@form.name
end
def submit
submission.data = serialize_fields
submission.submitted_by = current_user
submission.form_structure_hash = form.structure_hash if submission.new_record?
return false if invalid?
submission.save
end
def to_model
submission
end
class Field
attr_reader :definition, :value, :submission
def initialize definition, submission, params
@submission = submission
@definition = definition
@value = (params[name] || {})['value']
@value ||= submission_value(id)
@value.reject!(&:blank?) if @value.is_a? Array
@valid = true
end
def name
"field_#{id}"
end
def id
definition['id']
end
def type
definition['type'] || 'text'
end
def label
definition['label']
end
def choices
return [] unless definition['choices']
return [] if definition['choices'].empty?
@choices||= definition['choices'].map{|choice| Choice.new(choice) }
end
def required?
definition['required']
end
def edit_partial_path
"forms/fields/edit/#{type.underscore}"
end
def validate
return true unless required?
@valid = false if value.blank?
end
def valid?
@valid
end
private
def submission_value(id)
return nil unless submission.data
field = submission.data.find{ |f| f['id'] == id }
return nil unless field
field['value']
end
end
Choice = Struct.new(:name)
end
end
|
ShurtanMSC/ShurtanMSCFront
|
src/components/Shurtan/TableGrafic/TableGraficModal.js
|
import React, {useRef, useEffect, useCallback} from 'react'
import { motion, AnimatePresence } from 'framer-motion'
import {
ModalDivShurtan,
H2, H2Div, Table, Tr, Th, TdFirst, Td, InputModal, SaveDiv, SaveBtnModal, CloseBtnModal,
} from '../../../styled';
import axios from 'axios';
import {configHeader} from "../../../utills/congifHeader";
import {BASE_URL} from "../../../utills/constant"
const backdrop = {
visible: { opacity: 1 },
hidden: { opacity: 0 }
}
const modalGrafic = {
hidden: {
y: "-100vh",
opacity: 0
},
visible: {
y: "200px",
opacity: 1,
transition: { delay: 0.5 }
}
}
const TableGraficModal = ({showTableGraficModal, setShowTableGraficModal,setData, data}) => {
const findMonthName=(month)=>{
if (!month) {
throw new Error("Season is not defined")
}
switch (month) {
case "JANUARY": return "Январь"
case "FEBRUARY": return "Февраль"
case "MARCH": return "Март"
case "APRIL": return "Апрель"
case "MAY": return "Май"
case "JUNE": return "Июнь"
case "JULY": return "Июль"
case "AUGUST": return "Август"
case "SEPTEMBER": return "Сентябрь"
case "OCTOBER": return "Октябрь"
case "NOVEMBER": return "Ноябрь"
case "DECEMBER": return "Декабр"
default:return ""
}
}
const count=[1,2,3,4,5,6,7,8,9,10,11,12]
// Edit
const findMonth=(month)=>{
if (!month) {
throw new Error("Season is not defined")
}
switch (month) {
case 1: return "JANUARY"
case 2: return "FEBRUARY"
case 3: return "MARCH"
case 4: return "APRIL"
case 5: return "MAY"
case 6: return "JUNE"
case 7: return "JULY"
case 8: return "AUGUST"
case 9: return "SEPTEMBER"
case 10: return "OCTOBER"
case 11: return "NOVEMBER"
case 12: return "DECEMBER"
default:return ""
}
}
const handlerSubmit = useCallback((e) => {
e.preventDefault();
const postData=[];
for (let i = 1; i <= 12; i++) {
postData.push({
amount:document.getElementById("l"+i).value,
mining_system_id:1,
month:findMonth(i),
year:new Date().getFullYear()-1,
})
postData.push({
amount:document.getElementById("t"+i).value,
mining_system_id:1,
month:findMonth(i),
year:new Date().getFullYear(),
})
}
if (postData.length===24){
document.getElementById('save').disabled=true;
axios.post(BASE_URL + '/api/forecast/gas/add/all',
postData,configHeader
).then(res => {
setData(res.data.object)
}).catch(err => {console.log(err);
});
}
setShowTableGraficModal(prev => !prev);
},[setData,setShowTableGraficModal])
//Modal
const modalRef = useRef();
const closeModal = e => {
if( modalRef.current === e.target ) {
setShowTableGraficModal(false);
}
};
const keyPress = useCallback (e => {
if(e.key === 'Escape' && showTableGraficModal){
setShowTableGraficModal(false)
}
}, [setShowTableGraficModal, showTableGraficModal]);
useEffect(()=>{
document.addEventListener('keydown', keyPress);
return() => document.removeEventListener('keydown', keyPress);
}, [keyPress,handlerSubmit]);
return (
<AnimatePresence>
{ showTableGraficModal && (
<motion.div className="backdrop"
variants={backdrop}
initial="hidden"
animate="visible"
ref={modalRef}
onClick={closeModal}
>
<motion.div className="modalGrafic"
variants={modalGrafic}
>
<ModalDivShurtan>
<H2Div>
<H2>Прогноз добычи</H2>
</H2Div>
<form onSubmit={handlerSubmit}>
<Table>
<thead>
<Tr>
<Th style={{padding:'1rem', width:'35%'}}>Месяц</Th>
<Th>{new Date().getFullYear()-1}</Th>
<Th>{new Date().getFullYear()}</Th>
</Tr>
</thead>
<tbody>
{data.length===24?
count.map(number=>
<Tr key={number}>
<TdFirst> <label htmlFor={"Year"+number}>{findMonthName(data[number*2-2].month)}</label> </TdFirst>
<Td> <InputModal min="1" id={"l"+number} step="any" type="number" defaultValue={data[number*2-2].amount} name={"Year"+number} required /> </Td>
<Td> <InputModal min="1" id={"t"+number} step="any" type="number" defaultValue={data[number*2-1].amount} name={"Year"+number} required /> </Td>
</Tr>
)
:
count.map(number=>
<Tr key={number}>
<TdFirst> <label htmlFor={"Year"+number}>{findMonthName(findMonth(number))}</label> </TdFirst>
<Td> <InputModal min="1" step="any" id={"l"+number} type="number" name={"Year"+number} required /> </Td>
<Td> <InputModal min="1" step="any" id={"t"+number} type="number" name={"Year"+number} required /> </Td>
</Tr>
)
}
</tbody>
</Table>
<SaveDiv>
<div style={{marginLeft:'auto'}}>
<SaveBtnModal id={"save"}>Сохранит
</SaveBtnModal>
<CloseBtnModal
onClick={()=> setShowTableGraficModal(prev => !prev)}>Закрыт
</CloseBtnModal>
</div>
</SaveDiv>
</form>
</ModalDivShurtan>
</motion.div>
</motion.div>
) }
</AnimatePresence>
)
}
export default TableGraficModal
|
laudarch/GTAS
|
gtas-parent/gtas-rulesvc/src/main/java/gov/gtas/rule/RuleService.java
|
/*
* All GTAS code is Copyright 2016, The Department of Homeland Security (DHS), U.S. Customs and Border Protection (CBP).
*
* Please see LICENSE.txt for details.
*/
package gov.gtas.rule;
import gov.gtas.bo.RuleServiceRequest;
import gov.gtas.bo.RuleServiceResult;
import java.util.Map;
/**
* The interface for the Rule Engine.
*/
public interface RuleService {
/**
* Execute the rule engine on the specified request for the specified rule file.
* (Note: the file should be on the class path.)
*
* @param ruleFilePath
* the path name of the rule file to invoke the engine on.
* @param req
* the rule request message.
* @return the result of the rule engine invocation.
*/
RuleServiceResult invokeAdhocRules(String ruleFilePath, RuleServiceRequest req);
/**
* Execute the rule engine on the specified request for the default
* KnowledgeBase.
*
* @param req
* the rule request message.
* @param kbName
* The name of the knowledge base to use for executing the request.
* @return the result of the rule engine invocation.
*/
RuleServiceResult invokeRuleEngine(RuleServiceRequest req, String kbName, Map<String, KIEAndLastUpdate> rules);
}
|
city-mobil/go-mymy
|
pkg/mymy/handler_test.go
|
package mymy
import (
"testing"
"github.com/stretchr/testify/assert"
)
var (
tSource = SourceInfo{
Schema: "city",
Table: "clients",
PKs: []Column{
{
Index: 0,
Name: "id",
Type: TypeNumber,
IsAuto: true,
IsUnsigned: true,
},
},
Cols: []Column{
{Index: 1, Name: "name", Type: TypeString},
{Index: 2, Name: "email", Type: TypeString},
{Index: 3, Name: "position", Type: TypeString},
},
}
)
func TestBaseEventHandler_OnRows(t *testing.T) {
type fields struct {
table string
sync []string
}
type args struct {
e *RowsEvent
}
tests := []struct {
name string
fields fields
args args
want []*Query
wantErr bool
}{
{
name: "UnknownAction",
fields: fields{
table: "users",
sync: []string{"name", "email"},
},
args: args{
e: &RowsEvent{
Action: Action("upsert"),
Source: tSource,
Rows: [][]interface{}{
{1, "bob", "<EMAIL>", "CTO"},
},
},
},
wantErr: true,
},
{
name: "OnInsert",
fields: fields{
table: "users",
sync: []string{"name", "email"},
},
args: args{
e: &RowsEvent{
Action: ActionInsert,
Source: tSource,
Rows: [][]interface{}{
{1, "bob", "<EMAIL>", "CTO"},
{2, "alice", "<EMAIL>", "CEO"},
},
},
},
want: []*Query{
{
Action: ActionInsert,
Table: "users",
Values: []QueryArg{
{Field: "id", Value: 1},
{Field: "name", Value: "bob"},
{Field: "email", Value: "<EMAIL>"},
},
},
{
Action: ActionInsert,
Table: "users",
Values: []QueryArg{
{Field: "id", Value: 2},
{Field: "name", Value: "alice"},
{Field: "email", Value: "<EMAIL>"},
},
},
},
wantErr: false,
},
{
name: "OnUpdate",
fields: fields{
table: "users",
sync: []string{"name", "email"},
},
args: args{
e: &RowsEvent{
Action: ActionUpdate,
Source: tSource,
Rows: [][]interface{}{
{1, "bob", "<EMAIL>", "CTO"},
{1, "alice", "<EMAIL>", "CEO"},
{2, "john", "<EMAIL>", "Lifter"},
{3, "john", "<EMAIL>", "CEO"},
},
},
},
want: []*Query{
{
Action: ActionUpdate,
Table: "users",
Values: []QueryArg{
{Field: "id", Value: 1},
{Field: "name", Value: "alice"},
{Field: "email", Value: "<EMAIL>"},
},
Where: []QueryArg{
{Field: "id", Value: 1},
},
},
{
Action: ActionUpdate,
Table: "users",
Values: []QueryArg{
{Field: "id", Value: 3},
{Field: "name", Value: "john"},
{Field: "email", Value: "<EMAIL>"},
},
Where: []QueryArg{
{Field: "id", Value: 2},
},
},
},
wantErr: false,
},
{
name: "OnDelete",
fields: fields{
table: "users",
sync: []string{"name", "email"},
},
args: args{
e: &RowsEvent{
Action: ActionDelete,
Source: tSource,
Rows: [][]interface{}{
{1, "bob", "<EMAIL>", "CTO"},
{2, "alice", "<EMAIL>", "CEO"},
},
},
},
want: []*Query{
{
Action: ActionDelete,
Table: "users",
Where: []QueryArg{
{Field: "id", Value: 1},
},
},
{
Action: ActionDelete,
Table: "users",
Where: []QueryArg{
{Field: "id", Value: 2},
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
eH := NewBaseEventHandler(tt.fields.table)
eH.SyncOnly(tt.fields.sync)
got, err := eH.OnRows(tt.args.e)
if tt.wantErr {
assert.Error(t, err)
assert.Nil(t, got)
} else {
assert.EqualValues(t, tt.want, got)
}
})
}
}
|
opensag/atom-openwrt
|
package/wav6/iwlwav-dev/drivers/net/wireless/intel/iwlwav/tools/mtlkroot/linux/nlmsgs.c
|
<gh_stars>1-10
/******************************************************************************
Copyright (c) 2012
Lantiq Deutschland GmbH
For licensing information, see the file 'LICENSE' in the root folder of
this software module.
******************************************************************************/
/*
* $Id$
*
*
*
* Subsystem providing communication with userspace over
* NETLINK_USERSOCK netlink protocol.
*
*/
#include "mtlkinc.h"
#include <linux/module.h>
#include <linux/skbuff.h>
#include <linux/socket.h>
#include <net/sock.h>
#include <linux/netlink.h>
#include <net/genetlink.h>
#include "nlmsgs.h"
#include "bt_acs.h"
#define LOG_LOCAL_GID GID_NLMSGS
#define LOG_LOCAL_FID 1
/* Configuration structure */
static wave_nl_socket_t wave_nl_socket;
struct sock *bt_acs_nl_sock = NULL;
DEFINE_MUTEX(bt_acs_nl_mutex);
static void bt_acs_nl_input (struct sk_buff *inskb) {}
/* TODO IWLWAV: Should add support for this in backports and remove IFDEFs */
#if LINUX_VERSION_CODE >= KERNEL_VERSION(3,19,0)
static int bt_acs_nl_bind (struct net *net, int group) { return 0; }
#elif LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0)
static int bt_acs_nl_bind (int group) { return 0; }
#else
static void bt_acs_nl_bind (int group) {}
#endif
/* Module parameter */
extern int mtlk_genl_family_id;
static int _wave_nl_parse_brd_msg_cb (struct sk_buff *skb2, struct genl_info *info);
static struct genl_multicast_group mtlk_mcgrps[] = {
[NETLINK_SIMPLE_CONFIG_GROUP] = { .name = MTLK_NETLINK_SIMPLE_CONFIG_GROUP_NAME, },
[NETLINK_IRBM_GROUP] = { .name = MTLK_NETLINK_IRBM_GROUP_NAME, },
[NETLINK_LOGSERVER_GROUP] = { .name = MTLK_NETLINK_LOGSERVER_GROUP_NAME, },
[NETLINK_FAPI_GROUP] = { .name = WAVE_NETLINK_FAPI_GROUP_NAME, },
[NETLINK_RTLOG_APPS_GROUP] = { .name = MTLK_NETLINK_RTLOG_APPS_GROUP_NAME, },
};
/* attribute policy structure */
static struct nla_policy wave_genl_policy[MTLK_GENL_ATTR_MAX + 1] =
{
[MTLK_GENL_ATTR_EVENT] = { .type = NLA_UNSPEC },
};
/* command structure */
static struct genl_ops wave_genl_ops[] = {
{
.cmd = MTLK_GENL_CMD_EVENT,
.flags = 0,
.policy = wave_genl_policy,
.doit = _wave_nl_parse_brd_msg_cb,
.dumpit = NULL,
}
};
/* family structure */
static struct genl_family mtlk_genl_family = {
.name = MTLK_GENL_FAMILY_NAME,
.version = MTLK_GENL_FAMILY_VERSION,
.maxattr = MTLK_GENL_ATTR_MAX,
.mcgrps = mtlk_mcgrps,
.n_mcgrps = ARRAY_SIZE(mtlk_mcgrps),
.ops = wave_genl_ops,
.n_ops = ARRAY_SIZE(wave_genl_ops),
};
int mtlk_nl_bt_acs_send_brd_msg(void *data, int length)
{
struct sk_buff *skb = NULL;
struct nlmsghdr *nlh;
/* no socket - no messages */
if (!bt_acs_nl_sock)
return MTLK_ERR_UNKNOWN;
skb = alloc_skb(NLMSG_SPACE(length), GFP_ATOMIC);
if (skb == NULL)
return MTLK_ERR_NO_MEM;
nlh = (struct nlmsghdr*) skb->data;
nlh->nlmsg_len = NLMSG_SPACE(length);
nlh->nlmsg_pid = 0;
nlh->nlmsg_flags = 0;
/* fill the message header */
skb_put(skb, NLMSG_SPACE(length));
wave_memcpy(NLMSG_DATA(nlh), length, data, length);
NETLINK_CB_PORTID(skb) = 0; /* from kernel */
if (netlink_broadcast(bt_acs_nl_sock, skb, 0, NETLINK_BT_ACS_GROUP, GFP_ATOMIC))
return MTLK_ERR_UNKNOWN;
return MTLK_ERR_OK;
}
int mtlk_nl_bt_acs_init(void)
{
struct netlink_kernel_cfg bt_acs_nl_cfg;
memset(&bt_acs_nl_cfg, 0, sizeof(bt_acs_nl_cfg));
bt_acs_nl_cfg.groups = NETLINK_BT_ACS_GROUP_LAST;
bt_acs_nl_cfg.flags = 0;
bt_acs_nl_cfg.input = &bt_acs_nl_input;
bt_acs_nl_cfg.cb_mutex = &bt_acs_nl_mutex;
bt_acs_nl_cfg.bind = &bt_acs_nl_bind;
bt_acs_nl_sock = netlink_kernel_create(&init_net, NETLINK_BT_ACS, &bt_acs_nl_cfg);
if (!bt_acs_nl_sock) {
return MTLK_ERR_UNKNOWN;
}
return MTLK_ERR_OK;
}
void mtlk_nl_bt_acs_cleanup(void)
{
if(bt_acs_nl_sock){
sock_release(bt_acs_nl_sock->sk_socket);
}
}
BOOL wave_nl_is_active (void)
{
return wave_nl_socket.is_active;
}
int mtlk_nl_send_brd_msg(void *data, int length, gfp_t flags, u32 dst_group, u8 cmd)
{
struct sk_buff *skb;
struct nlattr *attr;
void *msg_header;
int size, genl_res, send_group;
int res = MTLK_ERR_UNKNOWN;
struct mtlk_nl_msghdr *mhdr;
int full_len = length + sizeof(*mhdr);
/* allocate memory */
size = nla_total_size(full_len);
skb = genlmsg_new(size, flags);
if (!skb)
return MTLK_ERR_NO_MEM;
/* add the genetlink message header */
msg_header = genlmsg_put(skb, 0, 0, &mtlk_genl_family, 0, MTLK_GENL_CMD_EVENT);
if (!msg_header)
goto out_free_skb;
/* fill the data */
attr = nla_reserve(skb, MTLK_GENL_ATTR_EVENT, full_len);
if (!attr)
goto out_free_skb;
mhdr = (struct mtlk_nl_msghdr*) (nla_data(attr));
wave_memcpy(mhdr->fingerprint, sizeof(mhdr->fingerprint), "mtlk", 4);
mhdr->proto_ver = MTLK_NL_PROTOCOL_VERSION;
mhdr->cmd_id = cmd;
mhdr->data_len = length;
wave_memcpy((char *)mhdr + sizeof(*mhdr), length, data, length);
/* send multicast genetlink message */
#if LINUX_VERSION_CODE < KERNEL_VERSION(4,0,0)
genl_res = genlmsg_end(skb, msg_header);
if (genl_res < 0)
goto out_free_skb;
#else
genlmsg_end(skb, msg_header);
#endif
send_group = dst_group;
genl_res = genlmsg_multicast(&mtlk_genl_family, skb, 0, send_group, flags);
if (genl_res) {
ILOG2_D("Error %d sending netlink message", genl_res);
return MTLK_ERR_UNKNOWN;
} else
return MTLK_ERR_OK;
out_free_skb:
nlmsg_free(skb);
return res;
}
int wave_nl_update_receive_callback (wave_nl_callback_t rx_clb_fn, void *rx_clb_fn_params, u8 cmd)
{
wave_nl_socket_t *nl_socket = &wave_nl_socket;
if (!nl_socket->is_active)
return MTLK_ERR_UNKNOWN;
if (rx_clb_fn && nl_socket->receive_callback[cmd])
return MTLK_ERR_ALREADY_EXISTS;
nl_socket->receive_callback_ctx[cmd] = rx_clb_fn_params;
nl_socket->receive_callback[cmd] = rx_clb_fn;
return MTLK_ERR_OK;
}
static void _wave_nl_parse_msg (struct mtlk_nl_msghdr *hdr)
{
const char mtlk_fingerprint[] = { 'm', 't', 'l', 'k' };
wave_nl_socket_t *nl_socket = &wave_nl_socket;
/* Ignore packets from unknown applications */
MTLK_ASSERT(sizeof(hdr->fingerprint) == sizeof(mtlk_fingerprint));
if (memcmp(hdr->fingerprint, mtlk_fingerprint, sizeof(mtlk_fingerprint))) {
return;
}
/* Ignore non-Recovery packets */
if ((hdr->cmd_id != NL_DRV_CMD_FAPI_NOTIFY) &&
(hdr->cmd_id != NL_DRV_CMD_RTLOG_NOTIFY))
{
return;
}
/* Check version of protocol */
if (hdr->proto_ver != MTLK_NL_PROTOCOL_VERSION) {
ELOG_DD("Wrong MTLK Protocol version for Netlink socket: got %d, expected %d.",
hdr->proto_ver, MTLK_NL_PROTOCOL_VERSION);
return;
}
/* Call RX-callback function if any */
if (nl_socket->receive_callback[hdr->cmd_id]) {
nl_socket->receive_callback[hdr->cmd_id](nl_socket->receive_callback_ctx[hdr->cmd_id], (void *)(hdr+1));
}
else {
ELOG_V("Fail to parse Notification message from application: RX-callback function not found.");
}
}
static int _wave_nl_parse_brd_msg_cb (struct sk_buff *skb2, struct genl_info *info)
{
struct nlattr *attr;
void *data;
if (info == NULL)
return 0;
attr = info->attrs[MTLK_GENL_ATTR_EVENT];
if (attr) {
data = nla_data(attr);
if (NULL == data) {
ELOG_V("Fail to receive Notification message from application: payload is absent.");
}
else {
_wave_nl_parse_msg((struct mtlk_nl_msghdr *)data);
}
}
else {
ELOG_D("Fail to receive Notification message from application: "
"no genl_info.attrs[%d].", MTLK_GENL_CMD_EVENT);
}
return 0;
}
int mtlk_nl_init (wave_nl_callback_t rx_clb_fn, void *rx_clb_fn_params, u8 cmd)
{
int result;
wave_nl_socket_t *nl_socket = &wave_nl_socket;
memset(nl_socket, 0, sizeof(*nl_socket));
nl_socket->receive_callback[cmd] = rx_clb_fn;
nl_socket->receive_callback_ctx[cmd] = rx_clb_fn_params;
if(mtlk_genl_family_id) {
mtlk_genl_family.id = mtlk_genl_family_id;
}
result = genl_register_family(&mtlk_genl_family);
if (result) {
mtlk_osal_emergency_print("Failed to register Netlink family: res=%d", result);
return MTLK_ERR_UNKNOWN;
}
/* Pass actual value back to module param
* to be readable via sysfs and iwpriv */
mtlk_genl_family_id = mtlk_genl_family.id;
if (mtlk_nl_bt_acs_init() != MTLK_ERR_OK) {
mtlk_osal_emergency_print("Failed to create Netlink socket for ASC-algo");
goto unreg_ops;
}
nl_socket->is_active = TRUE;
return MTLK_ERR_OK;
unreg_ops:
genl_unregister_family(&mtlk_genl_family);
memset(nl_socket, 0, sizeof(*nl_socket));
return MTLK_ERR_UNKNOWN;
}
void mtlk_nl_cleanup (void)
{
genl_unregister_family(&mtlk_genl_family);
memset(&wave_nl_socket, 0, sizeof(wave_nl_socket));
mtlk_nl_bt_acs_cleanup();
ILOG1_V("Netlink module cleanup done");
}
EXPORT_SYMBOL(wave_nl_is_active);
EXPORT_SYMBOL(wave_nl_update_receive_callback);
EXPORT_SYMBOL(mtlk_nl_send_brd_msg);
EXPORT_SYMBOL(mtlk_nl_bt_acs_send_brd_msg);
|
suveng/demo
|
spring/boot-stat-machine/src/main/java/my/suveng/statmachine/demo/own/IStat.java
|
<filename>spring/boot-stat-machine/src/main/java/my/suveng/statmachine/demo/own/IStat.java<gh_stars>1-10
package my.suveng.statmachine.demo.own;
/**
* 状态抽闲
*
* @author suwenguang
**/
public interface IStat {
/**
* 转化状态前的回调
*
* @author suwenguang
*/
void doBefore();
/**
* 转化到这个状态需要做的事情
*
* @author suwenguang
*/
void doAction();
/**
* 转化到这个状态 后要做的事情
*
* @author suwenguang
*/
void doAfter();
}
|
abdalla/docker-image-for-python-boto3-mysql
|
deps/unixODBC-2.3.4/Drivers/Postgre7.1/statement.h
|
/* File: statement.h
*
* Description: See "statement.c"
*
* Comments: See "notice.txt" for copyright and license information.
*
*/
#ifndef __STATEMENT_H__
#define __STATEMENT_H__
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "psqlodbc.h"
#include "bind.h"
#ifndef WIN32
#include "isql.h"
#else
#include <windows.h>
#include <sql.h>
#endif
#ifndef FALSE
#define FALSE (BOOL)0
#endif
#ifndef TRUE
#define TRUE (BOOL)1
#endif
typedef enum {
STMT_ALLOCATED, /* The statement handle is allocated, but not used so far */
STMT_READY, /* the statement is waiting to be executed */
STMT_PREMATURE, /* ODBC states that it is legal to call e.g. SQLDescribeCol before
a call to SQLExecute, but after SQLPrepare. To get all the necessary
information in such a case, we simply execute the query _before_ the
actual call to SQLExecute, so that statement is considered to be "premature".
*/
STMT_FINISHED, /* statement execution has finished */
STMT_EXECUTING /* statement execution is still going on */
} STMT_Status;
#define STMT_TRUNCATED -2
#define STMT_INFO_ONLY -1 /* not an error message, just a notification to be returned by SQLError */
#define STMT_OK 0 /* will be interpreted as "no error pending" */
#define STMT_EXEC_ERROR 1
#define STMT_STATUS_ERROR 2
#define STMT_SEQUENCE_ERROR 3
#define STMT_NO_MEMORY_ERROR 4
#define STMT_COLNUM_ERROR 5
#define STMT_NO_STMTSTRING 6
#define STMT_ERROR_TAKEN_FROM_BACKEND 7
#define STMT_INTERNAL_ERROR 8
#define STMT_STILL_EXECUTING 9
#define STMT_NOT_IMPLEMENTED_ERROR 10
#define STMT_BAD_PARAMETER_NUMBER_ERROR 11
#define STMT_OPTION_OUT_OF_RANGE_ERROR 12
#define STMT_INVALID_COLUMN_NUMBER_ERROR 13
#define STMT_RESTRICTED_DATA_TYPE_ERROR 14
#define STMT_INVALID_CURSOR_STATE_ERROR 15
#define STMT_OPTION_VALUE_CHANGED 16
#define STMT_CREATE_TABLE_ERROR 17
#define STMT_NO_CURSOR_NAME 18
#define STMT_INVALID_CURSOR_NAME 19
#define STMT_INVALID_ARGUMENT_NO 20
#define STMT_ROW_OUT_OF_RANGE 21
#define STMT_OPERATION_CANCELLED 22
#define STMT_INVALID_CURSOR_POSITION 23
#define STMT_VALUE_OUT_OF_RANGE 24
#define STMT_OPERATION_INVALID 25
#define STMT_PROGRAM_TYPE_OUT_OF_RANGE 26
#define STMT_BAD_ERROR 27
/* statement types */
enum {
STMT_TYPE_UNKNOWN = -2,
STMT_TYPE_OTHER = -1,
STMT_TYPE_SELECT = 0,
STMT_TYPE_INSERT,
STMT_TYPE_UPDATE,
STMT_TYPE_DELETE,
STMT_TYPE_CREATE,
STMT_TYPE_ALTER,
STMT_TYPE_DROP,
STMT_TYPE_GRANT,
STMT_TYPE_REVOKE
};
#define STMT_UPDATE(stmt) (stmt->statement_type > STMT_TYPE_SELECT)
/* Parsing status */
enum {
STMT_PARSE_NONE = 0,
STMT_PARSE_COMPLETE,
STMT_PARSE_INCOMPLETE,
STMT_PARSE_FATAL
};
/* Result style */
enum {
STMT_FETCH_NONE = 0,
STMT_FETCH_NORMAL,
STMT_FETCH_EXTENDED
};
typedef struct {
COL_INFO *col_info; /* cached SQLColumns info for this table */
char name[MAX_TABLE_LEN+1];
char alias[MAX_TABLE_LEN+1];
} TABLE_INFO;
typedef struct {
TABLE_INFO *ti; /* resolve to explicit table names */
int precision;
int display_size;
int length;
int type;
char nullable;
char func;
char expr;
char quote;
char dquote;
char numeric;
char dot[MAX_TABLE_LEN+1];
char name[MAX_COLUMN_LEN+1];
char alias[MAX_COLUMN_LEN+1];
} FIELD_INFO;
/******** Statement Handle ***********/
struct StatementClass_ {
ConnectionClass *hdbc; /* pointer to ConnectionClass this statement belongs to */
QResultClass *result; /* result of the current statement */
HSTMT FAR *phstmt;
StatementOptions options;
STMT_Status status;
char *__error_message;
int __error_number;
/* information on bindings */
BindInfoClass *bindings; /* array to store the binding information */
BindInfoClass bookmark;
int bindings_allocated;
/* information on statement parameters */
int parameters_allocated;
ParameterInfoClass *parameters;
Int4 currTuple; /* current absolute row number (GetData, SetPos, SQLFetch) */
int save_rowset_size; /* saved rowset size in case of change/FETCH_NEXT */
int rowset_start; /* start of rowset (an absolute row number) */
int bind_row; /* current offset for Multiple row/column binding */
int last_fetch_count; /* number of rows retrieved in last fetch/extended fetch */
int current_col; /* current column for GetData -- used to handle multiple calls */
int lobj_fd; /* fd of the current large object */
char *statement; /* if non--null pointer to the SQL statement that has been executed */
TABLE_INFO **ti;
FIELD_INFO **fi;
int nfld;
int ntab;
int parse_status;
int statement_type; /* According to the defines above */
int data_at_exec; /* Number of params needing SQLPutData */
int current_exec_param; /* The current parameter for SQLPutData */
char put_data; /* Has SQLPutData been called yet? */
char errormsg_created; /* has an informative error msg been created? */
char manual_result; /* Is the statement result manually built? */
char prepare; /* is this statement a prepared statement or direct */
char internal; /* Is this statement being called internally? */
char cursor_name[MAX_CURSOR_LEN+1];
char stmt_with_params[STD_STATEMENT_LEN]; /* statement after parameter substitution */
int reexecute;
};
#define SC_get_conn(a) (a->hdbc)
#define SC_get_Result(a) (a->result);
#define SC_get_errornumber(a) (a->__error_number)
#define SC_set_errornumber(a, n) (a->__error_number = n)
#define SC_get_errormsg(a) (a->__error_message)
/* options for SC_free_params() */
#define STMT_FREE_PARAMS_ALL 0
#define STMT_FREE_PARAMS_DATA_AT_EXEC_ONLY 1
/* Statement prototypes */
StatementClass *SC_Constructor(void);
void InitializeStatementOptions(StatementOptions *opt);
char SC_Destructor(StatementClass *self);
int statement_type(char *statement);
char parse_statement(StatementClass *stmt);
void SC_pre_execute(StatementClass *self);
char SC_unbind_cols(StatementClass *self);
char SC_recycle_statement(StatementClass *self);
void SC_clear_error(StatementClass *self);
void SC_set_error(StatementClass *self, int errnum, const char *msg);
void SC_set_errormsg(StatementClass *self, const char *msg);
char SC_get_error(StatementClass *self, int *number, char **message);
char *SC_create_errormsg(StatementClass *self);
RETCODE SC_execute(StatementClass *self);
RETCODE SC_fetch(StatementClass *self);
void SC_free_params(StatementClass *self, char option);
void SC_log_error(char *func, char *desc, StatementClass *self);
unsigned long SC_get_bookmark(StatementClass *self);
#endif
|
ZakZubair/codesandbox-client
|
src/common/utl.test.js
|
<gh_stars>0
import { getSandboxOptions } from './url';
function testSandboxOptions(url: string) {
expect(getSandboxOptions(url)).toMatchSnapshot();
}
describe('url parameters', () => {
it('keeps everything false on normal urls', () => {
testSandboxOptions('https://codesandbox.io/s/new');
});
it('sets current module if there is one', () => {
testSandboxOptions('https://codesandbox.io/s/new?module=test');
});
it('sets preview view', () => {
testSandboxOptions('https://codesandbox.io/s/new?view=preview');
});
it('sets editor view', () => {
testSandboxOptions('https://codesandbox.io/s/new?view=editor');
});
it("doesn't set unknown fields", () => {
testSandboxOptions('https://codesandbox.io/s/new?view=both');
});
it('can hide navigation', () => {
testSandboxOptions('https://codesandbox.io/s/new?hidenavigation=1');
});
it('can autoresize', () => {
testSandboxOptions('https://codesandbox.io/s/new?autoresize=1');
});
it('can handle multiple options', () => {
testSandboxOptions(
'https://codesandbox.io/s/new?autoresize=1&view=editor&module=test&hidenavigation=1',
);
});
});
|
aaarsene/o3de
|
Code/Framework/AzCore/Tests/TimeDataStatistics.cpp
|
/*
* Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution.
*
* SPDX-License-Identifier: Apache-2.0 OR MIT
*
*/
#include <AzCore/UnitTest/TestTypes.h>
#include <AzCore/UnitTest/UnitTest.h>
#include <AzTest/AzTest.h>
#include <AzCore/std/string/string.h>
#include <AzCore/std/parallel/thread.h>
#include <AzCore/Statistics/TimeDataStatisticsManager.h>
#include <AzCore/Component/Component.h>
#include <AzCore/Component/ComponentApplication.h>
#include <AzCore/Component/TickBus.h>
#include <AzCore/Component/EntityUtils.h>
#include <AzCore/Debug/FrameProfilerBus.h>
#include <AzCore/Debug/FrameProfilerComponent.h>
using namespace AZ;
using namespace Debug;
namespace UnitTest
{
/**
* Validate functionality of the convenience class TimeDataStatisticsManager.
* It is a specialized version of RunningStatisticsManager that works with Timer type
* of registers that can be captured with the FrameProfilerBus::OnFrameProfilerData()
*/
class TimeDataStatisticsManagerTest
: public AllocatorsFixture
, public FrameProfilerBus::Handler
{
static constexpr const char* PARENT_TIMER_STAT = "ParentStat";
static constexpr const char* CHILD_TIMER_STAT0 = "ChildStat0";
static constexpr const char* CHILD_TIMER_STAT1 = "ChildStat1";
public:
TimeDataStatisticsManagerTest()
: AllocatorsFixture()
{
}
void SetUp() override
{
AllocatorsFixture::SetUp();
m_statsManager = AZStd::make_unique<Statistics::TimeDataStatisticsManager>();
}
void TearDown() override
{
m_statsManager = nullptr;
AllocatorsFixture::TearDown();
}
//////////////////////////////////////////////////////////////////////////
// FrameProfilerBus
virtual void OnFrameProfilerData(const FrameProfiler::ThreadDataArray& data)
{
for (size_t iThread = 0; iThread < data.size(); ++iThread)
{
const FrameProfiler::ThreadData& td = data[iThread];
FrameProfiler::ThreadData::RegistersMap::const_iterator regIt = td.m_registers.begin();
for (; regIt != td.m_registers.end(); ++regIt)
{
const FrameProfiler::RegisterData& rd = regIt->second;
u32 unitTestCrc = AZ_CRC("UnitTest", 0x8089cea8);
if (unitTestCrc != rd.m_systemId)
{
continue; //Not for us.
}
ASSERT_EQ(ProfilerRegister::PRT_TIME, rd.m_type);
const FrameProfiler::FrameData& fd = rd.m_frames.back();
m_statsManager->PushTimeDataSample(rd.m_name, fd.m_timeData);
}
}
}
//////////////////////////////////////////////////////////////////////////
int ChildFunction0(int numIterations, int sleepTimeMilliseconds)
{
AZ_PROFILE_TIMER("UnitTest", CHILD_TIMER_STAT0);
AZStd::this_thread::sleep_for(AZStd::chrono::milliseconds(sleepTimeMilliseconds));
int result = 5;
for (int i = 0; i < numIterations; ++i)
{
result += i % 3;
}
return result;
}
int ChildFunction1(int numIterations, int sleepTimeMilliseconds)
{
AZ_PROFILE_TIMER("UnitTest", CHILD_TIMER_STAT1);
AZStd::this_thread::sleep_for(AZStd::chrono::milliseconds(sleepTimeMilliseconds));
int result = 5;
for (int i = 0; i < numIterations; ++i)
{
result += i % 3;
}
return result;
}
int ParentFunction(int numIterations, int sleepTimeMilliseconds)
{
AZ_PROFILE_TIMER("UnitTest", PARENT_TIMER_STAT);
AZStd::this_thread::sleep_for(AZStd::chrono::milliseconds(sleepTimeMilliseconds));
int result = 0;
result += ChildFunction0(numIterations, sleepTimeMilliseconds);
result += ChildFunction1(numIterations, sleepTimeMilliseconds);
return result;
}
void run()
{
Debug::FrameProfilerBus::Handler::BusConnect();
ComponentApplication app;
ComponentApplication::Descriptor desc;
desc.m_useExistingAllocator = true;
desc.m_enableDrilling = false; // we already created a memory driller for the test (AllocatorsFixture)
ComponentApplication::StartupParameters startupParams;
startupParams.m_allocator = &AllocatorInstance<SystemAllocator>::Get();
Entity* systemEntity = app.Create(desc, startupParams);
systemEntity->CreateComponent<FrameProfilerComponent>();
systemEntity->Init();
systemEntity->Activate(); // start frame component
const int sleepTimeAllFuncsMillis = 1;
const int numIterations = 10;
for (int iterationCounter = 0; iterationCounter < numIterations; ++iterationCounter)
{
ParentFunction(numIterations, sleepTimeAllFuncsMillis);
//Collect all samples.
app.Tick();
}
//Verify we have three running stats.
{
AZStd::vector<Statistics::NamedRunningStatistic*> allStats;
m_statsManager->GetAllStatistics(allStats);
EXPECT_EQ(allStats.size(), 3);
}
AZStd::string parentStatName(PARENT_TIMER_STAT);
AZStd::string child0StatName(CHILD_TIMER_STAT0);
AZStd::string child1StatName(CHILD_TIMER_STAT1);
ASSERT_TRUE(m_statsManager->GetStatistic(parentStatName) != nullptr);
ASSERT_TRUE(m_statsManager->GetStatistic(child0StatName) != nullptr);
ASSERT_TRUE(m_statsManager->GetStatistic(child1StatName) != nullptr);
EXPECT_EQ(m_statsManager->GetStatistic(parentStatName)->GetNumSamples(), numIterations);
EXPECT_EQ(m_statsManager->GetStatistic(child0StatName)->GetNumSamples(), numIterations);
EXPECT_EQ(m_statsManager->GetStatistic(child1StatName)->GetNumSamples(), numIterations);
const double minimumExpectDurationOfChildFunctionMicros = 1;
const double minimumExpectDurationOfParentFunctionMicros = 1;
EXPECT_GE(m_statsManager->GetStatistic(parentStatName)->GetMinimum(), minimumExpectDurationOfParentFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(parentStatName)->GetAverage(), minimumExpectDurationOfParentFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(parentStatName)->GetMaximum(), minimumExpectDurationOfParentFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child0StatName)->GetMinimum(), minimumExpectDurationOfChildFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child0StatName)->GetAverage(), minimumExpectDurationOfChildFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child0StatName)->GetMaximum(), minimumExpectDurationOfChildFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child1StatName)->GetMinimum(), minimumExpectDurationOfChildFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child1StatName)->GetAverage(), minimumExpectDurationOfChildFunctionMicros);
EXPECT_GE(m_statsManager->GetStatistic(child1StatName)->GetMaximum(), minimumExpectDurationOfChildFunctionMicros);
//Let's validate TimeDataStatisticsManager::RemoveStatistics()
m_statsManager->RemoveStatistic(child1StatName);
ASSERT_TRUE(m_statsManager->GetStatistic(parentStatName) != nullptr);
ASSERT_TRUE(m_statsManager->GetStatistic(child0StatName) != nullptr);
EXPECT_EQ(m_statsManager->GetStatistic(child1StatName), nullptr);
//Let's store the sample count for both parentStatName and child0StatName.
const AZ::u64 numSamplesParent = m_statsManager->GetStatistic(parentStatName)->GetNumSamples();
const AZ::u64 numSamplesChild0 = m_statsManager->GetStatistic(child0StatName)->GetNumSamples();
//Let's call child1 function again and call app.Tick(). child1StatName should be readded to m_statsManager.
ChildFunction1(numIterations, sleepTimeAllFuncsMillis);
app.Tick();
ASSERT_TRUE(m_statsManager->GetStatistic(child1StatName) != nullptr);
EXPECT_EQ(m_statsManager->GetStatistic(parentStatName)->GetNumSamples(), numSamplesParent);
EXPECT_EQ(m_statsManager->GetStatistic(child0StatName)->GetNumSamples(), numSamplesChild0);
EXPECT_EQ(m_statsManager->GetStatistic(child1StatName)->GetNumSamples(), 1);
Debug::FrameProfilerBus::Handler::BusDisconnect();
app.Destroy();
}
AZStd::unique_ptr<Statistics::TimeDataStatisticsManager> m_statsManager;
};//class TimeDataStatisticsManagerTest
TEST_F(TimeDataStatisticsManagerTest, Test)
{
run();
}
//End of all Tests of TimeDataStatisticsManagerTest
}//namespace UnitTest
|
sw1115/climate-watch
|
spec/support/shared_contexts/net_zero_content/categories.rb
|
<gh_stars>10-100
RSpec.shared_context 'Net Zero categories' do
let(:global_type) {
FactoryBot.create(:indc_category_type, name: ::Indc::CategoryType::GLOBAL)
}
let(:overview_type) {
FactoryBot.create(
:indc_category_type, name: ::Indc::CategoryType::OVERVIEW
)
}
let!(:overview) {
FactoryBot.create(
:indc_category,
parent: nil,
category_type: global_type,
slug: 'overview',
name: 'Overview'
)
}
let!(:net_zero_category) {
FactoryBot.create(
:indc_category,
parent: overview,
category_type: overview_type,
slug: 'net_zero_category',
name: 'Net Zero Category'
)
}
end
|
PolRod/aleph
|
lib/schemas/redis_store.rb
|
<filename>lib/schemas/redis_store.rb<gh_stars>10-100
require 'json'
module Schemas
module RedisStore
EXPIRE = 3.days
def redis_retrieve
r = Redis.current.get(key)
r ? JSON.parse(r) : []
end
def redis_store!(schema_rows)
Redis.current.del(key)
Redis.current.set(key, JSON.generate(schema_rows))
# should be deleting the key before we store
# but set expire just in case we switch keys in the code
Redis.current.expire(key, EXPIRE)
end
end
end
|
olivier-maury/unity
|
std-plugins/src/main/java/pl/edu/icm/unity/stdext/identity/AbstractStaticIdentityTypeProvider.java
|
/*
* Copyright (c) 2013 ICM Uniwersytet Warszawski All rights reserved.
* See LICENCE file for licensing information.
*/
package pl.edu.icm.unity.stdext.identity;
import pl.edu.icm.unity.types.basic.Identity;
/**
* Base class for static identity types, which simply store the identity value in the database.
* @author <NAME>
*/
public abstract class AbstractStaticIdentityTypeProvider extends AbstractIdentityTypeProvider
{
@Override
public Identity createNewIdentity(String realm, String target, long entityId)
{
throw new IllegalStateException("This identity type doesn't support dynamic identity creation.");
}
@Override
public boolean isDynamic()
{
return false;
}
@Override
public boolean isTargeted()
{
return false;
}
@Override
public boolean isExpired(Identity idRepresentation)
{
return false;
}
@Override
public boolean isEmailVerifiable()
{
return false;
}
}
|
loicgasser/ngeo
|
src/message/extraModule.js
|
<filename>src/message/extraModule.js
/**
* @module ngeo.message.extraModule
*/
import ngeoMessageNotification from 'ngeo/message/Notification.js';
import ngeoMessageDisclaimer from 'ngeo/message/Disclaimer.js';
import ngeoMessageDisplaywindowComponent from 'ngeo/message/displaywindowComponent.js';
import ngeoMessagePopupComponent from 'ngeo/message/popupComponent.js';
import ngeoMessagePopup from 'ngeo/message/Popup.js';
import ngeoMessagePopoverComponent from 'ngeo/message/popoverComponent.js';
import ngeoMessageModalComponent from 'ngeo/message/modalComponent.js';
/**
* @type {angular.Module}
*/
const exports = angular.module('ngeoMessageExtraModule', [
ngeoMessageNotification.module.name,
ngeoMessageDisplaywindowComponent.name,
ngeoMessageDisclaimer.module.name,
ngeoMessagePopupComponent.name,
ngeoMessagePopup.module.name,
ngeoMessagePopoverComponent.name,
ngeoMessageModalComponent.name,
]);
export default exports;
|
xaoxuu/AXKit
|
Products/AXKit.framework/Headers/NSObject+AXAdd.h
|
//
// NSObject+AXAdd.h
// AXKit
//
// Created by xaoxuu on 13/03/2018.
// Copyright © 2018 Titan Studio. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface NSObject (AXAdd)
/**
获取所有子类
@return 所有子类
*/
+ (NSArray *)ax_subclasses;
@end
NS_ASSUME_NONNULL_END
|
trespasserw/MPS
|
testbench/testsolutions/editor.menus.tests/test_gen/jetbrains/mps/lang/editor/menus/tests/Include_MenuAndTargetNodeCorrespondence_Test.java
|
package jetbrains.mps.lang.editor.menus.tests;
/*Generated by MPS */
import jetbrains.mps.MPSLaunch;
import jetbrains.mps.lang.test.runtime.BaseTransformationTest;
import org.junit.ClassRule;
import jetbrains.mps.lang.test.runtime.TestParametersCache;
import org.junit.Rule;
import jetbrains.mps.lang.test.runtime.RunWithCommand;
import org.junit.Test;
import jetbrains.mps.lang.test.runtime.BaseTestBody;
import jetbrains.mps.lang.test.runtime.TransformationTest;
import java.util.List;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.internal.collections.runtime.ListSequence;
import java.util.ArrayList;
import jetbrains.mps.baseLanguage.behavior.ConceptFunction__BehaviorDescriptor;
import org.junit.Assert;
import jetbrains.mps.lang.test.matcher.NodesMatcher;
import jetbrains.mps.lang.test.runtime.CheckErrorMessagesRunnable;
import jetbrains.mps.project.ProjectBase;
import jetbrains.mps.lang.test.runtime.CheckExpectedMessageRunnable;
import jetbrains.mps.errors.MessageStatus;
import jetbrains.mps.smodel.SNodePointer;
import org.jetbrains.mps.openapi.persistence.PersistenceFacade;
import jetbrains.mps.smodel.builder.SNodeBuilder;
import org.jetbrains.mps.openapi.language.SConcept;
import jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory;
import org.jetbrains.mps.openapi.language.SReferenceLink;
@MPSLaunch
public class Include_MenuAndTargetNodeCorrespondence_Test extends BaseTransformationTest {
@ClassRule
public static final TestParametersCache ourParamCache = new TestParametersCache(Include_MenuAndTargetNodeCorrespondence_Test.class, "${mps_home}", "r:4f8193a2-048e-4ddf-b505-dfca00e8c910(jetbrains.mps.lang.editor.menus.tests@tests)", false);
@Rule
public final RunWithCommand myWithCommandRule = new RunWithCommand(this);
public Include_MenuAndTargetNodeCorrespondence_Test() {
super(ourParamCache);
}
@Test
public void test_targetNodeForMenuShouldReturnMenuConcept() throws Throwable {
new TestBody(this).test_targetNodeForMenuShouldReturnMenuConcept();
}
@Test
public void test_MatchingMenuForCurrentNode6903010549536714073() throws Throwable {
new TestBody(this).test_MatchingMenuForCurrentNode6903010549536714073();
}
@Test
public void test_NonMatchingMenuForCurrentNode6903010549536714075() throws Throwable {
new TestBody(this).test_NonMatchingMenuForCurrentNode6903010549536714075();
}
/*package*/ static class TestBody extends BaseTestBody {
/*package*/ TestBody(TransformationTest owner) {
super(owner);
}
public void test_targetNodeForMenuShouldReturnMenuConcept() throws Exception {
addNodeById("2705676212747008271");
addNodeById("6903010549536712693");
{
List<SNode> nodesBefore = ListSequence.fromListAndArray(new ArrayList<SNode>(), createSNodeType_sxmjox_a0a0a0c0d9());
List<SNode> nodesAfter = ListSequence.fromListAndArray(new ArrayList<SNode>(), ConceptFunction__BehaviorDescriptor.getExpectedReturnType_idhEwIGRD.invoke(getNodeById("2705676212747008292")));
Assert.assertTrue("The nodes '" + nodesBefore + "' and '" + nodesAfter + "' do not match!", new NodesMatcher(nodesBefore, nodesAfter).diff().isEmpty());
}
}
public void test_MatchingMenuForCurrentNode6903010549536714073() throws Exception {
SNode nodeToCheck = getRealNodeById("6903010549536713473");
SNode operation = getRealNodeById("6903010549536714073");
new CheckErrorMessagesRunnable(nodeToCheck, false, false, ((ProjectBase) myProject).getPlatform()).includeSelf(true).exclude(ListSequence.fromList(new ArrayList<CheckExpectedMessageRunnable>())).run();
}
public void test_NonMatchingMenuForCurrentNode6903010549536714075() throws Exception {
SNode nodeToCheck = getRealNodeById("6903010549536712731");
SNode operation = getRealNodeById("6903010549536714075");
new CheckExpectedMessageRunnable.CheckExpectedRuleMessageRunnable(nodeToCheck, MessageStatus.ERROR, new SNodePointer("r:00000000-0000-4000-0000-011c8959029a(jetbrains.mps.lang.editor.typesystem)", "6903010549536798466"), "", myProject.getRepository(), ((ProjectBase) myProject).getPlatform()).run();
}
private static SNode createSNodeType_sxmjox_a0a0a0c0d9() {
PersistenceFacade facade = PersistenceFacade.getInstance();
SNodeBuilder n0 = new SNodeBuilder().init(CONCEPTS.SNodeType$hR);
n0.setReference(LINKS.concept$OMgE, new SNodePointer(facade.createModelReference("r:c6b5a119-ed4d-420e-b7df-fa1b4101c68f(jetbrains.mps.lang.editor.menus.testLanguage.structure)"), facade.createNodeId("2705676212746996052")));
return n0.getResult();
}
}
private static final class CONCEPTS {
/*package*/ static final SConcept SNodeType$hR = MetaAdapterFactory.getConcept(0x7866978ea0f04cc7L, 0x81bc4d213d9375e1L, 0x108f968b3caL, "jetbrains.mps.lang.smodel.structure.SNodeType");
}
private static final class LINKS {
/*package*/ static final SReferenceLink concept$OMgE = MetaAdapterFactory.getReferenceLink(0x7866978ea0f04cc7L, 0x81bc4d213d9375e1L, 0x108f968b3caL, 0x1090e46ca51L, "concept");
}
}
|
betagouv/react-final-form-utils
|
src/utils/composeValidators.js
|
export const composeValidators = (...validators) => value =>
validators.reduce(
(error, validator) =>
error ||
(typeof validator === 'function' && validator(value)),
undefined
)
export default composeValidators
|
sunianping/elasticsearch
|
core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.valuecount;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* A field data based aggregator that counts the number of values a specific field has within the aggregation context.
*
* This aggregator works in a multi-bucket mode, that is, when serves as a sub-aggregator, a single aggregator instance aggregates the
* counts for all buckets owned by the parent aggregator)
*/
public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
final ValuesSource valuesSource;
// a count per bucket
LongArray counts;
public ValueCountAggregator(String name, ValuesSource valuesSource,
SearchContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData)
throws IOException {
super(name, aggregationContext, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
if (valuesSource != null) {
counts = context.bigArrays().newLongArray(1, true);
}
}
@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
final BigArrays bigArrays = context.bigArrays();
final SortedBinaryDocValues values = valuesSource.bytesValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
counts = bigArrays.grow(counts, bucket + 1);
if (values.advanceExact(doc)) {
counts.increment(bucket, values.docValueCount());
}
}
};
}
@Override
public double metric(long owningBucketOrd) {
return valuesSource == null ? 0 : counts.get(owningBucketOrd);
}
@Override
public InternalAggregation buildAggregation(long bucket) {
if (valuesSource == null || bucket >= counts.size()) {
return buildEmptyAggregation();
}
return new InternalValueCount(name, counts.get(bucket), pipelineAggregators(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalValueCount(name, 0L, pipelineAggregators(), metaData());
}
@Override
public void doClose() {
Releasables.close(counts);
}
}
|
theFaustus/demo-oca
|
src/md/tekwill/demo/inheritance/employeehomework/Employee.java
|
package md.tekwill.demo.inheritance.employeehomework;
import java.time.LocalDate;
import java.util.Random;
class Employee {
protected String name;
protected String employeeNumber;
protected LocalDate hireDate;
public Employee(String name) {
this(name, LocalDate.now());
}
public Employee(String name, LocalDate hireDate) {
this(name, "", hireDate);
}
public Employee(String name, String employeeNumber, LocalDate hireDate) {
this.name = name;
if (!isValid(employeeNumber))
this.employeeNumber = generateEmployeeNumber();
else
this.employeeNumber = employeeNumber;
this.hireDate = hireDate;
}
private String generateEmployeeNumber() {
return new StringBuilder()
.append(new Random().nextInt(9))
.append(new Random().nextInt(9))
.append(new Random().nextInt(9))
.append("-")
.append((char) (new Random().nextInt(12) + 65))
.toString();
}
private boolean isValid(String employeeNumber) {
return employeeNumber.length() == 5
&& Character.isDigit(employeeNumber.charAt(0))
&& Character.isDigit(employeeNumber.charAt(1))
&& Character.isDigit(employeeNumber.charAt(2))
&& employeeNumber.charAt(3) == '-'
&& employeeNumber.charAt(4) > 65
&& employeeNumber.charAt(4) < 77;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmployeeNumber() {
return employeeNumber;
}
public void setEmployeeNumber(String employeeNumber) {
this.employeeNumber = employeeNumber;
}
public LocalDate getHireDate() {
return hireDate;
}
public void setHireDate(LocalDate hireDate) {
this.hireDate = hireDate;
}
@Override
public String toString() {
return "Employee{" +
"name='" + name + '\'' +
", employeeNumber='" + employeeNumber + '\'' +
", hireDate=" + hireDate +
'}';
}
}
|
googege/algo-learn
|
java/algorithm/recursion/ValidBinarySearchTree.java
|
<gh_stars>100-1000
package algorithm.recursion;
/**
* @author roseduan
* @time 2020/9/23 8:11 下午
* @description 验证二叉搜索树
*/
public class ValidBinarySearchTree {
/**
* 根据二叉搜索树的特征,递归
*/
public boolean isValidBST(TreeNode root) {
return helper(root, Long.MIN_VALUE, Long.MAX_VALUE);
}
private boolean helper(TreeNode node, long min, long max) {
if (node == null) {
return true;
}
if (node.val <= min || node.val >= max) {
return false;
}
return helper(node.left, min, node.val) && helper(node.right, node.val, max);
}
/**
* 中序遍历
*/
private TreeNode prev = null;
public boolean isValidBST2(TreeNode root) {
if (root == null) {
return true;
}
if (!isValidBST2(root.left)) {
return false;
}
if (prev != null && prev.val >= root.val) {
return false;
}
prev = root;
return isValidBST2(root.right);
}
public static class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int val) { this.val = val; }
TreeNode(int val, TreeNode left, TreeNode right) {
this.val = val;
this.left = left;
this.right = right;
}
}
}
|
saurabhgis/CV
|
wab/widgets/Geoprocessing/resultrenderers/FeatureSetRenderer.js
|
define([
'dojo/_base/declare',
'dojo/_base/lang',
'dojo/_base/array',
'dojo/_base/html',
'dojo/dom-style',
'dojo/dom-attr',
'dojo/on',
'dijit/_TemplatedMixin',
'esri/layers/GraphicsLayer',
'esri/layers/FeatureLayer',
'esri/graphicsUtils',
'esri/renderers/SimpleRenderer',
'esri/renderers/jsonUtils',
'esri/InfoTemplate',
'jimu/exportUtils',
'jimu/dijit/ExportChooser',
'dojo/text!./FeatureSetRenderer.html',
'../BaseResultRenderer',
'../LayerOrderUtil',
'./defaultSymbol'
], function(declare, lang, array, html, domStyle, domAttr, on, _TemplatedMixin, GraphicsLayer,
FeatureLayer, graphicsUtils, SimpleRenderer, rendererUtils, InfoTemplate,
exportUtils, ExportChooser, template, BaseResultRenderer, LayerOrderUtil, defaultSymbol){
return declare([BaseResultRenderer, _TemplatedMixin], {
baseClass: 'jimu-gp-resultrenderer-base jimu-gp-renderer-draw-feature',
templateString: template,
postCreate: function(){
this.inherited(arguments);
if(this.value.features && this.value.features.length > 0){
this._displayText();
this._drawResultFeature(this.param, this.value);
}
},
destroy: function(){
if(this.resultLayer){
this.map.removeLayer(this.resultLayer);
}
this.inherited(arguments);
},
_displayText: function(){
domStyle.set(this.clearNode, 'display', '');
domAttr.set(this.clearNode, 'title', this.nls.clear);
this.own(on(this.clearNode, 'click', lang.hitch(this, function(){
if(this.resultLayer){
if(this.map.infoWindow.isShowing){
this.map.infoWindow.hide();
}
this.resultLayer.clear();
//remove layer so it will not displayed in Layer List or Legend widget
this.map.removeLayer(this.resultLayer);
}
domStyle.set(this.exportNode, 'display', 'none');
domStyle.set(this.clearNode, 'display', 'none');
this.labelContent.innerHTML = this.nls.cleared;
})));
if(this.config.showExportButton){
domStyle.set(this.exportNode, 'display', '');
domAttr.set(this.exportNode, 'title', this.nls.exportOutput);
var ds = exportUtils.createDataSource({
type: exportUtils.TYPE_FEATURESET,
data: this.value,
filename: this.param.name
});
this.exportChooser = new ExportChooser({
dataSource: ds
});
this.exportChooser.hide();
html.place(this.exportChooser.domNode, this.domNode);
this.own(on(this.exportNode, 'click', lang.hitch(this, function(event){
event.preventDefault();
event.stopPropagation();
this.exportChooser.show(event.clientX, event.clientY);
})));
}
},
_drawResultFeature: function(param, featureset){
if(this.config.shareResults && !this.config.useDynamicSchema){
if(!param.defaultValue || !param.defaultValue.geometryType){
throw Error('Output parameter default value does not provide enough information' +
' to draw feature layer.');
}
param.defaultValue.name = param.name;
var featureCollection = {
layerDefinition: param.defaultValue,
featureSet: null
};
this.resultLayer = new FeatureLayer(featureCollection, {
id: this.widgetUID + param.name
});
}else{
this.resultLayer = new GraphicsLayer({
id: this.widgetUID + param.name
});
}
this.resultLayer.title = param.label || param.name;
this._addResultLayer(param.name);
if(!param.popup){
param.popup = {
enablePopup: true,
title: '',
fields: []
};
}
var len = featureset.features.length, renderer = param.renderer;
if(this.config.useDynamicSchema || !renderer){
if(featureset.geometryType === 'esriGeometryPoint'){
renderer = new SimpleRenderer(defaultSymbol.pointSymbol);
}else if(featureset.geometryType === 'esriGeometryPolyline'){
renderer = new SimpleRenderer(defaultSymbol.lineSymbol);
}else if(featureset.geometryType === 'esriGeometryPolygon'){
renderer = new SimpleRenderer(defaultSymbol.polygonSymbol);
}
}else{
renderer = rendererUtils.fromJson(renderer);
}
var infoTemplate;
if(param.popup.enablePopup){
//Use param.popup.title or a non-exist field name as the title of popup window.
infoTemplate = new InfoTemplate(param.popup.title || '${Non-Exist-Field}',
this._generatePopupContent(featureset));
}
for (var i = 0; i < len; i++) {
if(infoTemplate){
featureset.features[i].setInfoTemplate(infoTemplate);
}
this.resultLayer.add(featureset.features[i]);
}
this.resultLayer.setRenderer(renderer);
try{
var extent = graphicsUtils.graphicsExtent(featureset.features);
if(extent){
this.resultLayer.fullExtent = extent.expand(1.4);
this.map.setExtent(this.resultLayer.fullExtent);
}
}
catch(e){
console.error(e);
}
},
_addResultLayer: function(paramName){
var layerOrderUtil = new LayerOrderUtil(this.config, this.map);
try{
layerOrderUtil.calculateLayerIndex(paramName, this.widgetUID).then(
lang.hitch(this, function(layerIndex){
if(layerIndex !== -1){
this.map.addLayer(this.resultLayer, layerIndex);
}else{
this.map.addLayer(this.resultLayer);
}
}));
}catch(err){
console.error(err.message);
console.warn('Draw result feature set on the top of map');
this.map.addLayer(this.resultLayer);
}
},
_generatePopupContent: function(featureset){
var str = '<div class="geoprocessing-popup">' +
'<table class="geoprocessing-popup-table" ' +
'cellpadding="0" cellspacing="0">' + '<tbody>';
var rowStr = '';
var fields;
if(!this.config.useDynamicSchema &&
this.param.popup.fields &&
this.param.popup.fields.length > 0){
fields = this.param.popup.fields;
}else{
fields = featureset.fields;
}
array.forEach(fields, function(field){
var row = '<tr valign="top">' +
'<td class="attr-name">' + field.alias + '</td>' +
'<td class="attr-value">${' + field.name + '}</td>' +
'</tr>';
rowStr += row;
});
return str + rowStr + '</tbody></table></div>';
}
});
});
|
prateeksingh0001/FlexNeuART
|
trec_eval-9.0.7/m_P_avgjg.c
|
/*
Copyright (c) 2008 - <NAME>.
Permission is granted for use and modification of this file for
research, non-commercial purposes.
*/
#include "common.h"
#include "sysfunc.h"
#include "trec_eval.h"
#include "functions.h"
#include "trec_format.h"
static int
te_calc_P_avgjg (const EPI *epi, const REL_INFO *rel_info,
const RESULTS *results, const TREC_MEAS *tm, TREC_EVAL *eval);
static long long_cutoff_array[] = {5, 10, 15, 20, 30, 100, 200, 500, 1000};
static PARAMS default_P_avgjg_cutoffs = {
NULL, sizeof (long_cutoff_array) / sizeof (long_cutoff_array[0]),
&long_cutoff_array[0]};
/* See trec_eval.h for definition of TREC_MEAS */
TREC_MEAS te_meas_P_avgjg =
{"P_avgjg",
" Precision at cutoffs, averaged over judgment groups (users)\n\
Precision measured at various doc level cutoffs in the ranking.\n\
If the cutoff is larger than the number of docs retrieved, then\n\
it is assumed nonrelevant docs fill in the rest. Eg, if a method\n\
retrieves 15 docs of which 4 are relevant, then P20 is 0.2 (4/20).\n\
If there are multiple relevance judgment sets for this query, Precision\n\
is averaged over the judgment groups.\n\
Cutoffs must be positive without duplicates\n\
Default param: trec_eval -m P.5,10,15,20,30,100,200,500,1000\n",
te_init_meas_a_float_cut_long,
te_calc_P_avgjg,
te_acc_meas_a_cut,
te_calc_avg_meas_a_cut,
te_print_single_meas_a_cut,
te_print_final_meas_a_cut,
(void *) &default_P_avgjg_cutoffs, -1};
static int
te_calc_P_avgjg (const EPI *epi, const REL_INFO *rel_info,
const RESULTS *results, const TREC_MEAS *tm, TREC_EVAL *eval)
{
long *cutoffs = (long *) tm->meas_params->param_values;
long cutoff_index;
long i,jg;
RES_RELS_JG rr;
long rel_so_far;
if (UNDEF == te_form_res_rels_jg (epi, rel_info, results, &rr))
return (UNDEF);
for (jg = 0; jg < rr.num_jgs; jg++) {
cutoff_index = 0;
rel_so_far = 0;
for (i = 0; i < rr.jgs[jg].num_ret; i++) {
if (i == cutoffs[cutoff_index]) {
/* Calculate previous cutoff threshold.
Note all guaranteed to be positive by init_meas */
eval->values[tm->eval_index+cutoff_index].value +=
(double) rel_so_far / (double) i;
if (++cutoff_index == tm->meas_params->num_params)
break;
}
if (rr.jgs[jg].results_rel_list[i] >= epi->relevance_level)
rel_so_far++;
}
/* calculate values for those cutoffs not achieved */
while (cutoff_index < tm->meas_params->num_params) {
eval->values[tm->eval_index+cutoff_index].value +=
(double) rel_so_far/(double) cutoffs[cutoff_index];
cutoff_index++;
}
}
if (rr.num_jgs > 1) {
for (cutoff_index = 0; cutoff_index < tm->meas_params->num_params;
cutoff_index++)
eval->values[tm->eval_index + cutoff_index].value /= rr.num_jgs;
}
return (1);
}
|
lechium/iOS1351Headers
|
usr/libexec/FullKeyboardAccess/FKACommandsViewController.h
|
<reponame>lechium/iOS1351Headers<filename>usr/libexec/FullKeyboardAccess/FKACommandsViewController.h
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <UIKit/UIViewController.h>
@class AXSSKeyChord;
@interface FKACommandsViewController : UIViewController
{
AXSSKeyChord *_textEditingModeExitKeyChord; // 8 = 0x8
}
- (void).cxx_destruct; // IMP=0x0000000100008bc4
@property(retain, nonatomic) AXSSKeyChord *textEditingModeExitKeyChord; // @synthesize textEditingModeExitKeyChord=_textEditingModeExitKeyChord;
- (_Bool)_canShowWhileLocked; // IMP=0x0000000100008b98
- (void)loadView; // IMP=0x0000000100008538
- (_Bool)requiresNativeFocus; // IMP=0x0000000100008530
@end
|
Droeftoeter/react-material-icons
|
src/maps/LocalHotel.js
|
import React from 'react';
import BaseIcon from '../BaseIcon';
export default props => (
<BaseIcon
{ ...props }
>
<path d="M14 26c3.31 0 6-2.69 6-6s-2.69-6-6-6-6 2.69-6 6 2.69 6 6 6zm24-12H22v14H6V10H2v30h4v-6h36v6h4V22c0-4.42-3.58-8-8-8z"/>
</BaseIcon>
);
|
CarletonURocketry/2019-avionics
|
src/targets/sam/same54/src/sdhc-test.c
|
//
// sdhc-test.c
// index
//
// Created by <NAME> on 2021-07-28.
// Copyright © 2021 <NAME>. All rights reserved.
//
#include "sdhc-test.h"
#include "sd-commands.h"
#define SDHC_ADMA2_DESC_ACT_NOP_Val 0b00
#define SDHC_ADMA2_DESC_ACT_TRAN_Val 0b10
#define SDHC_ADMA2_DESC_ACT_LINK_Val 0b11
#define SDHC_ADMA2_DESC_VALID (1 << 0)
#define SDHC_ADMA2_DESC_END (1 << 1)
#define SDHC_ADMA2_DESC_INTERRUPT (1 << 2)
#define SDHC_ADMA2_DESC_ACT(x) ((x & 0x3) << 4)
#define SDHC_ADMA2_DESC_ACT_NOP SDHC_ADMA2_DESC_ACT(SDHC_ADMA2_DESC_ACT_NOP_Val)
#define SDHC_ADMA2_DESC_ACT_TRAN SDHC_ADMA2_DESC_ACT(SDHC_ADMA2_DESC_ACT_TRAN_Val)
#define SDHC_ADMA2_DESC_ACT_LINK SDHC_ADMA2_DESC_ACT(SDHC_ADMA2_DESC_ACT_LINK_Val)
struct sdhc_adma2_descriptor32 {
union {
struct {
uint16_t valid:1;
uint16_t end:1;
uint16_t interrupt:1;
uint16_t RESERVED:1;
uint16_t act:2;
uint16_t RESERVED1:10;
} bits;
uint16_t raw;
} attributes;
uint16_t length;
uint32_t address;
};
void init_sdhc_test(void)
{
/* Configure Pin MUX */
PORT->Group[0].PMUX[4].bit.PMUXE = 0x8; // PA08: SDCMD
PORT->Group[0].PINCFG[8].bit.PMUXEN = 0b1;
PORT->Group[0].PMUX[4].bit.PMUXO = 0x8; // PA09: SDDAT0
PORT->Group[0].PINCFG[9].bit.PMUXEN = 0b1;
PORT->Group[0].PMUX[5].bit.PMUXO = 0x8; // PA10: SDDAT1
PORT->Group[0].PINCFG[10].bit.PMUXEN = 0b1;
PORT->Group[0].PMUX[5].bit.PMUXO = 0x8; // PA11: SDDAT2
PORT->Group[0].PINCFG[11].bit.PMUXEN = 0b1;
PORT->Group[1].PMUX[5].bit.PMUXO = 0x8; // PB10: SDDAT3
PORT->Group[1].PINCFG[10].bit.PMUXEN = 0b1;
PORT->Group[1].PMUX[5].bit.PMUXO = 0x8; // PB11: SDCK
PORT->Group[1].PINCFG[11].bit.PMUXEN = 0b1;
PORT->Group[1].PMUX[6].bit.PMUXE = 0x8; // PB12: SDCD
PORT->Group[1].PINCFG[12].bit.PMUXEN = 0b1;
/* Enable Bus Clock for SDHC0 */
MCLK->AHBMASK.reg |= MCLK_AHBMASK_SDHC0;
/* Select Generic Clock for SDHC0 (GLCK5 at 100 MHz from DPLL1) */
do {
GCLK->PCHCTRL[SDHC0_GCLK_ID].reg = GCLK_PCHCTRL_CHEN | GCLK_PCHCTRL_GEN_GCLK5;
} while(!GCLK->PCHCTRL[SDHC0_GCLK_ID].bit.CHEN);
// Slow clock is already selected by initial clock configuration code
/* Reset SDHC instance */
SDHC0->SRR.bit.SWRSTALL = 1;
while (SDHC0->SRR.bit.SWRSTALL);
/* Configure SDHC instance */
// Start with a 400 KHz clock, will increase later
const uint16_t clk_setting = 100000000UL / 400000UL / 2;
SDHC0->CCR.reg = (SDHC_CCR_SDCLKFSEL(clk_setting & 0xFF) |
SDHC_CCR_USDCLKFSEL((clk_setting >> 8) & 0x3) |
SDHC_CCR_INTCLKEN);
// Wait for clock to become stable
while (!SDHC0->CCR.bit.INTCLKS);
// 1 bit mode, no high speed for now, use 32 bit ADMA2
SDHC0->HC1R.reg = SDHC_HC1R_DMASEL_32BIT;
// Configure timeout value
SDHC0->TCR.reg = SDHC_TCR_DTCVAL(13);
}
enum sdhc_cmd_rsp_type {
/** No response */
SDHC_CMD_RSP_TYPE_NONE,
/** 48 bit response */
SDHC_CMD_RSP_TYPE_R1,
/** 48 bit response with busy signal */
SDHC_CMD_RSP_TYPE_R1B,
/** 136 bit response */
SDHC_CMD_RSP_TYPE_R2,
/** 48 bit response, no CRC */
SDHC_CMD_RSP_TYPE_R3,
/** 48 bit response */
SDHC_CMD_RSP_TYPE_R6,
/** 48 bit response */
SDHC_CMD_RSP_TYPE_R7
};
static SDHC_CR_Type sdhc_get_cr_val(uint8_t command, enum sdhc_cmd_rsp_type rsp,
int data)
{
SDHC_CR_Type cr;
switch (rsp) {
case SDHC_CMD_RSP_TYPE_NONE:
cr.reg = (SDHC_CR_RESPTYP_NONE | SDHC_CR_CMDCCEN_DISABLE |
SDHC_CR_CMDICEN_DISABLE);
break;
case SDHC_CMD_RSP_TYPE_R1:
case SDHC_CMD_RSP_TYPE_R6:
case SDHC_CMD_RSP_TYPE_R7:
cr.reg = (SDHC_CR_RESPTYP_48_BIT | SDHC_CR_CMDCCEN_ENABLE |
SDHC_CR_CMDICEN_ENABLE);
break;
case SDHC_CMD_RSP_TYPE_R1B:
cr.reg = (SDHC_CR_RESPTYP_48_BIT_BUSY | SDHC_CR_CMDCCEN_ENABLE |
SDHC_CR_CMDICEN_ENABLE);
break;
case SDHC_CMD_RSP_TYPE_R2:
cr.reg = (SDHC_CR_RESPTYP_136_BIT | SDHC_CR_CMDCCEN_ENABLE |
SDHC_CR_CMDICEN_DISABLE);
break;
case SDHC_CMD_RSP_TYPE_R3:
cr.reg = (SDHC_CR_RESPTYP_48_BIT | SDHC_CR_CMDCCEN_DISABLE |
SDHC_CR_CMDICEN_DISABLE);
break;
};
cr.reg |= ((data ? SDHC_CR_DPSEL_DATA : SDHC_CR_DPSEL_NO_DATA) |
((command == 12) ? SDHC_CR_CMDTYP_ABORT :
SDHC_CR_CMDTYP_NORMAL) |
SDHC_CR_CMDIDX(command));
return cr;
}
static inline void sdhc_enable_cmd_interrupts(int enable_transfer_wait,
int enable_data_interrupts)
{
// Enable command complete interrupt and transfer complete interrupt if
// requested
SDHC0->NISTER.reg |= (SDHC_NISTER_CMDC |
(enable_transfer_wait ? SDHC_NISTER_TRFC : 0));
SDHC0->NISIER.reg |= (SDHC_NISIER_CMDC |
(enable_transfer_wait ? SDHC_NISIER_TRFC : 0));
// Enable command error interrupts
SDHC0->EISTER.reg |= (SDHC_EISTER_CMDTEO |
SDHC_EISTER_CMDCRC |
SDHC_EISTER_CMDEND |
SDHC_EISTER_CMDIDX);
SDHC0->EISIER.reg |= (SDHC_EISIER_CMDTEO |
SDHC_EISIER_CMDCRC |
SDHC_EISIER_CMDEND |
SDHC_EISIER_CMDIDX);
if (enable_data_interrupts) {
// Enable data error interrupts
SDHC0->EISTER.reg |= (SDHC_EISTER_DATTEO |
SDHC_EISTER_DATCRC |
SDHC_EISTER_DATEND |
SDHC_EISTER_ACMD |
SDHC_EISTER_ADMA);
SDHC0->EISIER.reg |= (SDHC_EISIER_DATTEO |
SDHC_EISIER_DATCRC |
SDHC_EISIER_DATEND |
SDHC_EISIER_ACMD |
SDHC_EISIER_ADMA);
} else if (enable_transfer_wait) {
// Enable data timeout error interrupt
SDHC0->EISTER.reg |= SDHC_EISTER_DATTEO;
SDHC0->EISIER.reg |= SDHC_EISIER_DATTEO;
}
}
static int sdhc_do_cmd(uint8_t command, uint32_t arg,
enum sdhc_cmd_rsp_type rsp)
{
/* Make sure that eveything is idle */
while (SDHC0->PSR.bit.CMDINHC || SDHC0->PSR.bit.CMDINHD);
/* Enable required interrupts */
sdhc_enable_cmd_interrupts(rsp == SDHC_CMD_RSP_TYPE_R1B, 0);
/* Configure registers for command */
SDHC0->ARG1R.reg = arg;
SDHC0->TMR.reg = 0;
SDHC0->CR = sdhc_get_cr_val(command, rsp, 0);
/* Wait for command to finish */
while (!SDHC0->NISTR.bit.CMDC);
if (SDHC0->NISTR.bit.ERRINT) {
// Error
return 1;
}
/* Wait for the busy signal if we need to */
if (rsp == SDHC_CMD_RSP_TYPE_R1B) {
while (!SDHC0->NISTR.bit.TRFC);
if (SDHC0->NISTR.bit.ERRINT) {
// Error
return 1;
}
}
/* All done! The response, if any, is in the response registers */
return 0;
}
static int sdhc_do_transfer(uint8_t command, uint32_t arg, uint16_t block_count,
uint16_t block_size, uint8_t *buffer, int write)
{
/* Make sure that eveything is idle */
while (SDHC0->PSR.bit.CMDINHC || SDHC0->PSR.bit.CMDINHD);
/* Make sure that SD clock is on */
SDHC0->CCR.bit.SDCLKEN = 1;
/* Enable required interrupts */
sdhc_enable_cmd_interrupts(1, 1);
/* Configure an ADMA2 descriptor */
struct sdhc_adma2_descriptor32 adma2_desc = { 0 };
uint32_t const len = block_count * block_size;
if (len < 65536) {
adma2_desc.length = len;
} else if (len == 65536) {
adma2_desc.length = 0;
} else {
return 1;
}
adma2_desc.address = (uint32_t)buffer;
adma2_desc.attributes.raw = (SDHC_ADMA2_DESC_VALID |
SDHC_ADMA2_DESC_END |
SDHC_ADMA2_DESC_ACT_TRAN);
/* Set ADMA2 descriptor base address */
SDHC0->ASAR[0].reg = (uint32_t)&adma2_desc;
/* Configure registers for command */
unsigned const multi = block_count > 1;
SDHC0->SSAR.CMD23.ARG2 = block_count;
SDHC0->BSR.reg = SDHC_BSR_BLOCKSIZE(block_size);
SDHC0->BCR.reg = SDHC_BCR_BCNT(block_count);
SDHC0->ARG1R.reg = arg;
SDHC0->TMR.reg = (SDHC_TMR_DMAEN_ENABLE |
(multi << SDHC_TMR_BCEN_Pos) |
SDHC_TMR_ACMDEN_CMD23 |
(write ? SDHC_TMR_DTDSEL_WRITE :
SDHC_TMR_DTDSEL_READ) |
(multi << SDHC_TMR_MSBSEL_Pos));
SDHC0->CR = sdhc_get_cr_val(command, SDHC_CMD_RSP_TYPE_R1, 1);
/* Wait for command to finish */
while (!SDHC0->NISTR.bit.CMDC);
if (SDHC0->NISTR.bit.ERRINT) {
// Error
return 1;
}
/* Wait transfer to finish */
while (!SDHC0->NISTR.bit.TRFC);
if (SDHC0->NISTR.bit.ERRINT) {
// Error
return 1;
}
/* All done! */
return 0;
}
int sdhc_test_init_card(void)
{
int ret;
uint32_t arg;
uint16_t rca;
uint16_t clk_setting;
uint8_t buffer[512];
/* Enable SD card clock */
SDHC0->CCR.bit.SDCLKEN = 1;
/* Enable SD bus power */
SDHC0->PCR.reg = SDHC_PCR_SDBVSEL_3V3 | SDHC_PCR_SDBPWR;
/* Send command 0 */
ret = sdhc_do_cmd(SD_CMD0, 0, SDHC_CMD_RSP_TYPE_NONE);
if (ret != 0) {
return 1;
}
/* Send command 8 */
arg = (union sd_cmd8_arg){
.check_pattern = 0xAA,
.voltage_supplied = SD_VHS_27_36
}.raw;
for (;;) {
ret = sdhc_do_cmd(SD_CMD8, arg, SDHC_CMD_RSP_TYPE_R7);
if (ret != 0) {
return 1;
}
union sd_cmd8_rsp rsp = sd_get_cmd8_rsp(SDHC0->RR);
if ((rsp.check_pattern_echo == 0xAA) &&
(rsp.voltage_accepted == SD_VHS_27_36)) {
break;
}
}
/* Initialize */
arg = (union sd_acmd41_arg){
.volt_range_3V2_3V3 = 1,
.volt_range_3V3_3V4 = 1,
.xpc = 1,
.hcs = 1
}.raw;
for (;;) {
/* Send command 55 */
ret = sdhc_do_cmd(SD_CMD55, 0, SDHC_CMD_RSP_TYPE_R1);
if (ret != 0) {
return 1;
}
/* Send send ACMD41 to set supported voltage ranges and init */
ret = sdhc_do_cmd(SD_ACMD41, arg, SDHC_CMD_RSP_TYPE_R3);
if (ret != 0) {
return 1;
}
//union sd_acmd41_rsp rsp = (union sd_acmd41_rsp){ .raw = SDHC0->RR[0].reg };
//if (rsp.busy) {
//volatile uint32_t test = SDHC0->RR[0].reg;
if (SDHC0->RR[0].reg & (UINT32_C(1) << 31)) {
break;
}
for (volatile unsigned i = 0; i < 10000; i++);
}
/* Send CMD2 to get card ID and go into IDENT state */
ret = sdhc_do_cmd(SD_CMD2, 0, SDHC_CMD_RSP_TYPE_R2);
if (ret != 0) {
return 1;
}
/* Send CMD3 to get Relative Address (RCA) for card */
ret = sdhc_do_cmd(SD_CMD3, 0, SDHC_CMD_RSP_TYPE_R6);
if (ret != 0) {
return 1;
}
union sd_cmd3_rsp rsp = sd_get_cmd3_rsp(SDHC0->RR);
rca = rsp.rca;
// Normally send CMD9 here to read Card Specific Data, skipped because we
// are using a known card for this test.
/* Send CMD7 to select card and go into TRAN state */
arg = (union sd_rca_arg){ .rca = rca }.raw;
ret = sdhc_do_cmd(SD_CMD7, arg, SDHC_CMD_RSP_TYPE_R1);
if (ret != 0) {
return 1;
}
/* Increase clock speed to 25 MHz */
SDHC0->CCR.bit.SDCLKEN = 0;
clk_setting = 100000000 / 25000000 / 2;
SDHC0->CCR.reg = (SDHC_CCR_SDCLKFSEL(clk_setting & 0xFF) |
SDHC_CCR_USDCLKFSEL((clk_setting >> 8) & 0x3) |
SDHC_CCR_INTCLKEN);
while (!SDHC0->CCR.bit.INTCLKS);
SDHC0->CCR.bit.SDCLKEN = 1;
/* Send CMD6 to switch to high speed mode */
arg = (union sd_cmd6_arg){
.function_group_1 = SD_FG1_ACCESS_MODE_SDR25,
.function_group_2 = SD_FG_NO_CHANGE,
.function_group_3 = SD_FG_NO_CHANGE,
.function_group_4 = SD_FG_NO_CHANGE,
.function_group_5 = SD_FG_NO_CHANGE,
.function_group_6 = SD_FG_NO_CHANGE,
.mode = 1
}.raw;
ret = sdhc_do_transfer(SD_CMD6, arg, 1,
sizeof(union sd_switch_function_status_rsp), buffer,
0);
if (ret != 0) {
return 1;
}
/* Switch to high speed mode and increase clock speed to 50 MHz */
SDHC0->HC1R.bit.HSEN = 1;
SDHC0->CCR.bit.SDCLKEN = 0;
clk_setting = 100000000 / 50000000 / 2;
SDHC0->CCR.reg = (SDHC_CCR_SDCLKFSEL(clk_setting & 0xFF) |
SDHC_CCR_USDCLKFSEL((clk_setting >> 8) & 0x3) |
SDHC_CCR_INTCLKEN);
while (!SDHC0->CCR.bit.INTCLKS);
SDHC0->CCR.bit.SDCLKEN = 1;
/* Send command 55 */
ret = sdhc_do_cmd(SD_CMD55, 0, SDHC_CMD_RSP_TYPE_R1);
if (ret != 0) {
return 1;
}
/* Send ACMD6 to switch to a 4 bit wide bus */
arg = (union sd_acmd6_arg){ .bus_width = SD_BUS_WIDTH_4 }.raw;
ret = sdhc_do_cmd(SD_ACMD6, arg, SDHC_CMD_RSP_TYPE_R1);
if (ret != 0) {
return 1;
}
/* We need to switch to a 4 bit bus now */
SDHC0->HC1R.bit.DW = SDHC_HC1R_DW_4BIT_Val;
// Normally send ACMD51 here to read SD Card Configuration Register (SCR),
// skipped because we are using a known card for this test.
/* Send CMD16 to set block length */
ret = sdhc_do_cmd(SD_CMD16, 512, SDHC_CMD_RSP_TYPE_R1);
if (ret != 0) {
return 1;
}
SDHC0->CCR.bit.SDCLKEN = 0;
return 0;
}
int sdhc_test_read(uint32_t address, uint16_t num_blocks,
uint8_t *buffer)
{
int ret = sdhc_do_transfer((num_blocks > 1) ? SD_CMD18 : SD_CMD17, address,
num_blocks, 512, buffer, 0);
SDHC0->CCR.bit.SDCLKEN = 0;
return ret;
}
int sdhc_test_write(uint32_t address, uint16_t num_blocks,
uint8_t *buffer)
{
int ret = sdhc_do_transfer((num_blocks > 1) ? SD_CMD25 : SD_CMD24, address,
num_blocks, 512, buffer, 1);
SDHC0->CCR.bit.SDCLKEN = 0;
return ret;
}
|
LearnersGuild/idm-api-spec
|
webpack/module.js
|
<filename>webpack/module.js<gh_stars>0
const path = require('path')
const autoprefixer = require('autoprefixer')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
module.exports = ({config, root}) => {
const sassResources = require('src/common/styles/sassResources')
const rules = [
{
test: /\.jsx?$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: config.app.hotReload ? {
plugins: [
['react-transform', {
transforms: [{
transform: 'react-transform-hmr',
imports: ['react'],
locals: ['module'],
}],
}],
],
} : {},
},
},
// global styles
{
test: /Root\.css$/,
use: _extractText([
{
loader: 'css-loader',
options: {sourceMap: true},
},
], {fallback: 'style-loader'}),
},
// react-toolbox
{
test: /\.scss$/,
use: _extractText([
{
loader: 'css-loader',
options: {
sourceMap: true,
modules: true,
localIdentName: '[name]__[local]__[hash:base64:5]',
importLoaders: 3,
}
},
{
loader: 'postcss-loader',
options: {
parsers: [autoprefixer],
sourceMap: true,
},
},
{
loader: 'sass-loader',
options: {
sourceMap: true,
data: `@import "${path.resolve(root, 'common/styles/theme.scss')}";`,
},
},
{
loader: 'sass-resources-loader',
options: {resources: sassResources},
},
], {fallback: 'style-loader'}),
include: [
path.resolve(root, 'node_modules', 'react-toolbox'),
path.resolve(root, 'common'),
]
},
// app css styles
{
test: /\.css$/,
use: _extractText([
{
loader: 'css-loader',
options: {
sourceMap: true,
modules: true,
localIdentName: '[name]__[local]__[hash:base64:5]',
importLoaders: 2,
},
}
], {fallback: 'style-loader'}),
include: [
path.resolve(root, 'common'),
],
exclude: [
path.resolve(root, 'common', 'containers', 'Root.css'),
]
},
{
test: /\.json$/,
loader: 'json-loader',
},
{
test: /\.(woff2?|ttf|eot|svg)$/,
use: {
loader: 'url-loader',
options: {limit: 10000},
},
},
]
const noParse = [
/node_modules\/google-libphonenumber\/dist/,
]
function _extractText(loaders, options = {}) {
if (config.app.minify) {
return ExtractTextPlugin.extract(Object.assign({use: loaders}, options))
}
// place fallback loader in line with others when not using ExtractTextPlugin
if (options.fallback) {
loaders.unshift(options.fallback)
}
return loaders
}
return {rules, noParse}
}
|
KaliedaRik/Scrawl-canvas
|
source/mixin/dom.js
|
// # DOM mixin
// This mixin builds on the base and position mixins to give DOM elements (Scrawl-canvas [Stack](../factory/stack.html), [Canvas](../factory/canvas.html) and [Element](../factory/element.html) wrapper objects) the ability to act as __artefacts__ in a Scrawl-canvas stack environment.
// + Absolute and relative positioning and dimensioning
// + Positioning in the 3rd (z) dimension (absolute values only)
// + Use other artefacts as `pivot`, `mimic` and `path` objects
// + Allow other artefacts to use Element objects as their pivot or mimic object
// + Track the real (3D) positions of the DOM element's corners, so that they can participate in collision detection functionality (for example, drag-and-drop)
// + Allow other artefacts to use Element object corners as their pivot reference
// + 3D-rotation management - extending beyond `roll` (z) rotation to include `pitch` (x) and `yaw` (y) rotations
// + DOM element class and CSS management
// + Real-time localized mouse/pointer cursor coordinate tracking
// #### Imports
import { constructors, artefact } from '../core/library.js';
import { mergeOver, pushUnique, removeItem, isa_obj, isa_dom, isa_quaternion, xt, xta, λnull, Ωempty } from '../core/utilities.js';
import { uiSubscribedElements, currentCorePosition, applyCoreResizeListener, addLocalMouseMoveListener, removeLocalMouseMoveListener } from '../core/userInteraction.js';
import { addDomShowElement, setDomShowRequired, domShow } from '../core/document.js';
import { makeQuaternion, requestQuaternion, releaseQuaternion } from '../factory/quaternion.js';
import positionMix from '../mixin/position.js';
import deltaMix from './delta.js';
import pivotMix from './pivot.js';
import mimicMix from './mimic.js';
import pathMix from './path.js';
import anchorMix from '../mixin/anchor.js';
// #### Export function
export default function (P = Ωempty) {
// #### Mixins
// + [position](../mixin/position.html)
// + [delta](../mixin/delta.html)
// + [pivot](../mixin/pivot.html)
// + [mimic](../mixin/mimic.html)
// + [path](../mixin/path.html)
// + [anchor](../mixin/anchor.html)
P = positionMix(P);
P = deltaMix(P);
P = pivotMix(P);
P = mimicMix(P);
P = pathMix(P);
P = anchorMix(P);
// #### Shared attributes
// TODO: we need to test how various Javascript frameworks interact with Scrawl-canvas functionality in this area. In particular: [vue.js](https://vuejs.org/); [React](https://reactjs.org/); [Svelte](https://svelte.dev/)
let defaultAttributes = {
// __domElement__ - Wrapper objects reference handle to its DOM element
domElement: '',
// __pitch__, __yaw__ - rotation management in the `x` and `y` axes, to go with the __roll__ attribute (for `z` axis rotation) defined in the position mixin. Like roll, values should be Numbers representing ___degrees___ (not radians)
pitch: 0,
yaw: 0,
// __offsetZ__ - Number - unlike the X and Y offsets, offsetZ can only ever be a number as there is no 3d box (as such) to act as a length for relative N% strings (and 'front', 'center', 'back' strings would be equally nonsensical)
offsetZ: 0,
// __css__ - a Javascript Object to hold key:value CSS values. Scrawl-canvas does not track any non-positioning-related CSS attributes; this attribute is a convenience function to allow developers to add CSS to the DOM element's `style` attribute.
// + Styles added to DOM elements using this object are applied directly to the element, thus having precedence over all other CSS declarations such as those included in a <style> tag in the document, or in a CSS style sheet file.
css: null,
// __classes__ - a String representation of the DOM element's `classNode` attribute.
classes: '',
// __position__ - a String representation of the DOM element's `position` attribute.
// + by default all Scrawl-canvas Stack, Canvas and Element wrapper DOM elements are given a position value of `absolute`
// + root Stack and Canvas wrappers have a position value of `relative` - this is to make sure their DOM elements remain in the document flow, thus attempting to minimize Scrawl-canvas's impact on the wider environment
// + other possible values - except `static` - will be respected if they are explicitly set on the DOM elements prior to Scrawl-canvas initialization.
position: 'absolute',
// __checkForResize__ - Boolean - automatically update stuff when the element changes its dimensions
// + triggers as part of the [userInteraction](../core/userInteraction.html) `updateUiSubscribedElement` functionality
checkForResize: false,
// __trackHere__ - String flag - when set, Scrawl-canvas will track mouse/touch cursor's _local_ position (relative to top-left corner) over the wrapper's DOM element
// + Stack and Canvas wrappers have this flag set to true `subscribe`, by default
// + Element wrappers default to false `''`, as expected
// + The value can also be set to `'local'`, which will set up a local listener to help track mouse movements across a 3d rotated DOM element
// + ___BE AWARE___ that setting the value to `'local'` is an _experimental technology!_ There are a number of issues surrounding the functionality - principally that we lose the ability to dynamically resize the rotated element/canvas: changing the element's dimensions will lead to inaccuracies in mouse cursor positioning!
trackHere: '',
// __activePadding__ - Number - if the `trackHere` attribute is set to `'local'` then the here object will generally remain true whatever the position of the mouse cursor - this is because here coordinates are only updated as the cursor moves over the element, not when it moves beyond its borders. The activePadding Number supplies a padding area along the inside edge of the element - if the mouse moves into this area then a the here.active boolean will become false. This is not foolproof because it will often miss a rapidly moving cursor!
activePadding: 5,
// __domAttributes__ - pseudo-attribute which is not retained by the wrapper object. See `updateDomAttributes` function below for details on how to use this functionality when creating or updating (via `set`), for example, Element objects
};
P.defs = mergeOver(P.defs, defaultAttributes);
// #### Packet management
P.packetExclusions = pushUnique(P.packetExclusions, ['domElement', 'pathCorners', 'rotation']);
P.packetFunctions = pushUnique(P.packetFunctions, ['onEnter', 'onLeave', 'onDown', 'onUp']);
// `processDOMPacketOut` - internal helper function
P.processDOMPacketOut = function (key, value, includes) {
return this.processFactoryPacketOut(key, value, includes);
};
// `processFactoryPacketOut` - internal helper function
P.processFactoryPacketOut = function (key, value, includes) {
let result = true;
if(includes.indexOf(key) < 0 && value === this.defs[key]) result = false;
return result;
};
// `finalizePacketOut` - internal helper function
P.finalizePacketOut = function (copy, items) {
if (isa_dom(this.domElement)) {
let el = this.domElement;
let mynode = el.cloneNode(true);
let kids = mynode.querySelectorAll('[data-corner-div="sc"]');
kids.forEach(kid => mynode.removeChild(kid));
copy.outerHTML = mynode.outerHTML;
copy.host = el.parentElement.id;
}
copy = this.handlePacketAnchor(copy, items);
return copy;
};
// #### Clone management
// `postCloneAction` - internal helper function
P.postCloneAction = function(clone, items) {
if (this.onEnter) clone.onEnter = this.onEnter;
if (this.onLeave) clone.onLeave = this.onLeave;
if (this.onDown) clone.onDown = this.onDown;
if (this.onUp) clone.onUp = this.onUp;
return clone;
};
// #### Kill management
// No additional kill functionality required
// #### Get, Set, deltaSet
let S = P.setters,
D = P.deltaSetters;
S.trackHere = function(val) {
if (xt(val)) {
if (val) {
pushUnique(uiSubscribedElements, this.name);
if (val === 'local') addLocalMouseMoveListener(this);
}
else {
removeItem(uiSubscribedElements, this.name);
removeLocalMouseMoveListener(this);
}
this.trackHere = val;
}
};
// `position`
S.position = function (item) {
this.position = item;
this.dirtyPosition = true;
};
// `visibility`
S.visibility = function (item) {
this.visibility = item;
this.dirtyVisibility = true;
};
// `offsetZ`
S.offsetZ = function (item) {
this.offsetZ = item;
this.dirtyOffsetZ = true;
};
D.offsetZ = function (item) {
this.offsetZ += item;
this.dirtyOffsetZ = true;
};
// `roll`
S.roll = function (item) {
this.roll = this.checkRotationAngle(item);
this.dirtyRotation = true;
};
D.roll = function (item) {
this.roll = this.checkRotationAngle(this.roll + item);
this.dirtyRotation = true;
};
// `pitch`
S.pitch = function (item) {
this.pitch = this.checkRotationAngle(item);
this.dirtyRotation = true;
};
D.pitch = function (item) {
this.pitch = this.checkRotationAngle(this.pitch + item);
this.dirtyRotation = true;
};
// `yaw`
S.yaw = function (item) {
this.yaw = this.checkRotationAngle(item);
this.dirtyRotation = true;
};
D.yaw = function (item) {
this.yaw = this.checkRotationAngle(this.yaw + item);
this.dirtyRotation = true;
};
// `css`
S.css = function (item) {
this.css = (this.css) ? mergeOver(this.css, item) : item;
this.dirtyCss = true;
};
// `classes`
S.classes = function (item) {
this.classes = item;
this.dirtyClasses = true;
};
// `domAttributes` - see `updateDomAttributes` below
S.domAttributes = function (item) {
this.updateDomAttributes(item);
}
// #### Prototype functions
// `checkRotationAngle` - internal function - a quick check for rotational (`roll`, `pitch`, `yaw`) setter/deltaSetter functionality. Attempts to keep values of these attributes between -360 and + 360
P.checkRotationAngle = function (angle) {
if (angle < -180 || angle > 180) {
angle += (angle > 0) ? -360 : 360;
}
return angle;
};
// `updateDomAttributes` - DOM wrapper objects do not keep track of their DOM element attribute values; this function is a convenience function to make updating those attributes a bit easier. Function arguments can be one of:
// + `(attribute-String, value)`
// + `({attribute-String: value, attribute-String: value, etc})`
P.updateDomAttributes = function (items, value) {
if (this.domElement) {
let el = this.domElement;
if (items.substring && xt(value)) {
if (value) el.setAttribute(items, value);
else el.removeAttribute(items);
}
else if (isa_obj(items)) {
Object.entries(items).forEach(([item, val]) => {
if (val) el.setAttribute(item, val);
else el.removeAttribute(item);
});
}
}
return this;
};
// `initializeDomLayout` - internal function
// + Used by factory constructors to help wrap DOM elements in a Stack, Canvas or Element wrapper
// + TODO - there's a lot of improvements we can do here - the aim should be to create the wrapper object and update the objects DOM element's style and dimensions attributes - specifically shifting `position` from "static" to "absolute" - in a way that does not disturb the page view in any way whatsoever (pixel-perfect!) so website visitors are completely unaware that the work has taken place
P.initializeDomLayout = function (items) {
let el = items.domElement,
elStyle = el.style;
elStyle.boxSizing = 'border-box';
if (el && items.setInitialDimensions) {
let dims = el.getBoundingClientRect(),
trans = el.style.transform,
transOrigin = el.style.transformOrigin,
host = false,
hostDims;
if (items && items.host) {
host = items.host;
if (host.substring && artefact[host]) host = artefact[host];
}
// TODO - discover scale
// discover dimensions (width, height)
this.currentDimensions[0] = dims.width;
this.currentDimensions[1] = dims.height;
items.width = dims.width;
items.height = dims.height;
// recover classes already assigned to the element
if (el.className) items.classes = el.className;
// go with lock defaults - no work required
// discover start (boundingClientRect - will be the difference between this object and its host (parent) object 'top' and 'left' values)
if (host && host.domElement) {
hostDims = host.domElement.getBoundingClientRect();
if (hostDims) {
items.startX = dims.left - hostDims.left;
items.startY = dims.top - hostDims.top;
}
}
// TODO go with offset defaults - though may be worthwhile checking if the translate style has been set?
// TODO discover handle (transform, transformOrigin)
// TODO go with rotation (pitch, yaw, roll) defaults - no further work required?
// for Stack artefacts only, discover perspective and perspective-origin values
if (this.type === 'Stack') {
// TODO - currently assumes all lengths supplied are in px - need a way to calculate non-px values
if (!xt(items.perspective) && !xt(items.perspectiveZ)) {
// TODO - this isn't working! see Demo DOM 003 where attempting to set the perspective in CSS causes the demo to fail
// + Workaround is to explicitly set the stack's perspectiveZ value in Javascript
items.perspectiveZ = (xt(elStyle.perspective) && elStyle.perspective) ? parseFloat(elStyle.perspective) : 0;
}
let perspectiveOrigin = elStyle.perspectiveOrigin;
if (perspectiveOrigin.length) {
perspectiveOrigin = perspectiveOrigin.split(' ');
if (perspectiveOrigin.length > 0 && !xt(items.perspective) && !xt(items.perspectiveX)) items.perspectiveX = perspectiveOrigin[0];
if (!xt(items.perspective) && !xt(items.perspectiveY)) {
if (perspectiveOrigin.length > 1) items.perspectiveY = perspectiveOrigin[1];
else items.perspectiveY = perspectiveOrigin[0];
}
}
}
}
};
// ##### DOM element class attribute management
// `addClasses`
P.addClasses = function (item) {
if (item.substring) {
let classes = this.classes;
classes += ` ${item}`;
classes = classes.trim();
classes = classes.replace(/[\s\uFEFF\xA0]+/g, ' ');
if (classes !== this.classes) {
this.classes = classes;
this.dirtyClasses = true;
}
}
return this;
};
// `removeClasses`
P.removeClasses = function (item) {
if (item.substring) {
let classes = this.classes,
targets = item.split(),
search;
targets.forEach(cls => {
search = new RegExp(' ?' + cls + ' ?');
classes = classes.replace(search, ' ');
classes = classes.trim();
classes = classes.replace(/[\s\uFEFF\xA0]+/g, ' ');
});
if (classes !== this.classes) {
this.classes = classes;
this.dirtyClasses = true;
}
}
return this;
};
// ##### DOM element corners management
// Scrawl-canvas keeps track of its DOM wrapper element's positions by creating four zero-dimension <div> elements and adding them as absolutely positioned children to the element. We can then get these children to report on their real coordinates (even when the parent is 3D rotated) by calling `getClientRects` on them.
// `addPathCorners`
P.addPathCorners = function () {
if (this.domElement && !this.noUserInteraction) {
let pointMaker = function () {
let p = document.createElement('div');
p.style.width = 0;
p.style.height = 0;
p.style.position = 'absolute';
p.style.margin = 0;
p.style.border = 0;
p.style.padding = 0;
return p;
};
let tl = pointMaker(),
tr = pointMaker(),
br = pointMaker(),
bl = pointMaker();
tl.style.top = '0%';
tl.style.left = '0%';
tl.setAttribute('data-corner-div', 'sc');
tr.style.top = '0%';
tr.style.left = '100%';
tr.setAttribute('data-corner-div', 'sc');
br.style.top = '100%';
br.style.left = '100%';
br.setAttribute('data-corner-div', 'sc');
bl.style.top = '100%';
bl.style.left = '0%';
bl.setAttribute('data-corner-div', 'sc');
let el = this.domElement;
el.appendChild(tl);
el.appendChild(tr);
el.appendChild(br);
el.appendChild(bl);
this.pathCorners.push(tl);
this.pathCorners.push(tr);
this.pathCorners.push(br);
this.pathCorners.push(bl);
if (!this.currentCornersData) this.currentCornersData = [];
}
return this;
};
// `checkCornerPositions`
P.checkCornerPositions = function (corner) {
let pathCorners = this.pathCorners;
if (pathCorners.length === 4) {
let here = this.getHere(),
x = currentCorePosition.scrollX - (here.offsetX || 0),
y = currentCorePosition.scrollY - (here.offsetY || 0),
round = Math.round,
results = [],
client;
const cornerPush = function (c) {
let coord = c[0];
if (coord) {
results.push(round(coord.left + x));
results.push(round(coord.top + y));
}
else results.push(0, 0);
};
switch (corner) {
case 'topLeft' :
client = pathCorners[0].getClientRects();
cornerPush(client);
return results;
case 'topRight' :
client = pathCorners[1].getClientRects();
cornerPush(client);
return results;
case 'bottomRight' :
client = pathCorners[2].getClientRects();
cornerPush(client);
return results;
case 'bottomLeft' :
client = pathCorners[3].getClientRects();
cornerPush(client);
return results;
default :
pathCorners.forEach(point => {
if (isa_dom(point)) {
client = point.getClientRects();
cornerPush(client);
}
});
return results;
}
}
}
// `getCornerCoordinate`
const cornerCoordinateLabels = ['topLeft', 'topRight', 'bottomRight', 'bottomLeft'];
P.getCornerCoordinate = function (corner) {
if (cornerCoordinateLabels.indexOf(corner) >= 0) return this.checkCornerPositions(corner);
else return [].concat(this.currentStampPosition);
};
// ##### Collision detection
// `cleanPathObject`
// + Scrawl-canvas uses the DOM wrapper element's child <div> elements' position coordinates to build a `Path2D object` (which will be some form of trapezium).
// + We can now perform collision detection in the same way as we do for Canvas-based entity objects using `CanvasRenderingContext2D.isPointInPath`
P.cleanPathObject = function () {
this.dirtyPathObject = false;
if (this.domElement && !this.noUserInteraction) {
if (!this.pathCorners.length) this.addPathCorners();
if (!this.currentCornersData) this.currentCornersData = [];
let cornerData = this.currentCornersData;
cornerData.length = 0;
cornerData.push(...this.checkCornerPositions());
let p = this.pathObject = new Path2D();
p.moveTo(cornerData[0], cornerData[1]);
p.lineTo(cornerData[2], cornerData[3]);
p.lineTo(cornerData[4], cornerData[5]);
p.lineTo(cornerData[6], cornerData[7]);
p.closePath();
}
};
// `checkHit`
P.checkHit = function (items = [], mycell) {
if (this.noUserInteraction) return false;
if (!this.pathObject || this.dirtyPathObject) this.cleanPathObject();
let tests = (!Array.isArray(items)) ? [items] : items,
poolCellFlag = false;
if (!mycell) {
mycell = requestCell();
poolCellFlag = true;
}
let engine = mycell.engine,
stamp = this.currentStampPosition,
x = stamp[0],
y = stamp[1],
tx, ty;
if (tests.some(test => {
if (Array.isArray(test)) {
tx = test[0];
ty = test[1];
}
else if (xta(test, test.x, test.y)) {
tx = test.x;
ty = test.y;
}
else return false;
if (!tx.toFixed || !ty.toFixed || isNaN(tx) || isNaN(ty)) return false;
return engine.isPointInPath(this.pathObject, tx, ty);
}, this)) {
if (poolCellFlag) releaseCell(mycell);
return {
x: tx,
y: ty,
artefact: this,
};
}
if (poolCellFlag) releaseCell(mycell);
return false;
};
// ##### Display cycle functionality
// `cleanRotation`
P.cleanRotation = function () {
this.dirtyRotation = false;
if (!this.rotation || !isa_quaternion(this.rotation)) this.rotation = makeQuaternion();
if (!this.currentRotation || !isa_quaternion(this.rotation)) this.currentRotation = makeQuaternion();
let calculatedRotation = this.rotation;
calculatedRotation.setFromEuler({
pitch: this.pitch || 0,
yaw: this.yaw || 0,
roll: this.roll || 0,
});
if (calculatedRotation.getMagnitude() !== 1) calculatedRotation.normalize();
let processedRotation = requestQuaternion(),
path = this.path,
mimic = this.mimic,
pivot = this.pivot,
lock = this.lockTo;
if (path && lock.indexOf('path') >= 0) {
processedRotation.set(calculatedRotation);
// TODO check to see if path roll needs to be added
}
else if (mimic && this.useMimicRotation && lock.indexOf('mimic') >= 0) {
if (xt(mimic.currentRotation)) {
processedRotation.set(mimic.currentRotation);
if (this.addOwnRotationToMimic) processedRotation.quaternionRotate(calculatedRotation);
}
else this.dirtyMimicRotation = true;
}
else {
processedRotation.set(calculatedRotation);
if (pivot && this.addPivotRotation && lock.indexOf('pivot') >= 0) {
if (xt(pivot.currentRotation)) processedRotation.quaternionRotate(pivot.currentRotation);
else this.dirtyPivotRotation = true;
}
}
this.currentRotation.set(processedRotation);
releaseQuaternion(processedRotation);
this.dirtyPositionSubscribers = true;
if (this.mimicked && this.mimicked.length) this.dirtyMimicRotation = true;
};
// `cleanOffsetZ`
P.cleanOffsetZ = function () {
// nothing to do here - function only exists in case we need to do stuff in future Scrawl-canvas version
this.dirtyOffsetZ = false;
};
// `cleanContent`
P.cleanContent = function () {
this.dirtyContent = false;
let el = this.domElement;
if (el) this.dirtyDimensions = true;
};
// `cleanDisplayShape` - overwritten in Stack and Canvas artefacts via the displayShape mixin
P.cleanDisplayShape = λnull;
// `cleanDisplayArea` - overwritten in Stack and Canvas artefacts via the displayShape mixin
P.cleanDisplayArea = λnull;
// `prepareStamp` - check all the dirty flags and call the appropriate `clean` functions if they are set
P.prepareStamp = function () {
if (this.dirtyScale || this.dirtyDimensions || this.dirtyStart || this.dirtyOffset || this.dirtyHandle || this.dirtyRotation) this.dirtyPathObject = true;
if (this.dirtyContent) this.cleanContent();
if (this.dirtyScale) this.cleanScale();
if (this.dirtyDimensions) this.cleanDimensions();
if (this.dirtyDisplayArea) this.cleanDisplayArea();
if (this.dirtyDisplayShape) this.cleanDisplayShape();
if (this.dirtyLock) this.cleanLock();
if (this.dirtyStart) this.cleanStart();
if (this.dirtyOffset) this.cleanOffset();
if (this.dirtyOffsetZ) this.cleanOffsetZ();
if (this.dirtyHandle) this.cleanHandle();
if (this.dirtyRotation) this.cleanRotation();
if (this.isBeingDragged || this.lockTo.indexOf('mouse') >= 0 || this.lockTo.indexOf('particle') >= 0) {
this.dirtyStampPositions = true;
this.dirtyStampHandlePositions = true;
}
if (this.pivoted.length) this.dirtyStampPositions = true;
if (this.dirtyStampPositions) this.cleanStampPositions();
if (this.dirtyStampHandlePositions) this.cleanStampHandlePositions();
if (this.dirtyPathObject) this.cleanPathObject();
};
// `stamp` - builds a set of Strings which can then be applied to the DOM wrapper's element's `style` attribute.
// + The functionality for performing the update is defined in the [document](../core/document.html) module's `domShow` function, which will be called for each DOM-based artefact during the 'show' stage of the Display cycle
// + Function returns a promise
//
// Only DOM elements whose attribute values have changed will be updated - as made clear by setting the appropriate dirty flags. Affected style attributes are:
// + `perspectiveOrigin` and `perspective` - Stack wrappers only
// + `position` (relative vs absolute, not position within a Stack)
// + `width` and `height` - for dimensions
// + `transformOrigin` - relating to wrapper `handle` values
// + `transform` - for positioning and rotation within a Stack element
// + `display` - for visibility
P.stamp = function () {
// do not process if the DOM element is missing
if (!this.domElement) return false;
// calculate transform strings on each iteration
let [stampX, stampY] = this.currentStampPosition,
[handleX, handleY] = this.currentStampHandlePosition,
scale = this.currentScale;
let rotation = this.currentRotation,
v, vx, vy, vz, angle;
let nTransformOrigin = `${handleX}px ${handleY}px 0`,
nTransform = `translate(${stampX - handleX}px,${stampY - handleY}px)`;
if (this.yaw || this.pitch || this.roll || (this.pivot && this.addPivotRotation) || (this.mimic && this.useMimicRotation) || (this.path && this.addPathRotation)) {
v = rotation.v;
vx = v.x;
vy = v.y;
vz = v.z;
angle = rotation.getAngle(false);
nTransform += ` rotate3d(${vx},${vy},${vz},${angle}rad)`;
}
if (this.offsetZ) nTransform += ` translateZ(${this.offsetZ}px)`;
if (scale !== 1) nTransform += ` scale(${scale},${scale})`;
if (nTransform !== this.currentTransformString) {
this.currentTransformString = nTransform;
this.dirtyTransform = true;
}
if (nTransformOrigin !== this.currentTransformOriginString) {
this.currentTransformOriginString = nTransformOrigin;
this.dirtyTransformOrigin = true;
}
// determine whether there is a need to trigger a redraw of the DOM element
if (this.dirtyTransform || this.dirtyPerspective || this.dirtyPosition || this.dirtyDomDimensions || this.dirtyTransformOrigin || this.dirtyVisibility || this.dirtyCss || this.dirtyClasses || this.domShowRequired) {
addDomShowElement(this.name);
setDomShowRequired(true);
}
// update artefacts subscribed to this artefact (using it as their pivot or mimic source), if required
if (this.dirtyPositionSubscribers) this.updatePositionSubscribers();
// if this artefact's pivot or mimic source was playing up, reset appropriate dirty flags so we can try and fix on next iteration
if(this.dirtyMimicRotation || this.dirtyPivotRotation) {
this.dirtyMimicRotation = false;
this.dirtyPivotRotation = false;
this.dirtyRotation = true;
}
if(this.dirtyMimicScale) {
this.dirtyMimicScale = false;
this.dirtyScale = true;
}
};
// `apply`
// + I really don't like this functionality - see if we can purge it from the code base?
P.apply = function() {
applyCoreResizeListener();
this.prepareStamp();
this.stamp()
domShow(this.name);
this.dirtyPathObject = true;
this.cleanPathObject();
};
// Return the prototype
return P;
};
|
troy0820/openshift-azure
|
vendor/github.com/openshift/origin/pkg/templateservicebroker/servicebroker/unbind_test.go
|
<gh_stars>10-100
package servicebroker
import (
"net/http"
"reflect"
"testing"
templatev1 "github.com/openshift/api/template/v1"
faketemplatev1 "github.com/openshift/client-go/template/clientset/versioned/typed/template/v1/fake"
"github.com/openshift/origin/pkg/templateservicebroker/openservicebroker/api"
authorizationv1 "k8s.io/api/authorization/v1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apiserver/pkg/authentication/user"
"k8s.io/client-go/kubernetes/fake"
clienttesting "k8s.io/client-go/testing"
)
func TestUnbindConflict(t *testing.T) {
fakekc := &fake.Clientset{}
fakekc.AddReactor("create", "subjectaccessreviews", func(action clienttesting.Action) (bool, runtime.Object, error) {
return true, &authorizationv1.SubjectAccessReview{Status: authorizationv1.SubjectAccessReviewStatus{Allowed: true}}, nil
})
faketemplateclient := &faketemplatev1.FakeTemplateV1{Fake: &clienttesting.Fake{}}
faketemplateclient.AddReactor("get", "brokertemplateinstances", func(action clienttesting.Action) (bool, runtime.Object, error) {
return true, &templatev1.BrokerTemplateInstance{
Spec: templatev1.BrokerTemplateInstanceSpec{
BindingIDs: []string{"bindingid"},
},
}, nil
})
var conflict int
faketemplateclient.AddReactor("update", "brokertemplateinstances", func(action clienttesting.Action) (bool, runtime.Object, error) {
if conflict > 0 {
conflict--
return true, nil, errors.NewConflict(templatev1.Resource("brokertemplateinstance"), "", nil)
}
return true, &templatev1.BrokerTemplateInstance{}, nil
})
b := &Broker{
templateclient: faketemplateclient,
kc: fakekc,
}
// after 5 conflicts we give up and return ConcurrencyError
conflict = 5
resp := b.Unbind(&user.DefaultInfo{}, "", "bindingid")
if !reflect.DeepEqual(resp, api.NewResponse(http.StatusUnprocessableEntity, &api.ConcurrencyError, nil)) {
t.Errorf("got response %#v, expected 422/ConcurrencyError", *resp)
}
// with fewer conflicts, we should get there in the end
conflict = 4
resp = b.Unbind(&user.DefaultInfo{}, "", "bindingid")
if !reflect.DeepEqual(resp, api.NewResponse(http.StatusOK, &api.UnbindResponse{}, nil)) {
t.Errorf("got response %#v, expected 200", *resp)
}
// also check that Gone is returned appropriately
resp = b.Unbind(&user.DefaultInfo{}, "", "doesnotexist")
if !reflect.DeepEqual(resp, api.NewResponse(http.StatusGone, &api.UnbindResponse{}, nil)) {
t.Errorf("got response %#v, expected 410", *resp)
}
}
|
EvilBabyDemon/RubberDucky
|
src/main/java/commandHandling/commands/publicCommands/place/PlaceVerify.java
|
<filename>src/main/java/commandHandling/commands/publicCommands/place/PlaceVerify.java<gh_stars>0
package commandHandling.commands.publicCommands.place;
import services.PlaceWebSocket;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.LinkedList;
public class PlaceVerify {
private final PlaceData placeData;
public PlaceVerify(PlaceData placeData) {
this.placeData = placeData;
main();
}
private void main() {
BufferedImage place = PlaceWebSocket.getImage(true);
LinkedList<String> fixingQ = new LinkedList<>();
for (int i = 0; i < placeData.drawnPixels && i < placeData.pixels.size(); i++) {
String command = placeData.pixels.get(i);
String[] split = command.split(" ");
int x = Integer.parseInt(split[2]);
int y = Integer.parseInt(split[3]);
Color colour = Color.decode(split[4]);
Color placeC = new Color(place.getRGB(x, y));
if (placeC.getAlpha() != 0 && !compareColors(colour, placeC)) {
fixingQ.add(command);
}
}
placeData.fixingQ = fixingQ;
}
private boolean compareColors (Color img, Color place) {
return img.getRed() == place.getRed() &&
img.getGreen() == place.getGreen() &&
img.getBlue() == place.getBlue();
}
}
|
mattclegg/homebrew-emacs
|
Formula/timerfunctions.rb
|
require File.expand_path("../../Homebrew/emacs_formula", __FILE__)
class Timerfunctions < EmacsFormula
desc "Enhanced version of timer.el"
homepage "http://elpa.gnu.org/packages/timerfunctions.html"
url "http://elpa.gnu.org/packages/timerfunctions-1.4.2.el"
sha256 "854bc3716a77db2bc5f7f264166f6156e7a3b3c3296e1adface15f04b6455888"
depends_on EmacsRequirement
def install
mv "timerfunctions-#{version}.el", "timerfunctions.el"
byte_compile "timerfunctions.el"
(share/"emacs/site-lisp/timerfunctions").install "timerfunctions.el",
"timerfunctions.elc"
end
def caveats; <<~EOS
Add the following to your init file:
(require 'timerfunctions)
EOS
end
test do
(testpath/"test.el").write <<~EOS
(add-to-list 'load-path "#{share}/emacs/site-lisp/timerfunctions")
(load "timerfunctions")
(print timerfunctions-version)
EOS
assert_match version.to_s, shell_output("emacs -Q --batch -l #{testpath}/test.el").strip
end
end
|
xinming365/LeetCode
|
187_find_repeat_DNA.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2022/1/27 7:18 下午
# @Author : xinming
# @File : 187_find_repeat_DNA.py
from typing import List
from collections import defaultdict
class Solution:
def findRepeatedDnaSequences(self, s: str) -> List[str]:
l = len(s)
L=10
dict_dna = defaultdict(int) #
# 例如11个A,遍历两次。
for i in range(l-L+1):
dict_dna[s[i:i+L]]+=1
out = []
for k, v in dict_dna.items():
if v>1:
out.append(k)
return out
if __name__=='__main__':
s = "AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT"
out = Solution().findRepeatedDnaSequences(s)
print(out)
|
cehbrecht/md-ingestion
|
tests/community/test_egidatahub.py
|
import os
from mdingestion.community.egidatahub import EgidatahubDublinCore
from tests.common import TESTDATA_DIR
def test_dublin_core():
xmlfile = os.path.join(TESTDATA_DIR, 'egidatahub-oai_dc', 'SET_1', 'xml', '3d0c278c-47d3-5dee-9a56-43b1a5b5d3dd.xml') # noqa
reader = EgidatahubDublinCore()
doc = reader.read(xmlfile)
assert 'EGI-DataHub' in doc.title[0]
assert 'Other' in doc.discipline
assert 'https://creativecommons.org/licenses/by-nc-nd/4.0/' in doc.rights
assert 'http://hdl.handle.net/21.T15999/Fw2lYnA' == doc.pid
assert 'https://datahub.egi.eu/share/dec6359cdf03b3a7405ac75b70a4cecb' == doc.source
assert doc.open_access is True
assert 'EGI' in doc.keywords
|
mykhsystematic/hapi-fhir
|
hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java
|
package ca.uhn.fhir.rest.server.interceptor;
/*
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.HashSet;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
/**
* This interceptor causes the server to reject invocations for HTTP methods
* other than those supported by the server with an HTTP 405. This is a requirement
* of some security assessments.
*/
public class BanUnsupportedHttpMethodsInterceptor extends InterceptorAdapter {
private Set<RequestTypeEnum> myAllowedMethods = new HashSet<RequestTypeEnum>();
public BanUnsupportedHttpMethodsInterceptor() {
myAllowedMethods.add(RequestTypeEnum.GET);
myAllowedMethods.add(RequestTypeEnum.OPTIONS);
myAllowedMethods.add(RequestTypeEnum.DELETE);
myAllowedMethods.add(RequestTypeEnum.PUT);
myAllowedMethods.add(RequestTypeEnum.POST);
myAllowedMethods.add(RequestTypeEnum.PATCH);
myAllowedMethods.add(RequestTypeEnum.HEAD);
}
@Override
public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) {
RequestTypeEnum requestType = RequestTypeEnum.valueOf(theRequest.getMethod());
if (myAllowedMethods.contains(requestType)) {
return true;
}
throw new MethodNotAllowedException("Method not supported: " + theRequest.getMethod());
}
}
|
making-books-ren-today/test_eval_3_shxco
|
mep/books/migrations/0006_item_creators.py
|
<reponame>making-books-ren-today/test_eval_3_shxco<filename>mep/books/migrations/0006_item_creators.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-01 20:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('people', '0008_person_is_organization'),
('books', '0005_item_mepid_allow_null'),
]
operations = [
migrations.AddField(
model_name='item',
name='creators',
field=models.ManyToManyField(through='books.Creator', to='people.Person'),
),
]
|
danjpgriffin/totallylazy
|
test/com/googlecode/totallylazy/CharactersTest.java
|
<gh_stars>100-1000
package com.googlecode.totallylazy;
import org.junit.Ignore;
import org.junit.Test;
import static com.googlecode.totallylazy.Characters.ASCII;
import static com.googlecode.totallylazy.Characters.UTF16;
import static com.googlecode.totallylazy.Characters.UTF8;
import static com.googlecode.totallylazy.Characters.characters;
import static com.googlecode.totallylazy.Characters.in;
import static com.googlecode.totallylazy.Characters.range;
import static com.googlecode.totallylazy.Characters.set;
import static com.googlecode.totallylazy.Files.file;
import static com.googlecode.totallylazy.Files.workingDirectory;
import static com.googlecode.totallylazy.matchers.Matchers.is;
import static java.lang.Character.MAX_VALUE;
import static java.lang.Character.MIN_VALUE;
import static org.hamcrest.MatcherAssert.assertThat;
public class CharactersTest {
@Test
public void canDetectedIfACharsetContainsACharacter() throws Exception {
assertThat(characters("λΣ").forAll(in(UTF8)), is(true));
assertThat(characters("λΣ").forAll(in(ASCII)), is(false));
}
@Test
public void canCreateARangeOfCharacters() throws Exception {
Sequence<Character> range = range(MIN_VALUE, MAX_VALUE);
assertThat(range.size(), is(65536));
assertThat(range.contains('λ'), is(true));
}
@Test
public void canConvertACharsetIntoARealSet() throws Exception {
assertThat(set(UTF8).contains('λ'), is(true));
}
@Test
@Ignore
public void canDetectValidJavaIdentifier() throws Exception {
String chars = characters(UTF16).filter(Characters.identifierStart).toString(" ");
Files.write(chars.getBytes(UTF16), file(workingDirectory(), "javaidentifiers.txt"));
}
}
|
nitspalash/blehx_app
|
js/components/sale/index.js
|
import React, { Component } from "react";
import { Image, View, StatusBar, TouchableOpacity,ScrollView,AsyncStorage } from "react-native";
import { Container,Spinner, Header,Thumbnail, Title, Button, Icon, Tabs, Tab, Right, Left, Body, Content,ListItem ,List,Text} from 'native-base';
import ResponsiveImage from 'react-native-responsive-image';
import PropTypes from 'prop-types';
import {connect} from 'react-redux';
import {productlist} from '../../actions/productActions';
import {productlist_bycategory} from '../../actions/productActions';
import {categorylist} from '../../actions/categoryActions';
//import Spinner from 'react-native-loading-spinner-overlay';
import styles from "./styles";
const logo = require("../../../img/splash-logo.png");
import TabOne from './tabOne';
import TabTwo from './tabTwo';
const addcart = require("../../../img/add-cart.png");
const bid = require("../../../img/bid.png");
const producti = require("../../../img/product-1.png");
const banner = require("../../../img/car.png");
class Sale extends Component {
constructor(props) {super(props)
this.state = {
visible: false,
productLists:'',
categorylists:''
}
}
// eslint-disable-line
componentWillMount(){
this.props.productlist('').then(res=>{
//alert(JSON.stringify(res));
// alert('not get');
if(res.ack=='1'){
//console.log(this.props.posts);
this.setState({visible:false});
}
})
this.props.categorylist().then(res=>{
if(res.ack == '1'){
// const category = res.category_list;
//this.setState({categorylists:category})
this.setState({visible:false});
}
})
this.serchcategory=this.serchcategory.bind(this);
this.productdetails=this.productdetails.bind(this);
//this.toggleCancel = this.toggleCancel.bind(this);
}
serchcategory(id)
{
this.ackey = id;
// alert(id)
this.props.productlist(id).then(res=>{
// alert(JSON.stringify(res));
// alert('not get');
if(res){
const product = res;
this.setState({productLists:product})
}
})
}
productdetails(id,data)
{
//alert(JSON.stringify(data[id-1].id));
// alert(id)
this.props.navigation.navigate("Details",data[id-1].id)
}
render() {
let searchListing = [];
let categoryListing = [];
//let ttt
//let ttt2
if(this.props.category)
{
for(let i = 0; i < this.props.category.length; i++){
categoryListing.push(
<View key = {i} style={{flex:1}}>
<TouchableOpacity onPress={()=>this.serchcategory(this.props.category[i].id)} style={this.ackey == this.props.category[i].id ? styles.aStyle : styles.iStyle}>
{
// this.props.category[i].image != ''?(
// <Thumbnail source={{uri: this.props.category[i].image }} style={styles.cimage} />
// ):(
<Thumbnail source={banner} style={styles.cimage}/>
//)
}
</TouchableOpacity >
<View><Text style={styles.text}>{this.props.category[i].name}</Text></View>
</View>
)
}
}
else{
categoryListing = (
<Spinner />
);
}
//alert(JSON.stringify(this.props.product.data));
if(this.props.product.data)
{
for(let i = 0; i < this.props.product.data.length; i++){
searchListing.push(<View key = {i} style={{flexDirection: 'row',paddingBottom:10}}>
<View style={styles.graybox}>
<View style={{flex:1}}>
<TouchableOpacity onPress={()=>{this.productdetails(i,this.props.product.data)}}>
<View style={{backgroundColor: "#35e1e1",flex:1,justifyContent: 'center',alignItems: 'center',}}>
{
// this.props.product.data[i].image != ''?(
// <Thumbnail source={{uri: this.props.product.data[i].image }} style={styles.pimage} />
// ):(
<Thumbnail source={producti} style={styles.pimage}/>
//)
}
</View>
</TouchableOpacity>
<View style={{padding:5}}>
<View style={{flexDirection: 'row'}}>
<View style={{flex:0.7}}><Text>${this.props.product.data[i].price}</Text></View>
<View style={{flexDirection: 'row'}}>
<Image source={addcart} style={styles.icon}/>
<Image source={bid} style={styles.icon}/>
</View>
</View>
<View style={{flexDirection: 'row'}}>
<View><Text style={styles.text}>{this.props.product.data[i].name}</Text></View>
</View>
</View>
</View>
</View>
<View style={{width:10}}></View>
{
this.props.product.data.length != i+1?(
<View style={styles.graybox}>
<View style={{flex:1}}>
<TouchableOpacity onPress={()=>{this.productdetails(i+1,this.props.product.data)}}>
<View style={{backgroundColor: "#35e1e1",flex:1,justifyContent: 'center',alignItems: 'center',}}>
{
// this.props.product.data[i+1].image != ''?(
// <Thumbnail source={{uri: this.props.product.data[i+1].image }} style={styles.pimage} />
// ):(
<Thumbnail source={producti} style={styles.pimage} />
//)
}
</View>
</TouchableOpacity>
<View style={{padding:5}}>
<View style={{flexDirection: 'row'}}>
<View style={{flex:0.7}}><Text>${this.props.product.data[i+1].price}</Text></View>
<View style={{flexDirection: 'row'}}>
<Image source={addcart} style={styles.icon}/>
<Image source={bid} style={styles.icon}/>
</View>
</View>
<View style={{flexDirection: 'row'}}>
<View><Text style={styles.text}>{this.props.product.data[i+1].name}</Text></View>
</View>
</View>
</View>
</View>
):(
<View style={styles.graybox}>
<View style={{flex:1}}>
<TouchableOpacity >
<View style={{backgroundColor: "#35e1e1",flex:1,justifyContent: 'center',alignItems: 'center',}}>
</View>
</TouchableOpacity>
</View>
</View>
)
}
</View>
)
i = i+1;
}
}
else if(this.props.product.message)
{
searchListing =<Text style={{textAlign: 'center',fontWeight: 'bold', fontSize: 18,width: 200,}}> {this.props.product.message} </Text>
}
else{ //alert();
searchListing = (
<Spinner />
);
}
return (
<Container style={styles.container}>
<Header style={{ backgroundColor: "#112D42" }} androidStatusBarColor="#062a3c"
iosBarStyle="light-content">
<Left>
<Button
transparent
onPress={() => this.props.navigation.navigate("DrawerOpen")}>
<Icon name="menu" />
</Button>
</Left>
<Body>
<Title>Home</Title>
</Body>
<Right />
</Header>
<Content >
<View style={{flexDirection: 'row',flex:1}}>
<View style={{flex:1}}><Button
title="Learn More"data
color="#000000"
accessibilityLabel="Learn more about this purple button"
style={{backgroundColor: "gray",borderBottomColor: '#47315a',
borderBottomWidth: 1}}
><Text style={{color:"#FFFFFF"}}> Product Category </Text>
</Button>
</View>
<View style={{flex:1,width:100}}>
<Button
onPress={()=>this.props.navigation.navigate("Indexmining")}
title="Learn More"
color="#841584"
accessibilityLabel="Learn more about this purple button"
style={{backgroundColor: "#f1f1f1"}}
><Text style={{color:"#000000"}}> Mining </Text>
</Button>
</View>
</View>
<View>
<View >
<View style={{flexDirection: 'row', justifyContent:'center', padding:10}}>
<ScrollView horizontal={true} contentContainerStyle={{flexGrow:1}} focusableInTouchMode={false}>
{categoryListing}
</ScrollView>
</View>
</View>
<View >
</View>
<View style={{backgroundColor: "#F1F1F1", padding:10}}>
{searchListing}
</View>
</View>
</Content>
</Container>
);
}
}
//export default Sale;
Sale.propTypes = {
auth : PropTypes.object.isRequired,
productlist:PropTypes.func.isRequired,
categorylist:PropTypes.func.isRequired,
product : PropTypes.object.isRequired,
category : PropTypes.object.isRequired
}
const mapStateToProps = (state)=>{ //alert(JSON.stringify(state.product.data));
return {
auth:state.auth,
product:state.product.data,
category : state.category.data,
}
// alert(JSON.stringify(product))
}
const mapDispatchToProps = (dispatch)=>{
return {
productlist:(id)=>dispatch(productlist(id)),
categorylist:()=>dispatch(categorylist()),
}
}
export default connect(mapStateToProps,mapDispatchToProps)(Sale);
|
falki147/GMLAST
|
test/src/TypeTest.cpp
|
#include <GMLAST/AST/ArrayOperator.hpp>
#include <GMLAST/AST/AssignStatement.hpp>
#include <GMLAST/AST/BinaryOperator.hpp>
#include <GMLAST/AST/BreakStatement.hpp>
#include <GMLAST/AST/ContinueStatement.hpp>
#include <GMLAST/AST/DeclarationStatement.hpp>
#include <GMLAST/AST/DoStatement.hpp>
#include <GMLAST/AST/DotOperator.hpp>
#include <GMLAST/AST/DoubleConstant.hpp>
#include <GMLAST/AST/EnumStatement.hpp>
#include <GMLAST/AST/Error.hpp>
#include <GMLAST/AST/ExitStatement.hpp>
#include <GMLAST/AST/ForStatement.hpp>
#include <GMLAST/AST/FunctionCall.hpp>
#include <GMLAST/AST/IfStatement.hpp>
#include <GMLAST/AST/IntConstant.hpp>
#include <GMLAST/AST/RepeatStatement.hpp>
#include <GMLAST/AST/ReturnStatement.hpp>
#include <GMLAST/AST/Statements.hpp>
#include <GMLAST/AST/StringConstant.hpp>
#include <GMLAST/AST/SwitchStatement.hpp>
#include <GMLAST/AST/UnuaryOperator.hpp>
#include <GMLAST/AST/Variable.hpp>
#include <GMLAST/AST/WhileStatement.hpp>
#include <GMLAST/AST/WithStatement.hpp>
#include <catch.hpp>
#include <memory>
template <typename T, typename... Args>
std::unique_ptr<T> MakeUnique(Args&&... args) {
return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
}
TEST_CASE("Array Operator", "[TypeTest]") {
const auto node1 = MakeUnique<GMLAST::ArrayOperator>(
GMLAST::ArrayOperator::Type::Array, MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>());
CHECK(node1->nodeType() == GMLAST::NodeType::ArrayOperator);
const auto node2 = MakeUnique<GMLAST::ArrayOperator>(
GMLAST::ArrayOperator::Type::Array, MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node2->nodeType() == GMLAST::NodeType::ArrayOperator);
}
TEST_CASE("Assign Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::AssignStatement>(
GMLAST::AssignStatement::Type::Assign, MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::AssignStatement);
}
TEST_CASE("Binary Operator", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::BinaryOperator>(
GMLAST::BinaryOperator::Type::Add, MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::BinaryOperator);
}
TEST_CASE("Break Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::BreakStatement>();
CHECK(node->nodeType() == GMLAST::NodeType::BreakStatement);
}
TEST_CASE("Continue Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::ContinueStatement>();
CHECK(node->nodeType() == GMLAST::NodeType::ContinueStatement);
}
TEST_CASE("Declaration Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::DeclarationStatement>(
false, std::vector<GMLAST::DeclarationEntry>{});
CHECK(node->nodeType() == GMLAST::NodeType::DeclarationStatement);
}
TEST_CASE("Do Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::DoStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::DoStatement);
}
TEST_CASE("Dot Operator", "[TypeTest]") {
const auto node =
MakeUnique<GMLAST::DotOperator>("", MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::DotOperator);
}
TEST_CASE("Double Constant", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::DoubleConstant>(0);
CHECK(node->nodeType() == GMLAST::NodeType::DoubleConstant);
}
TEST_CASE("Enum Statement", "[TypeTest]") {
const auto node =
MakeUnique<GMLAST::EnumStatement>("", std::vector<GMLAST::EnumEntry>{});
CHECK(node->nodeType() == GMLAST::NodeType::EnumStatement);
}
TEST_CASE("Error", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::Error>();
CHECK(node->nodeType() == GMLAST::NodeType::Error);
}
TEST_CASE("Exit Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::ExitStatement>();
CHECK(node->nodeType() == GMLAST::NodeType::ExitStatement);
}
TEST_CASE("For Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::ForStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::ForStatement);
}
TEST_CASE("Function Call", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::FunctionCall>(
"", std::vector<std::unique_ptr<GMLAST::Value>>{});
CHECK(node->nodeType() == GMLAST::NodeType::FunctionCall);
}
TEST_CASE("If Statement", "[TypeTest]") {
const auto node1 = MakeUnique<GMLAST::IfStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node1->nodeType() == GMLAST::NodeType::IfStatement);
const auto node2 = MakeUnique<GMLAST::IfStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>(),
MakeUnique<GMLAST::Error>());
CHECK(node2->nodeType() == GMLAST::NodeType::IfStatement);
}
TEST_CASE("Int Constant", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::IntConstant>(0);
CHECK(node->nodeType() == GMLAST::NodeType::IntConstant);
}
TEST_CASE("Repeat Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::RepeatStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::RepeatStatement);
}
TEST_CASE("Return Statement", "[TypeTest]") {
const auto node =
MakeUnique<GMLAST::ReturnStatement>(MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::ReturnStatement);
}
TEST_CASE("Statements", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::Statements>(
std::vector<std::unique_ptr<GMLAST::Statement>>{});
CHECK(node->nodeType() == GMLAST::NodeType::Statements);
}
TEST_CASE("String Constant", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::StringConstant>("");
CHECK(node->nodeType() == GMLAST::NodeType::StringConstant);
}
TEST_CASE("Switch Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::SwitchStatement>(
MakeUnique<GMLAST::Error>(), std::vector<GMLAST::SwitchEntry>{});
CHECK(node->nodeType() == GMLAST::NodeType::SwitchStatement);
}
TEST_CASE("Unuary Operator", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::UnuaryOperator>(
GMLAST::UnuaryOperator::Type::Plus, MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::UnuaryOperator);
}
TEST_CASE("Variable", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::Variable>("");
CHECK(node->nodeType() == GMLAST::NodeType::Variable);
}
TEST_CASE("While Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::WhileStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::WhileStatement);
}
TEST_CASE("With Statement", "[TypeTest]") {
const auto node = MakeUnique<GMLAST::WithStatement>(
MakeUnique<GMLAST::Error>(), MakeUnique<GMLAST::Error>());
CHECK(node->nodeType() == GMLAST::NodeType::WithStatement);
}
|
codegeekgao/framework
|
Spring-Framerwork/src/main/java/com/codegeek/ioc/day6/service/AccountService.java
|
<filename>Spring-Framerwork/src/main/java/com/codegeek/ioc/day6/service/AccountService.java
package com.codegeek.ioc.day6.service;
import com.codegeek.ioc.day6.model.Product;
import java.math.BigDecimal;
/**
* @author CodeGeekGao
* @version Id: AccountService.java, v 1.0 2020/6/30 11:59 PM CodeGeekGao
*/
public interface AccountService {
String buy( String productName,Integer buyCount,String user);
}
|
resystem/500-cidades-graph-api-serverless
|
models/disability.list.js
|
export const DISABILITY_LIST = [
'hearing_disability',
'deaf',
'physical_disability',
'mental_disability',
'visually_impaired',
'blind',
'multiple_disabilities',
'ASD',
'not_said',
'other',
];
export const todelete = '';
|
Arronzheng/roncoo-education
|
roncoo-education-course/roncoo-education-course-service/src/main/java/com/roncoo/education/course/service/biz/pc/PcApiCourseBiz.java
|
<filename>roncoo-education-course/roncoo-education-course-service/src/main/java/com/roncoo/education/course/service/biz/pc/PcApiCourseBiz.java
package com.roncoo.education.course.service.biz.pc;
import java.math.BigDecimal;
import java.util.List;
import com.qiniu.common.QiniuException;
import com.roncoo.education.course.service.common.dto.auth.AuthCourseAuditSaveDTO;
import com.roncoo.education.course.service.common.req.*;
import com.roncoo.education.course.service.dao.impl.mapper.entity.*;
import com.roncoo.education.system.common.bean.vo.SysVO;
import com.roncoo.education.system.feign.IBossSys;
import com.roncoo.education.util.aliyun.Aliyun;
import com.roncoo.education.util.aliyun.AliyunUtil;
import com.roncoo.education.util.enums.*;
import com.roncoo.education.util.qiniu.Qiniu;
import com.roncoo.education.util.qiniu.QiniuUtil;
import com.roncoo.education.util.tencentcloud.Tencent;
import com.roncoo.education.util.tencentcloud.TencentUtil;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import com.roncoo.education.course.service.common.resq.CourseChapterPeriodViewRESQ;
import com.roncoo.education.course.service.common.resq.CourseChapterViewRESQ;
import com.roncoo.education.course.service.common.resq.CourseGetRESQ;
import com.roncoo.education.course.service.common.resq.CoursePageRESQ;
import com.roncoo.education.course.service.common.resq.CourseViewRESQ;
import com.roncoo.education.course.service.dao.CourseAuditDao;
import com.roncoo.education.course.service.dao.CourseCategoryDao;
import com.roncoo.education.course.service.dao.CourseChapterDao;
import com.roncoo.education.course.service.dao.CourseChapterPeriodDao;
import com.roncoo.education.course.service.dao.CourseDao;
import com.roncoo.education.course.service.dao.CourseIntroduceAuditDao;
import com.roncoo.education.course.service.dao.CourseIntroduceDao;
import com.roncoo.education.course.service.dao.ZoneCourseDao;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseExample.Criteria;
import com.roncoo.education.user.common.bean.vo.LecturerVO;
import com.roncoo.education.user.feign.IBossLecturer;
import com.roncoo.education.util.base.Page;
import com.roncoo.education.util.base.PageUtil;
import com.roncoo.education.util.base.Result;
import com.roncoo.education.util.tools.BeanUtil;
import com.xiaoleilu.hutool.util.ObjectUtil;
/**
* 课程信息
*/
@Component
public class PcApiCourseBiz {
@Autowired
private IBossLecturer bossLecturer;
@Autowired
private ZoneCourseDao zoneCourseDao;
@Autowired
private CourseDao dao;
@Autowired
private CourseAuditDao courseAuditDao;
@Autowired
private CourseChapterDao courseChapterDao;
@Autowired
private CourseIntroduceDao courseIntroduceDao;
@Autowired
private CourseIntroduceAuditDao courseIntroduceAuditDao;
@Autowired
private CourseCategoryDao courseCategoryDao;
@Autowired
private CourseChapterPeriodDao courseChapterPeriodDao;
@Autowired
private IBossSys bossSys;
/**
* 分页列出
*
* @param req
* @return
*/
public Result<Page<CoursePageRESQ>> list(CoursePageREQ req) {
CourseExample example = new CourseExample();
Criteria c = example.createCriteria();
if (req.getCategoryId1() != null) {
c.andCategoryId1EqualTo(req.getCategoryId1());
}
if (!StringUtils.isEmpty(req.getCourseName())) {
c.andCourseNameLike(PageUtil.rightLike(req.getCourseName()));
}
if (req.getStatusId() != null) {
c.andStatusIdEqualTo(req.getStatusId());
}
if (req.getIsFree() != null) {
c.andIsFreeEqualTo(req.getIsFree());
}
if (req.getIsPutaway() != null) {
c.andIsPutawayEqualTo(req.getIsPutaway());
}
example.setOrderByClause(" status_id desc, is_putaway desc, course_sort desc, id desc ");
Page<Course> page = dao.listForPage(req.getPageCurrent(), req.getPageSize(), example);
Page<CoursePageRESQ> listForPage = PageUtil.transform(page, CoursePageRESQ.class);
// 获取分类名称
for (CoursePageRESQ resq : listForPage.getList()) {
if (req.getZoneId() != null) {
// 校验专区是否存在课程
ZoneCourse zoneCourse = zoneCourseDao.getZoneIdAndCourseId(resq.getId(), req.getZoneId());
if (ObjectUtil.isNull(zoneCourse)) {
// 不存在
resq.setIsAddZoneCourse(0);
} else {
// 存在
resq.setIsAddZoneCourse(1);
}
}
LecturerVO lecturer = bossLecturer.getByLecturerUserNo(resq.getLecturerUserNo());
if (ObjectUtil.isNotNull(lecturer)) {
resq.setLecturerName(lecturer.getLecturerName());
}
if (resq.getCategoryId1() != null && resq.getCategoryId1() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId1());
if (!StringUtils.isEmpty(courseCategory)) {
resq.setCategoryName1(courseCategory.getCategoryName());
}
}
if (resq.getCategoryId2() != null && resq.getCategoryId2() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId2());
if (!StringUtils.isEmpty(courseCategory)) {
resq.setCategoryName2(courseCategory.getCategoryName());
}
}
if (resq.getCategoryId3() != null && resq.getCategoryId3() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId3());
if (!StringUtils.isEmpty(courseCategory)) {
resq.setCategoryName3(courseCategory.getCategoryName());
}
}
}
return Result.success(listForPage);
}
@Transactional
public Result<Integer> update(CourseUpdateREQ req) {
if (req.getId() == null) {
return Result.error("ID不能为空");
}
Course course = dao.getById(req.getId());
if (ObjectUtil.isNull(course)) {
return Result.error("找不到课程信息");
}
if (IsFreeEnum.FREE.getCode().equals(req.getIsFree())) {
req.setCourseOriginal(BigDecimal.ZERO);
req.setCourseDiscount(BigDecimal.ZERO);
}
Course record = BeanUtil.copyProperties(req, Course.class);
int result = dao.updateById(record);
if (result > 0) {
// 同步更新审核表
CourseAudit courseAudit = BeanUtil.copyProperties(req, CourseAudit.class);
courseAudit.setGmtCreate(null);
courseAudit.setGmtModified(null);
int recordAudit = courseAuditDao.updateById(courseAudit);
if (recordAudit < 0) {
return Result.error(ResultEnum.COURSE_UPDATE_FAIL);
}
}
if (StringUtils.hasText(req.getIntroduce())) {
// 更新课程介绍
CourseIntroduce courseIntroduce = courseIntroduceDao.getById(course.getIntroduceId());
if (ObjectUtil.isNull(courseIntroduce)) {
return Result.error("找不到课程简介信息");
}
courseIntroduce.setId(course.getIntroduceId());
courseIntroduce.setIntroduce(req.getIntroduce());
int results = courseIntroduceDao.updateById(courseIntroduce);
if (results < 0) {
return Result.error(ResultEnum.COURSE_UPDATE_FAIL);
}
// 同步更新审核表
CourseIntroduceAudit courseIntroduceAudit = courseIntroduceAuditDao.getById(course.getIntroduceId());
if (ObjectUtil.isNull(courseIntroduceAudit)) {
return Result.error("找不到课程简介信息");
}
courseIntroduceAudit.setGmtCreate(null);
courseIntroduceAudit.setGmtModified(null);
courseIntroduceAudit.setId(course.getIntroduceId());
courseIntroduceAudit.setIntroduce(req.getIntroduce());
courseIntroduceAuditDao.updateById(courseIntroduceAudit);
}
return Result.success(result);
}
/**
* 查看(课程修改使用)
*
* @param req
* @return
*/
public Result<CourseGetRESQ> get(CourseGetREQ req) {
if (req.getId() == null) {
return Result.error("ID不能为空");
}
// 根据id查找课程信息
Course record = dao.getById(req.getId());
CourseGetRESQ resq = BeanUtil.copyProperties(record, CourseGetRESQ.class);
if (ObjectUtil.isNotNull(resq)) {
// 获取分类名称
if (resq.getCategoryId1() != null && resq.getCategoryId1() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId1());
resq.setCategoryName1(courseCategory.getCategoryName());
}
if (resq.getCategoryId2() != null && resq.getCategoryId2() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId2());
resq.setCategoryName2(courseCategory.getCategoryName());
}
if (resq.getCategoryId3() != null && resq.getCategoryId3() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId3());
resq.setCategoryName3(courseCategory.getCategoryName());
}
// 根据id查找课程简介信息
CourseIntroduce courseIntroduce = courseIntroduceDao.getById(record.getIntroduceId());
// 把课程简介带回课程信息
if (ObjectUtil.isNotNull(courseIntroduce)) {
resq.setIntroduce(courseIntroduce.getIntroduce());
}
}
return Result.success(resq);
}
/**
* 查看详情
*
* @param req
* @return
*/
public Result<CourseViewRESQ> view(CourseViewREQ req) {
if (req.getId() == null) {
return Result.error("ID不能为空");
}
Course record = dao.getById(req.getId());
CourseViewRESQ resq = BeanUtil.copyProperties(record, CourseViewRESQ.class);
// 获取分类名称
if (resq.getCategoryId1() != null && resq.getCategoryId1() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId1());
resq.setCategoryName1(courseCategory.getCategoryName());
}
if (resq.getCategoryId2() != null && resq.getCategoryId2() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId2());
resq.setCategoryName2(courseCategory.getCategoryName());
}
if (resq.getCategoryId3() != null && resq.getCategoryId3() != 0) {
CourseCategory courseCategory = courseCategoryDao.getById(resq.getCategoryId3());
resq.setCategoryName3(courseCategory.getCategoryName());
}
// 章节
List<CourseChapter> ChapterList = courseChapterDao.listByCourseIdAndStatusId(resq.getId(),
StatusIdEnum.YES.getCode());
if (CollectionUtils.isNotEmpty(ChapterList)) {
List<CourseChapterViewRESQ> courseChapterVOList = PageUtil.copyList(ChapterList,
CourseChapterViewRESQ.class);
for (CourseChapterViewRESQ courseChapter : courseChapterVOList) {
// 课时
List<CourseChapterPeriod> periodList = courseChapterPeriodDao
.listByChapterIdAndStatusId(courseChapter.getId(), StatusIdEnum.YES.getCode());
courseChapter
.setCourseChapterPeriodList(PageUtil.copyList(periodList, CourseChapterPeriodViewRESQ.class));
}
resq.setCourseChapterList(courseChapterVOList);
}
return Result.success(resq);
}
public Result<Integer> save(CourseSaveREQ courseSaveREQ) {
// Course record = BeanUtil.copyProperties(courseSaveREQ,Course.class);
// for (int i = 0; i < courseSaveREQ.getCategorys().size(); i++) {
// record.transferSet(i,Long.valueOf(courseSaveREQ.getCategorys().get(i)));
// }
// record.setStatusId(1);
// if(record.getIsFree() == 1){
// record.setCourseOriginal(BigDecimal.ZERO);
// }
// record.setIsPutaway(1);
// 原价小于0
if (courseSaveREQ.getCourseOriginal().compareTo(BigDecimal.valueOf(0)) == -1) {
return Result.error("售价不能小于0");
}
// 课程收费但价格为空
if (IsFreeEnum.CHARGE.getCode().equals(courseSaveREQ.getIsFree())) {
if (courseSaveREQ.getCourseOriginal() == null) {
return Result.error("价格不能为空");
}
}
// 课程介绍
CourseIntroduceAudit courseIntroduceAudit = new CourseIntroduceAudit();
courseIntroduceAudit.setIntroduce(courseSaveREQ.getCourseDesc());
courseIntroduceAuditDao.save(courseIntroduceAudit);
// 课程
CourseAudit record = BeanUtil.copyProperties(courseSaveREQ, CourseAudit.class);
if (IsFreeEnum.FREE.getCode().equals(courseSaveREQ.getIsFree())) {
// 课程免费就设置价格为0(原价、优惠价)
record.setCourseOriginal(BigDecimal.valueOf(0));
record.setCourseDiscount(BigDecimal.valueOf(0));
}
record.setStatusId(StatusIdEnum.YES.getCode());
record.setIsPutaway(IsPutawayEnum.YES.getCode());
record.setAuditStatus(AuditStatusEnum.WAIT.getCode());
record.setIntroduceId(courseIntroduceAudit.getId());
record.setCourseDiscount(courseSaveREQ.getCourseOriginal());
record.setLecturerUserNo(courseSaveREQ.getLecturerUserNo());
for (int i = 0; i < courseSaveREQ.getCategorys().size(); i++) {
record.transferSet(i,Long.valueOf(courseSaveREQ.getCategorys().get(i)));
}
// 查询更新后的课程审核信息
int i = courseAuditDao.save(record);
if (i > 0) {
return Result.success(i);
}
return Result.error(ResultEnum.COURSE_SAVE_FAIL);
}
public Result<Integer> delete(CourseDeleteREQ req) {
if (StringUtils.isEmpty(req.getId())) {
return Result.error("ID不能为空");
}
Course course = dao.getById(req.getId());
if (ObjectUtil.isNull(course)) {
return Result.error("找不到课程信息");
}else{
List<CourseChapter> ChapterList = courseChapterDao.listByCourseIdAndStatusId(req.getId(),
StatusIdEnum.YES.getCode());
if (CollectionUtils.isNotEmpty(ChapterList)) {
return Result.error("请先删除下级分类");
}
}
SysVO sys = bossSys.getSys();
if(sys.getFileType().equals(FileTypeEnum.TENCENT)){
TencentUtil.deleteFile(course.getCourseLogo(), BeanUtil.copyProperties(sys, Tencent.class));
}else if(sys.getFileType().equals(FileTypeEnum.QINIU)){
try {
QiniuUtil.deletePic(course.getCourseLogo(), BeanUtil.copyProperties(sys, Qiniu.class));
} catch (QiniuException e) {
return Result.error(e.code(),e.response.toString());
}
}else{
AliyunUtil.delete(course.getCourseLogo(), BeanUtil.copyProperties(sys, Aliyun.class));
}
int results = dao.deleteById(req.getId());
if (results > 0) {
return Result.success(results);
}
return Result.error(ResultEnum.COURSE_DELETE_FAIL);
}
}
|
gianricardo/OpcUaStack
|
tst/OpcUaStackCore/BuildInTypes/OpcUaNumber_t.cpp
|
<reponame>gianricardo/OpcUaStack
#include "unittest.h"
#include "OpcUaStackCore/BuildInTypes/OpcUaNumber.h"
#include "OpcUaStackCore/Base/Utility.h"
#include <sstream>
#include <boost/iostreams/stream.hpp>
#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/json_parser.hpp>
using namespace OpcUaStackCore;
void writeDocument(boost::property_tree::ptree& pt)
{
#if 0
std::cout << "READ" << std::endl;
std::stringstream ss1;
boost::property_tree::ptree js;
ss1 << "{ \"Value\": \"123\" }";
boost::property_tree::json_parser::read_json(ss1, js);
std::cout << "WRITE:" << std::endl;
#endif
boost::property_tree::ptree xx;
xx.add_child("AAA", pt);
std::stringstream ss2;
boost::property_tree::json_parser::write_json(ss2, xx);
std::cout << "Document: " << ss2.str() << std::endl;
}
BOOST_AUTO_TEST_SUITE(OpcUaNumber_)
BOOST_AUTO_TEST_CASE(OpcUaNumber_)
{
std::cout << "OpcUaNumber_t" << std::endl;
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBoolean)
{
std::stringstream ss;
OpcUaBoolean value1, value2;
value1 = true;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == true);
OpcUaNumber::opcUaBinaryEncode(ss, false);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBoolean_ptree)
{
boost::property_tree::ptree pt;
OpcUaBoolean value1, value2;
value1 = true;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == true);
OpcUaNumber::encode(pt, false);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBooleanArray)
{
std::stringstream ss;
OpcUaBooleanArray value1, value2;
value1.resize(3);
value1.set(0, true);
value1.set(1, false);
value1.set(2, true);
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
bool result;
BOOST_REQUIRE(value2.get(0, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2.get(1, result) == true);
BOOST_REQUIRE(result == false);
BOOST_REQUIRE(value2.get(2, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2.get(3, result) == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBooleanArray_ptree)
{
boost::property_tree::ptree pt;
OpcUaBooleanArray value1, value2;
value1.resize(3);
value1.set(0, true);
value1.set(1, false);
value1.set(2, true);
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
bool result;
BOOST_REQUIRE(value2.get(0, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2.get(1, result) == true);
BOOST_REQUIRE(result == false);
BOOST_REQUIRE(value2.get(2, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2.get(3, result) == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBooleanArraySPtr)
{
std::stringstream ss;
OpcUaBooleanArray::SPtr value1, value2;
value1 = constructSPtr<OpcUaBooleanArray>();
value2 = constructSPtr<OpcUaBooleanArray>();
value1->resize(3);
value1->set(0, true);
value1->set(1, false);
value1->set(2, true);
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
bool result;
BOOST_REQUIRE(value2->get(0, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2->get(1, result) == true);
BOOST_REQUIRE(result == false);
BOOST_REQUIRE(value2->get(2, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2->get(3, result) == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaBooleanArraySPtr_ptree)
{
boost::property_tree::ptree pt;
OpcUaBooleanArray::SPtr value1, value2;
value1 = constructSPtr<OpcUaBooleanArray>();
value2 = constructSPtr<OpcUaBooleanArray>();
value1->resize(3);
value1->set(0, true);
value1->set(1, false);
value1->set(2, true);
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
bool result;
BOOST_REQUIRE(value2->get(0, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2->get(1, result) == true);
BOOST_REQUIRE(result == false);
BOOST_REQUIRE(value2->get(2, result) == true);
BOOST_REQUIRE(result == true);
BOOST_REQUIRE(value2->get(3, result) == false);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaSByte)
{
std::stringstream ss;
OpcUaSByte value1, value2;
value1 = -128;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -128);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 127;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 127);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaSByte_ptree)
{
boost::property_tree::ptree pt;
OpcUaSByte value1, value2;
value1 = -128;
OpcUaNumber::encode(pt, value1);
writeDocument(pt);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -128);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 127;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 127);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaByte)
{
std::stringstream ss;
OpcUaByte value1, value2;
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 255;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 255);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaByte_ptree)
{
boost::property_tree::ptree pt;
OpcUaByte value1, value2;
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 255;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 255);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt16)
{
std::stringstream ss;
OpcUaInt16 value1, value2;
value1 = -0x7FFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -0x7FFF);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFE;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0x7FFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt16_ptree)
{
boost::property_tree::ptree pt;
OpcUaInt16 value1, value2;
value1 = -0x7FFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -0x7FFF);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFE;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0x7FFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt16)
{
std::stringstream ss;
OpcUaUInt16 value1, value2;
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0xFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt16_ptree)
{
boost::property_tree::ptree pt;
OpcUaUInt16 value1, value2;
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0xFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt32)
{
std::stringstream ss;
OpcUaInt32 value1, value2;
value1 = -0x7FFFFFFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -0x7FFFFFFF);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFFFFFE;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0x7FFFFFFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt32_ptree)
{
boost::property_tree::ptree pt;
OpcUaInt32 value1, value2;
value1 = -0x7FFFFFFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -0x7FFFFFFF);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFFFFFE;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0x7FFFFFFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt32)
{
std::stringstream ss;
OpcUaUInt32 value1, value2;
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFFFFFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0xFFFFFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt32_ptree)
{
boost::property_tree::ptree pt;
OpcUaUInt32 value1, value2;
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFFFFFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0xFFFFFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt64)
{
std::stringstream ss;
OpcUaInt64 value1, value2;
value1 = -0x7FFFFFFFFFFFFFFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -0x7FFFFFFFFFFFFFFF);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFFFFFE;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0x7FFFFFFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaInt64_ptree)
{
boost::property_tree::ptree pt;
OpcUaInt64 value1, value2;
value1 = -0x7FFFFFFFFFFFFFFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -0x7FFFFFFFFFFFFFFF);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0x7FFFFFFE;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0x7FFFFFFE);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt64)
{
std::stringstream ss;
OpcUaUInt64 value1, value2;
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFFFFFFFFFFFFFF;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0xFFFFFFFFFFFFFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaUInt64_ptree)
{
boost::property_tree::ptree pt;
OpcUaUInt64 value1, value2;
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 0xFFFFFFFFFFFFFFFF;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0xFFFFFFFFFFFFFFFF);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaFloat)
{
std::stringstream ss;
OpcUaFloat value1, value2;
value1 = (float)-1234.56;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE((float)value2 == (float)-1234.56);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = (float)1234.56;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE((float)value2 == (float)1234.56);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaFloat_ptree)
{
boost::property_tree::ptree pt;
OpcUaFloat value1, value2;
value1 = (float)-1234.56;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE((float)value2 == (float)-1234.56);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = (float)1234.56;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE((float)value2 == (float)1234.56);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaDouble)
{
std::stringstream ss;
OpcUaDouble value1, value2;
value1 = -1234.56;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1234.56);
value1 = -1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 1234.56;
OpcUaNumber::opcUaBinaryEncode(ss, value1);
OpcUaNumber::opcUaBinaryDecode(ss, value2);
BOOST_REQUIRE(value2 == 1234.56);
}
BOOST_AUTO_TEST_CASE(OpcUaNumber_OpcUaDouble_ptree)
{
boost::property_tree::ptree pt;
OpcUaDouble value1, value2;
value1 = -1234.56;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1234.56);
value1 = -1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == -1);
value1 = 0;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 0);
value1 = 1;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1);
value1 = 1234.56;
OpcUaNumber::encode(pt, value1);
OpcUaNumber::decode(pt, value2);
BOOST_REQUIRE(value2 == 1234.56);
}
BOOST_AUTO_TEST_SUITE_END()
|
AQ18/skimpy
|
skimpy/inference/parameters.py
|
<filename>skimpy/inference/parameters.py
"""
"""
from skimpy.core.parameters import ParameterValuePopulation
from scipy.stats import multivariate_normal
import tensorflow as tf
import pandas as pd
import numpy as np
EPSILON = 1e-9
class SecureMultivariateNormal(object):
def __init__(self, mu, sigma, var):
self.variable_parameters = var > EPSILON
self.constant_parameters = var < EPSILON
self.mu = mu
self.variable_index = var.index[self.variable_parameters]
self.const_index = var.index[self.constant_parameters]
# TODO: RAISE A MORE INFORMATIVE ERROR WHEN THE COV IS SIGULAR!
self._dist = multivariate_normal(mu[self.variable_parameters],
sigma.loc[self.variable_parameters,self.variable_parameters])
def rvs(self,size,random_state=None):
values_var = self._dist.rvs(size=size, random_state=random_state)
if size > 1:
df = pd.DataFrame(values_var, columns=self.variable_index)
else:
df = pd.DataFrame(values_var, index=self.variable_index, columns=[0]).T
values_cons = pd.concat( [ self.mu[self.constant_parameters] , ]*size , axis=1)
return pd.concat([df, values_cons.T], axis=1)
class LogNormalPriorParameterDistribution():
"""
TF Based model
"""
pass
class PosteriorLogNormalParameterPopulation(object):
"""
"""
def __init__(self, parameter_poulations, likelyhoods=None):
self.mu = []
self.sigma = []
self.pdf = []
for pop in parameter_poulations:
var = pop.log_var()
mu = pop.log_mean()
sigma = pop.log_cov()
self.mu.append( mu )
self.sigma.append( sigma )
self.pdf.append( SecureMultivariateNormal(mu,sigma,var) )
N = len(parameter_poulations)
if likelyhoods is None:
self.weights = np.ones(N)/N
else:
self.weights = likelyhoods/sum(likelyhoods)
self.cum_weights = np.cumsum(self.weights)
def resample(self, N, seed=None):
if not seed is None:
np.random.seed(seed=seed)
# Gillespie type of algorithm for resampling
x = []
for s in range(N):
r = np.random.rand()
j = sum(r > self.cum_weights)
x_i = np.exp( self.pdf[j].rvs(size=1, random_state=None) )
x.append(x_i)
df = pd.concat(x, axis=0, ignore_index=True, )
return df
class PosteriorNormalParameterPopulation(object):
"""
"""
def __init__(self, parameter_poulations, likelyhoods=None):
self.mu = []
self.sigma = []
self.pdf = []
for pop in parameter_poulations:
var = pop.var()
mu = pop.mean()
sigma = pop.cov()
self.mu.append( mu )
self.sigma.append( sigma )
self.pdf.append( SecureMultivariateNormal(mu,sigma,var) )
N = len(parameter_poulations)
if likelyhoods is None:
self.weights = np.ones(N)/N
else:
self.weights = likelyhoods/sum(likelyhoods)
self.cum_weights = np.cumsum(self.weights)
def resample(self, N, seed=None):
if not seed is None:
np.random.seed(seed=seed)
# Gillespie type of algorithm for resampling
x = []
for s in range(N):
r = np.random.rand()
j = sum(r > self.cum_weights)
x_i = self.pdf[j].rvs(size=1, random_state=None)
x.append(x_i)
df = pd.concat(x, axis=0, ignore_index=True, )
return df
|
Amit0617/OpenMS
|
src/openms/source/ANALYSIS/OPENSWATH/DIAScoring.cpp
|
<filename>src/openms/source/ANALYSIS/OPENSWATH/DIAScoring.cpp
// --------------------------------------------------------------------------
// OpenMS -- Open-Source Mass Spectrometry
// --------------------------------------------------------------------------
// Copyright The OpenMS Team -- Eberhard Karls University Tuebingen,
// ETH Zurich, and Freie Universitaet Berlin 2002-2021.
//
// This software is released under a three-clause BSD license:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of any author or any participating institution
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
// For a full list of authors, refer to the file AUTHORS.
// --------------------------------------------------------------------------
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL ANY OF THE AUTHORS OR THE CONTRIBUTING
// INSTITUTIONS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// --------------------------------------------------------------------------
// $Maintainer: <NAME> $
// $Authors: <NAME>, <NAME> $
// --------------------------------------------------------------------------
#include <OpenMS/ANALYSIS/OPENSWATH/DIAScoring.h>
#include <OpenMS/CONCEPT/Constants.h>
#include <OpenMS/CHEMISTRY/ISOTOPEDISTRIBUTION/CoarseIsotopePatternGenerator.h>
#include <OpenMS/TRANSFORMATIONS/FEATUREFINDER/FeatureFinderAlgorithmPickedHelperStructs.h>
#include <OpenMS/TRANSFORMATIONS/FEATUREFINDER/FeatureFinderAlgorithm.h>
#include <OpenMS/OPENSWATHALGO/ALGO/StatsHelpers.h>
#include <OpenMS/OPENSWATHALGO/DATAACCESS/SpectrumHelpers.h> // integrateWindow
#include <OpenMS/ANALYSIS/OPENSWATH/DIAHelper.h>
#include <OpenMS/ANALYSIS/OPENSWATH/DIAPrescoring.h>
#include <OpenMS/CHEMISTRY/TheoreticalSpectrumGenerator.h>
#include <OpenMS/MATH/MISC/MathFunctions.h> // getPPM
#include <numeric>
#include <algorithm>
#include <functional>
#include <boost/bind.hpp>
#include <cmath> // for isnan
const double C13C12_MASSDIFF_U = 1.0033548;
namespace OpenMS
{
DIAScoring::DIAScoring() :
DefaultParamHandler("DIAScoring")
{
defaults_.setValue("dia_extraction_window", 0.05, "DIA extraction window in Th or ppm.");
defaults_.setMinFloat("dia_extraction_window", 0.0);
defaults_.setValue("dia_extraction_unit", "Th", "DIA extraction window unit");
defaults_.setValidStrings("dia_extraction_unit", {"Th","ppm"});
defaults_.setValue("dia_centroided", "false", "Use centroided DIA data.");
defaults_.setValidStrings("dia_centroided", {"true","false"});
defaults_.setValue("dia_byseries_intensity_min", 300.0, "DIA b/y series minimum intensity to consider.");
defaults_.setMinFloat("dia_byseries_intensity_min", 0.0);
defaults_.setValue("dia_byseries_ppm_diff", 10.0, "DIA b/y series minimal difference in ppm to consider.");
defaults_.setMinFloat("dia_byseries_ppm_diff", 0.0);
defaults_.setValue("dia_nr_isotopes", 4, "DIA number of isotopes to consider.");
defaults_.setMinInt("dia_nr_isotopes", 0);
defaults_.setValue("dia_nr_charges", 4, "DIA number of charges to consider.");
defaults_.setMinInt("dia_nr_charges", 0);
defaults_.setValue("peak_before_mono_max_ppm_diff", 20.0, "DIA maximal difference in ppm to count a peak at lower m/z when searching for evidence that a peak might not be monoisotopic.");
defaults_.setMinFloat("peak_before_mono_max_ppm_diff", 0.0);
// write defaults into Param object param_
defaultsToParam_();
// for void getBYSeries
{
generator = new TheoreticalSpectrumGenerator();
Param p;
p.setValue("add_metainfo", "true",
"Adds the type of peaks as metainfo to the peaks, like y8+, [M-H2O+2H]++");
generator->setParameters(p);
}
// for simulateSpectrumFromAASequence
// Param p;
// p.setValue("add_metainfo", "false",
// "Adds the type of peaks as metainfo to the peaks, like y8+, [M-H2O+2H]++");
// p.setValue("add_precursor_peaks", "true", "Adds peaks of the precursor to the spectrum, which happen to occur sometimes");
// generator->setParameters(p);
}
DIAScoring::~DIAScoring()
{
delete generator;
}
void DIAScoring::updateMembers_()
{
dia_extract_window_ = (double)param_.getValue("dia_extraction_window");
dia_extraction_ppm_ = param_.getValue("dia_extraction_unit") == "ppm";
dia_centroided_ = param_.getValue("dia_centroided").toBool();
dia_byseries_intensity_min_ = (double)param_.getValue("dia_byseries_intensity_min");
dia_byseries_ppm_diff_ = (double)param_.getValue("dia_byseries_ppm_diff");
dia_nr_isotopes_ = (int)param_.getValue("dia_nr_isotopes");
dia_nr_charges_ = (int)param_.getValue("dia_nr_charges");
peak_before_mono_max_ppm_diff_ = (double)param_.getValue("peak_before_mono_max_ppm_diff");
}
///////////////////////////////////////////////////////////////////////////
// DIA / SWATH scoring
void DIAScoring::dia_isotope_scores(const std::vector<TransitionType>& transitions, SpectrumPtrType spectrum,
OpenSwath::IMRMFeature* mrmfeature, double& isotope_corr, double& isotope_overlap) const
{
isotope_corr = 0;
isotope_overlap = 0;
// first compute a map of relative intensities from the feature, then compute the score
std::map<std::string, double> intensities;
getFirstIsotopeRelativeIntensities_(transitions, mrmfeature, intensities);
diaIsotopeScoresSub_(transitions, spectrum, intensities, isotope_corr, isotope_overlap);
}
void DIAScoring::dia_massdiff_score(const std::vector<TransitionType>& transitions,
SpectrumPtrType spectrum,
const std::vector<double>& normalized_library_intensity,
double& ppm_score,
double& ppm_score_weighted,
std::vector<double>& diff_ppm) const
{
ppm_score = 0;
ppm_score_weighted = 0;
diff_ppm.clear();
for (std::size_t k = 0; k < transitions.size(); k++)
{
const TransitionType& transition = transitions[k];
// Calculate the difference of the theoretical mass and the actually measured mass
double left(transition.getProductMZ()), right(transition.getProductMZ());
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
double mz, intensity;
bool signalFound = DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
// Continue if no signal was found - we therefore don't make a statement
// about the mass difference if no signal is present.
if (!signalFound)
{
continue;
}
double ppm = Math::getPPM(mz, transition.getProductMZ());
diff_ppm.push_back(transition.getProductMZ());
diff_ppm.push_back(ppm);
ppm_score += std::fabs(ppm);
ppm_score_weighted += std::fabs(ppm) * normalized_library_intensity[k];
#ifdef MRMSCORING_TESTING
std::cout << " weighted int of the peak is " << mz << " diff is in ppm " << diff_ppm << " thus append " << diff_ppm * diff_ppm << " or weighted " << diff_ppm * normalized_library_intensity[k] << std::endl;
#endif
}
// FEATURE we should not punish so much when one transition is missing!
ppm_score /= transitions.size();
}
bool DIAScoring::dia_ms1_massdiff_score(double precursor_mz, SpectrumPtrType spectrum,
double& ppm_score) const
{
ppm_score = -1;
double mz, intensity;
{
// Calculate the difference of the theoretical mass and the actually measured mass
double left(precursor_mz), right(precursor_mz);
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
bool signalFound = DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
// Catch if no signal was found and replace it with the most extreme
// value. Otherwise calculate the difference in ppm.
if (!signalFound)
{
ppm_score = (right - left) / precursor_mz * 1000000;
return false;
}
else
{
ppm_score = Math::getPPMAbs(mz, precursor_mz);
return true;
}
}
}
/// Precursor isotope scores
void DIAScoring::dia_ms1_isotope_scores(double precursor_mz, SpectrumPtrType spectrum,
double& isotope_corr, double& isotope_overlap, const EmpiricalFormula& sum_formula) const
{
// although precursor_mz can be received from the empirical formula (if non-empty), the actual precursor could be
// slightly different. And also for compounds, usually the neutral sum_formula without adducts is given.
// Therefore calculate the isotopes based on the formula but place them at precursor_mz
std::vector<double> isotopes_int;
getIsotopeIntysFromExpSpec_(precursor_mz, spectrum, isotopes_int, sum_formula.getCharge());
double max_ratio = 0;
int nr_occurrences = 0;
// calculate the scores:
// isotope correlation (forward) and the isotope overlap (backward) scores
isotope_corr = scoreIsotopePattern_(isotopes_int, sum_formula);
largePeaksBeforeFirstIsotope_(spectrum, precursor_mz, isotopes_int[0], nr_occurrences, max_ratio);
isotope_overlap = max_ratio;
}
void DIAScoring::getIsotopeIntysFromExpSpec_(double precursor_mz, SpectrumPtrType spectrum,
std::vector<double>& isotopes_int,
int charge_state) const
{
double abs_charge = std::fabs(static_cast<double>(charge_state));
for (int iso = 0; iso <= dia_nr_isotopes_; ++iso)
{
double left = precursor_mz + iso * C13C12_MASSDIFF_U / abs_charge;
double right = left;
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
double mz, intensity;
DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
isotopes_int.push_back(intensity);
}
}
void DIAScoring::dia_ms1_isotope_scores_averagine(double precursor_mz, SpectrumPtrType spectrum,
double& isotope_corr, double& isotope_overlap,
int charge_state) const
{
std::vector<double> exp_isotopes_int;
getIsotopeIntysFromExpSpec_(precursor_mz, spectrum, exp_isotopes_int, charge_state);
CoarseIsotopePatternGenerator solver(dia_nr_isotopes_ + 1);
// NOTE: this is a rough estimate of the neutral mz value since we would not know the charge carrier for negative ions
IsotopeDistribution isotope_dist = solver.estimateFromPeptideWeight(std::fabs(precursor_mz * charge_state));
double max_ratio;
int nr_occurrences;
// calculate the scores:
// isotope correlation (forward) and the isotope overlap (backward) scores
isotope_corr = scoreIsotopePattern_(exp_isotopes_int, isotope_dist);
largePeaksBeforeFirstIsotope_(spectrum, precursor_mz, exp_isotopes_int[0], nr_occurrences, max_ratio);
isotope_overlap = max_ratio;
}
void DIAScoring::dia_by_ion_score(SpectrumPtrType spectrum,
AASequence& sequence, int charge, double& bseries_score,
double& yseries_score) const
{
bseries_score = 0;
yseries_score = 0;
OPENMS_PRECONDITION(charge > 0, "Charge is a positive integer"); // for peptides, charge should be positive
double mz, intensity, left, right;
std::vector<double> yseries, bseries;
OpenMS::DIAHelpers::getBYSeries(sequence, bseries, yseries, generator, charge);
for (const auto& b_ion_mz : bseries)
{
left = b_ion_mz;
right = b_ion_mz;
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
bool signalFound = DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
double ppmdiff = Math::getPPMAbs(mz, b_ion_mz);
if (signalFound && ppmdiff < dia_byseries_ppm_diff_ && intensity > dia_byseries_intensity_min_)
{
bseries_score++;
}
}
for (const auto& y_ion_mz : yseries)
{
left = y_ion_mz;
right = y_ion_mz;
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
bool signalFound = DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
double ppmdiff = Math::getPPMAbs(mz, y_ion_mz);
if (signalFound && ppmdiff < dia_byseries_ppm_diff_ && intensity > dia_byseries_intensity_min_)
{
yseries_score++;
}
}
}
void DIAScoring::score_with_isotopes(SpectrumPtrType spectrum, const std::vector<TransitionType>& transitions,
double& dotprod, double& manhattan) const
{
OpenMS::DiaPrescore dp(dia_extract_window_, dia_nr_isotopes_, dia_nr_charges_);
dp.score(spectrum, transitions, dotprod, manhattan);
}
///////////////////////////////////////////////////////////////////////////
// Private methods
/// computes a vector of relative intensities for each feature (output to intensities)
void DIAScoring::getFirstIsotopeRelativeIntensities_(
const std::vector<TransitionType>& transitions,
OpenSwath::IMRMFeature* mrmfeature, std::map<std::string, double>& intensities) const
{
for (Size k = 0; k < transitions.size(); k++)
{
std::string native_id = transitions[k].getNativeID();
double rel_intensity = mrmfeature->getFeature(native_id)->getIntensity() / mrmfeature->getIntensity();
intensities.insert(std::pair<std::string, double>(native_id, rel_intensity));
}
}
void DIAScoring::diaIsotopeScoresSub_(const std::vector<TransitionType>& transitions, SpectrumPtrType spectrum,
std::map<std::string, double>& intensities, //relative intensities
double& isotope_corr,
double& isotope_overlap) const
{
std::vector<double> isotopes_int;
double max_ratio;
int nr_occurences;
for (Size k = 0; k < transitions.size(); k++)
{
isotopes_int.clear();
const String native_id = transitions[k].getNativeID();
double rel_intensity = intensities[native_id];
// If no charge is given, we assume it to be 1
int putative_fragment_charge = 1;
if (transitions[k].fragment_charge != 0)
{
putative_fragment_charge = transitions[k].fragment_charge;
}
// collect the potential isotopes of this peak
double abs_charge = std::fabs(static_cast<double>(putative_fragment_charge));
for (int iso = 0; iso <= dia_nr_isotopes_; ++iso)
{
double left = transitions[k].getProductMZ() + iso * C13C12_MASSDIFF_U / abs_charge;
double right = left;
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
double mz, intensity;
DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
isotopes_int.push_back(intensity);
}
// calculate the scores:
// isotope correlation (forward) and the isotope overlap (backward) scores
double score = scoreIsotopePattern_(isotopes_int, transitions[k].getProductMZ(), putative_fragment_charge);
isotope_corr += score * rel_intensity;
largePeaksBeforeFirstIsotope_(spectrum, transitions[k].getProductMZ(), isotopes_int[0], nr_occurences, max_ratio);
isotope_overlap += nr_occurences * rel_intensity;
}
}
void DIAScoring::largePeaksBeforeFirstIsotope_(SpectrumPtrType spectrum, double mono_mz, double mono_int, int& nr_occurences, double& max_ratio) const
{
double mz, intensity;
nr_occurences = 0;
max_ratio = 0.0;
for (int ch = 1; ch <= dia_nr_charges_; ++ch)
{
double center = mono_mz - C13C12_MASSDIFF_U / (double) ch;
double left = center;
double right = center;
DIAHelpers::adjustExtractionWindow(right, left, dia_extract_window_, dia_extraction_ppm_);
bool signalFound = DIAHelpers::integrateWindow(spectrum, left, right, mz, intensity, dia_centroided_);
// Continue if no signal was found - we therefore don't make a statement
// about the mass difference if no signal is present.
if (!signalFound)
{
continue;
}
// Compute ratio between the (presumed) monoisotopic peak intensity and the now found peak
double ratio;
if (mono_int != 0)
{
ratio = intensity / mono_int;
}
else
{
ratio = 0;
}
if (ratio > max_ratio) {max_ratio = ratio;}
double ddiff_ppm = std::fabs(mz - center) * 1e6 / center;
// FEATURE we should fit a theoretical distribution to see whether we really are a secondary peak
if (ratio > 1 && ddiff_ppm < peak_before_mono_max_ppm_diff_)
{
//isotope_overlap += 1.0 * rel_intensity;
nr_occurences += 1; // we count how often this happens...
#ifdef MRMSCORING_TESTING
cout << " _ overlap diff ppm " << ddiff_ppm << " and inten ratio " << ratio << " with " << mono_int << endl;
#endif
}
}
}
double DIAScoring::scoreIsotopePattern_(const std::vector<double>& isotopes_int,
double product_mz,
int putative_fragment_charge) const
{
OPENMS_PRECONDITION(putative_fragment_charge != 0, "Charge needs to be set to != 0"); // charge can be positive and negative
IsotopeDistribution isotope_dist;
// create the theoretical distribution from the peptide weight
CoarseIsotopePatternGenerator solver(dia_nr_isotopes_ + 1);
// NOTE: this is a rough estimate of the neutral mz value since we would not know the charge carrier for negative ions
isotope_dist = solver.estimateFromPeptideWeight(std::fabs(product_mz * putative_fragment_charge));
return scoreIsotopePattern_(isotopes_int, isotope_dist);
} //end of dia_isotope_corr_sub
double DIAScoring::scoreIsotopePattern_(const std::vector<double>& isotopes_int,
const EmpiricalFormula& empf) const
{
return scoreIsotopePattern_(isotopes_int,
empf.getIsotopeDistribution(CoarseIsotopePatternGenerator(dia_nr_isotopes_ + 1)));
}
double DIAScoring::scoreIsotopePattern_(const std::vector<double>& isotopes_int,
const IsotopeDistribution& isotope_dist) const
{
typedef OpenMS::FeatureFinderAlgorithmPickedHelperStructs::TheoreticalIsotopePattern TheoreticalIsotopePattern;
TheoreticalIsotopePattern isotopes;
for (IsotopeDistribution::ConstIterator it = isotope_dist.begin(); it != isotope_dist.end(); ++it)
{
isotopes.intensity.push_back(it->getIntensity());
}
isotopes.optional_begin = 0;
isotopes.optional_end = dia_nr_isotopes_;
// scale the distribution to a maximum of 1
double max = 0.0;
for (Size i = 0; i < isotopes.intensity.size(); ++i)
{
if (isotopes.intensity[i] > max)
{
max = isotopes.intensity[i];
}
}
isotopes.max = max;
if (max == 0.) max = 1.;
for (Size i = 0; i < isotopes.intensity.size(); ++i)
{
isotopes.intensity[i] /= max;
}
isotopes.trimmed_left = 0;
// score the pattern against a theoretical one
OPENMS_POSTCONDITION(isotopes_int.size() == isotopes.intensity.size(), "Vectors for pearson correlation do not have the same size.");
double int_score = OpenSwath::cor_pearson(isotopes_int.begin(), isotopes_int.end(), isotopes.intensity.begin());
if (std::isnan(int_score))
{
int_score = 0;
}
return int_score;
} //end of dia_isotope_corr_sub
}
|
WebCampZg/conference-web
|
people/forms.py
|
<filename>people/forms.py
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
UserModel = get_user_model()
class CustomUserCreationForm(forms.ModelForm):
"""
A form for creating new users. Includes all the
required fields, plus a repeated password.
"""
class Meta:
model = UserModel
fields = ("email",)
password1 = forms.CharField(label="Password", widget=forms.PasswordInput)
password2 = forms.CharField(label="Password Confirmation", widget=forms.PasswordInput)
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
msg = "Passwords don't match"
raise forms.ValidationError(msg)
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(CustomUserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class CustomUserChangeForm(forms.ModelForm):
"""
A form for updating users. Includes all the fields
on the user, but replaces the password field with
admin"s password hash display field.
"""
class Meta:
model = UserModel
exclude = ('date_joined', 'last_login')
password = ReadOnlyPasswordHashField(help_text=(
"Raw passwords are not stored, so there is no way to see "
"this user's password, but you can change the password "
"using <a href=\"password/\">this form</a>."))
def clean_password(self):
# Regardless of what the user provides, return the
# initial value. This is done here, rather than on
# the field, because the field does not have access
# to the initial value
return self.initial["password"]
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserModel
fields = ['email', 'first_name', 'last_name', 'github', 'twitter', 'tshirt_size']
|
bbilger/jrestless
|
aws/gateway/jrestless-aws-gateway-handler/src/main/java/com/jrestless/aws/gateway/filter/DynamicProxyBasePathFilter.java
|
<reponame>bbilger/jrestless
/*
* Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jrestless.aws.gateway.filter;
import java.io.IOException;
import java.net.URI;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import com.jrestless.aws.gateway.io.GatewayRequest;
/**
* This filter adds an additional base bath if the resource is a prefixed proxy
* resource.
* <p>
* The filter allows you to use the very same lambda function with different
* APIGW resources without changing any code. Let's say you have one APIGW
* resource configured as "/v1/{proxy+}" and one configured as "/v2/{proxy+}"
* and both invoke the very same lambda function. With this filter in place,
* your JAX-RS resources don't need to be mapped to "/v1" or "/v2". So they are
* agnostic to it. Note: In general this filter makes sense only if you map one
* endpoint to a lambda function.
* <p>
* The detected base path is added to the base URI + a trailing slash.
* <ol>
* <li>If the resource contains a proxy (greedy path variable), then everything
* before the proxy is added as additional base path to the base URI. For
* example "/a/b" is added if the resource is "/a/b/{proxy+}".
* <li>If there's no prefix, then no base path will be added (e.g. "/{proxy+}").
* <li>If there's no proxy, then no base path will be added (e.g. "/a", "/",
* "/a/{id}").
* <li>If there the resource is misconfigured, then no base path will be added
* (e.g. "/proxy+}", {@code null}).
* </ol>
* <p>
* Note: in some situations this filter results in undesired behavior. So we
* won't add it to the framework by default.
*
* @author <NAME>
*
*/
@PreMatching
public class DynamicProxyBasePathFilter implements ContainerRequestFilter {
private static final String PROXY_START = "/{";
private static final String PROXY_END = "+}";
// request-scoped proxy
private GatewayRequest gatwayRequest;
@Context
void setGatewayRequest(GatewayRequest gatwayRequest) {
this.gatwayRequest = gatwayRequest;
}
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
String dynamicApplicationPath = getDynamicBasePath();
if (dynamicApplicationPath != null && !dynamicApplicationPath.isEmpty()) {
UriInfo uriInfo = requestContext.getUriInfo();
URI baseUri = uriInfo.getBaseUriBuilder()
.path(dynamicApplicationPath)
.path("/") // baseUri must have a trailing slash
.build();
URI requestUri = uriInfo.getRequestUri();
requestContext.setRequestUri(baseUri, requestUri);
}
}
private String getDynamicBasePath() {
String resource = gatwayRequest.getResource();
if (resource == null) {
return null;
} else if (!resource.endsWith(PROXY_END)) {
return "";
}
int proxyStart = resource.lastIndexOf(PROXY_START);
if (proxyStart < 0) {
return null;
} else if (proxyStart == 0) {
return "";
} else {
return resource.substring(0, proxyStart);
}
}
}
|
MManicaM/ogs
|
ProcessLib/TES/TESLocalAssemblerInner.h
|
/**
* \copyright
* Copyright (c) 2012-2019, OpenGeoSys Community (http://www.opengeosys.org)
* Distributed under a Modified BSD License.
* See accompanying file LICENSE.txt or
* http://www.opengeosys.org/project/license
*
* The code of this file is used to decouple the evaluation of matrix elements
* from the rest of OGS6,
* not all of OGS6 has to be recompiled every time a small change is done.
*/
#pragma once
#include "NumLib/Fem/ShapeMatrixPolicy.h"
#include "ProcessLib/LocalAssemblerTraits.h"
#include "ProcessLib/VariableTransformation.h"
#include "TESLocalAssemblerData.h"
namespace ProcessLib
{
namespace TES
{
template <typename Traits>
class TESLocalAssemblerInner
{
public:
explicit TESLocalAssemblerInner(AssemblyParams const& ap,
const unsigned num_int_pts,
const unsigned dimension);
void assembleIntegrationPoint(
unsigned integration_point,
std::vector<double> const& localX,
typename Traits::ShapeMatrices const& sm,
const double weight,
Eigen::Map<typename Traits::LocalMatrix>& local_M,
Eigen::Map<typename Traits::LocalMatrix>& local_K,
Eigen::Map<typename Traits::LocalVector>& local_b);
void preEachAssemble();
// TODO better encapsulation
AssemblyParams const& getAssemblyParameters() const { return _d.ap; }
TESFEMReactionAdaptor const& getReactionAdaptor() const
{
return *_d.reaction_adaptor;
}
TESFEMReactionAdaptor& getReactionAdaptor() { return *_d.reaction_adaptor; }
TESLocalAssemblerData const& getData() const { return _d; }
private:
Eigen::Matrix3d getMassCoeffMatrix(const unsigned int_pt);
typename Traits::LaplaceMatrix getLaplaceCoeffMatrix(const unsigned int_pt,
const unsigned dim);
Eigen::Matrix3d getAdvectionCoeffMatrix(const unsigned int_pt);
Eigen::Matrix3d getContentCoeffMatrix(const unsigned int_pt);
Eigen::Vector3d getRHSCoeffVector(const unsigned int_pt);
void preEachAssembleIntegrationPoint(
const unsigned int_pt,
std::vector<double> const& localX,
typename Traits::ShapeMatrices const& sm);
void initReaction(const unsigned int_pt);
TESLocalAssemblerData _d;
};
} // namespace TES
} // namespace ProcessLib
#include "TESLocalAssemblerInner-impl.h"
|
Pawlost/Pokecube-Issues-and-Wiki
|
src/main/java/pokecube/core/ai/tasks/bees/tasks/EnterHive.java
|
package pokecube.core.ai.tasks.bees.tasks;
import java.util.Optional;
import net.minecraft.core.GlobalPos;
import net.minecraft.world.entity.ai.Brain;
import net.minecraft.world.level.Level;
import pokecube.core.ai.tasks.bees.AbstractBeeTask;
import pokecube.core.ai.tasks.bees.BeeTasks;
import pokecube.core.ai.tasks.bees.sensors.HiveSensor;
import pokecube.core.interfaces.IPokemob;
import thut.api.maths.Vector3;
public class EnterHive extends AbstractBeeTask
{
final Vector3 homePos = new Vector3();
public EnterHive(final IPokemob pokemob)
{
super(pokemob);
}
@Override
public void reset()
{
this.homePos.clear();
}
@Override
public void run()
{
final Brain<?> brain = this.entity.getBrain();
final Optional<GlobalPos> pos_opt = brain.getMemory(BeeTasks.HIVE_POS);
if (pos_opt.isPresent())
{
final Level world = this.entity.getLevel();
final GlobalPos pos = pos_opt.get();
final boolean clearHive = pos.dimension() != world.dimension();
// This will be cleared by CheckHive, so lets just exit here.
if (clearHive) return;
this.homePos.set(pos.pos());
// If too far, lets path over.
if (this.homePos.distToEntity(this.entity) > 2) this.setWalkTo(this.homePos, 1, 0);
// If we can't get into the hive, forget it as a hive.
else if (!HiveSensor.tryAddToBeeHive(this.entity, pos.pos()))
this.entity.getBrain().eraseMemory(BeeTasks.HIVE_POS);
}
}
@Override
public boolean doTask()
{
final Brain<?> brain = this.entity.getBrain();
final Optional<Boolean> hasNectar = brain.getMemory(BeeTasks.HAS_NECTAR);
// We have nectar to return to the hive with.
if (hasNectar.isPresent() && hasNectar.get()) return true;
final Optional<Integer> hiveTimer = brain.getMemory(BeeTasks.OUT_OF_HIVE_TIMER);
// This is our counter for if something angered us, and made is leave
// the hive, if so, we don't return to hive.
if (hiveTimer.isPresent() && hiveTimer.get() > 0) return false;
// Return home if it is raining
if (this.entity.getLevel().isRaining()) return true;
// Return home if it is night time
if (this.entity.getLevel().isNight()) return true;
// Otherwise don't return home
return false;
}
}
|
personalised-semantic-search/JoDS_IRIS--
|
JoDS_IRIS+-/src/org/deri/iris/factory/Factory.java
|
/*
* Integrated Rule Inference System (IRIS):
* An extensible rule inference system for datalog with extensions.
*
* Copyright (C) 2008 Semantic Technology Institute (STI) Innsbruck,
* University of Innsbruck, Technikerstrasse 21a, 6020 Innsbruck, Austria.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.deri.iris.factory;
import org.deri.iris.api.factory.IBasicFactory;
import org.deri.iris.api.factory.IBuiltinsFactory;
import org.deri.iris.api.factory.IConcreteFactory;
import org.deri.iris.api.factory.IGraphFactory;
import org.deri.iris.api.factory.ITermFactory;
import org.deri.iris.basics.BasicFactory;
import org.deri.iris.builtins.BuiltinsFactory;
import org.deri.iris.graph.GraphFactory;
import org.deri.iris.terms.TermFactory;
import org.deri.iris.terms.concrete.ConcreteFactory;
import JODS.PGraphFactory;
/**
* @author <NAME> (richard dot poettler at deri dot at)
* @author <NAME>, DERI Innsbruck
*/
public class Factory {
public static final IBasicFactory BASIC = BasicFactory.getInstance();
public static final ITermFactory TERM = TermFactory.getInstance();
public static final IConcreteFactory CONCRETE = ConcreteFactory
.getInstance();
public static final IGraphFactory GRAPH = GraphFactory.getInstance();
public static final PGraphFactory PGRAPH = PGraphFactory.getInstance();
public static final IBuiltinsFactory BUILTIN = BuiltinsFactory
.getInstance();
}
|
jhuynh85/sdth-site
|
src/components/calendar/details.js
|
import React from "react"
import moment from "moment"
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"
import Html from "Common/Html"
import truncateString from "Utils/truncate"
import { StyledDetails, SeeMore } from "./styles"
function Details({ eventInfo, setEventInfo, ...position }) {
React.useEffect(() => {
const handler = e => {
if (!e.target.closest(".event-details")) {
setEventInfo(null)
}
}
window.document.body.addEventListener("click", handler)
return () => window.document.body.removeEventListener("click", handler)
})
React.useEffect(
() => {
const handler = e => {
if (e.key === "Escape") {
setEventInfo(null)
}
}
window.document.addEventListener("keyup", handler)
return () => window.document.removeEventListener("keyup", handler)
},
[eventInfo.event.title]
)
const start = moment(eventInfo.event.start).format("MMM D, Y @ h:mma")
return (
<StyledDetails {...position} className="event-details">
<aside className="header">
<div>
{eventInfo.event.title}
<div className="start">
<FontAwesomeIcon icon="clock" style={{ marginRight: ".8rem" }} />
{start}
</div>
</div>
<span>
<button
className="close-details"
type="button"
onClick={() => setEventInfo(null)}
>
×
</button>
</span>
</aside>
<div className="content">
<Html>
{truncateString(eventInfo.event.description || "No Description")}
</Html>
<SeeMore to={`/event/${eventInfo.event.id}`}>
See more
</SeeMore>
</div>
</StyledDetails>
)
}
export default Details
|
joshuaherrera/car_maintenance
|
server/client/src/components/Session/withAuthorization.js
|
import React from 'react';
import { withRouter } from 'react-router-dom';
import { compose } from 'recompose';
import AuthUserContext from './Context';
import { withFirebase } from '../Firebase';
const withAuthorization = condition => Component => {
class WithAuthorization extends React.Component {
//this function will listen for an authorization change
//and will check if the passed in condition is met.
//the passed in condition is used to check if a user is
//authorized or not.
//if a user is not authorized, the user is redirected to the
//login page
componentDidMount() {
this.listener = this.props.firebase.onAuthUserListener(
authUser => {
if(!condition(authUser)) {
this.props.history.push('/login');
}
},
() => this.props.history.push('/login'),
);
}
componentWillUnmount() {
this.listener();
}
render() {
return(
<AuthUserContext.Consumer>
{authUser =>
condition(authUser) ? <Component {...this.props} /> : null
}
</AuthUserContext.Consumer>
);
}
}
return compose(
withRouter,
withFirebase,
)(WithAuthorization);
};
export default withAuthorization;
|
Selous05/beeetv_1.6
|
app/src/main/java/com/beeecorptv/ui/downloadmanager/core/model/DownloadScheduler.java
|
/*
* EasyPlex - Movies - Live Streaming - TV Series, Anime
*
* @author @Y0bEX
* @package EasyPlex - Movies - Live Streaming - TV Series, Anime
* @copyright Copyright (c) 2021 Y0bEX,
* @license http://codecanyon.net/wiki/support/legal-terms/licensing-terms/
* @profile https://codecanyon.net/user/yobex
* @link <EMAIL>
* @skype <EMAIL>
**/
package com.beeecorptv.ui.downloadmanager.core.model;
import android.content.Context;
import android.text.format.DateUtils;
import androidx.annotation.NonNull;
import androidx.work.Constraints;
import androidx.work.Data;
import androidx.work.ExistingWorkPolicy;
import androidx.work.NetworkType;
import androidx.work.OneTimeWorkRequest;
import androidx.work.WorkManager;
import com.beeecorptv.ui.downloadmanager.core.RepositoryHelper;
import com.beeecorptv.ui.downloadmanager.core.model.data.StatusCode;
import com.beeecorptv.ui.downloadmanager.core.model.data.entity.DownloadInfo;
import com.beeecorptv.ui.downloadmanager.core.settings.SettingsRepository;
import com.beeecorptv.ui.downloadmanager.service.GetAndRunDownloadWorker;
import com.beeecorptv.ui.downloadmanager.service.RescheduleAllWorker;
import com.beeecorptv.ui.downloadmanager.service.RestoreDownloadsWorker;
import com.beeecorptv.ui.downloadmanager.service.RunAllWorker;
import com.beeecorptv.ui.downloadmanager.service.RunDownloadWorker;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
public class DownloadScheduler
{
@SuppressWarnings("unused")
private static final String TAG = DownloadScheduler.class.getSimpleName();
public static final String TAG_WORK_RUN_ALL_TYPE = "run_all";
public static final String TAG_WORK_RESTORE_DOWNLOADS_TYPE = "restore_downloads";
public static final String TAG_WORK_RUN_TYPE = "run";
public static final String TAG_WORK_GET_AND_RUN_TYPE = "get_and_run";
public static final String TAG_WORK_RESCHEDULE_TYPE = "reschedule";
/*
* The time between a failure and the first retry after an IOException.
* Each subsequent retry grows exponentially, doubling each time.
* The time is in seconds
*/
private static final int RETRY_FIRST_DELAY = 30;
private static Random random = new Random();
/*
* Run unique work for starting download.
* If there is existing pending (uncompleted) work, cancel it
*/
public static void run(@NonNull Context appContext, @NonNull DownloadInfo info)
{
String downloadTag = getDownloadTag(info.id);
Data data = new Data.Builder()
.putString(RunDownloadWorker.TAG_ID, info.id.toString())
.build();
OneTimeWorkRequest work = new OneTimeWorkRequest.Builder(RunDownloadWorker.class)
.setInputData(data)
.setConstraints(getConstraints(appContext, info))
.setInitialDelay(getInitialDelay(info), TimeUnit.MILLISECONDS)
.addTag(TAG_WORK_RUN_TYPE)
.addTag(downloadTag)
.build();
WorkManager.getInstance(appContext).enqueueUniqueWork(downloadTag,
ExistingWorkPolicy.REPLACE, work);
}
public static void run(@NonNull Context appContext, @NonNull UUID id)
{
Data data = new Data.Builder()
.putString(GetAndRunDownloadWorker.TAG_ID, id.toString())
.build();
OneTimeWorkRequest work = new OneTimeWorkRequest.Builder(GetAndRunDownloadWorker.class)
.setInputData(data)
.addTag(TAG_WORK_GET_AND_RUN_TYPE)
.build();
WorkManager.getInstance(appContext).enqueue(work);
}
public static void undone(@NonNull Context context, @NonNull DownloadInfo info)
{
WorkManager.getInstance(context).cancelAllWorkByTag(getDownloadTag(info.id));
}
public static void rescheduleAll(@NonNull Context appContext)
{
OneTimeWorkRequest work = new OneTimeWorkRequest.Builder(RescheduleAllWorker.class)
.addTag(TAG_WORK_RESCHEDULE_TYPE)
.build();
WorkManager.getInstance(appContext).enqueue(work);
}
public static void runAll(@NonNull Context appContext, boolean ignorePaused)
{
Data data = new Data.Builder()
.putBoolean(RunAllWorker.TAG_IGNORE_PAUSED, ignorePaused)
.build();
OneTimeWorkRequest work = new OneTimeWorkRequest.Builder(RunAllWorker.class)
.setInputData(data)
.addTag(TAG_WORK_RUN_ALL_TYPE)
.build();
WorkManager.getInstance(appContext).enqueue(work);
}
/*
* Run stopped (and with running status) downloads after starting app
*/
public static void restoreDownloads(@NonNull Context appContext)
{
OneTimeWorkRequest work = new OneTimeWorkRequest.Builder(RestoreDownloadsWorker.class)
.addTag(TAG_WORK_RESTORE_DOWNLOADS_TYPE)
.build();
WorkManager.getInstance(appContext).enqueue(work);
}
public static String getDownloadTag(UUID downloadId)
{
return TAG_WORK_RUN_TYPE + ":" + downloadId;
}
public static String extractDownloadIdFromTag(String tag)
{
return tag.substring(tag.indexOf(":") + 1);
}
private static Constraints getConstraints(Context context, DownloadInfo info)
{
SettingsRepository pref = RepositoryHelper.getSettingsRepository(context);
NetworkType netType = NetworkType.CONNECTED;
boolean onlyCharging = pref.onlyCharging();
boolean batteryControl = pref.batteryControl();
if (pref.enableRoaming())
netType = NetworkType.NOT_ROAMING;
if (info != null && info.unmeteredConnectionsOnly || pref.unmeteredConnectionsOnly())
netType = NetworkType.UNMETERED;
return new Constraints.Builder()
.setRequiredNetworkType(netType)
.setRequiresCharging(onlyCharging)
.setRequiresBatteryNotLow(batteryControl)
.build();
}
/*
* Return initial delay in milliseconds required before this download is
* allowed to start again
*/
private static long getInitialDelay(DownloadInfo info)
{
if (info.statusCode == StatusCode.STATUS_WAITING_TO_RETRY) {
long now = System.currentTimeMillis();
long startAfter;
if (info.retryAfter > 0) {
startAfter = info.lastModify + fuzzDelay(info.retryAfter);
} else {
final long delay = (RETRY_FIRST_DELAY * DateUtils.SECOND_IN_MILLIS *
(1 << (info.numFailed - 1)));
startAfter = info.lastModify + fuzzDelay(delay);
}
return Math.max(0, startAfter - now);
} else {
return 0;
}
}
/*
* Add random fuzz to the given delay so it's anywhere between 1-1.5x the
* requested delay.
*/
private static long fuzzDelay(long delay)
{
return delay + random.nextInt((int)(delay / 2));
}
}
|
gluckzhang/besu
|
consensus/qbft/src/test/java/org/hyperledger/besu/consensus/qbft/validator/TransactionValidatorProviderTest.java
|
<filename>consensus/qbft/src/test/java/org/hyperledger/besu/consensus/qbft/validator/TransactionValidatorProviderTest.java
/*
* Copyright ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.consensus.qbft.validator;
import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hyperledger.besu.consensus.qbft.validator.ValidatorTestUtils.createContractForkSpec;
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryBlockchain;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import org.hyperledger.besu.config.QbftConfigOptions;
import org.hyperledger.besu.consensus.common.ForksSchedule;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.chain.MutableBlockchain;
import org.hyperledger.besu.ethereum.core.AddressHelpers;
import org.hyperledger.besu.ethereum.core.Block;
import org.hyperledger.besu.ethereum.core.BlockBody;
import org.hyperledger.besu.ethereum.core.BlockHeaderTestFixture;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import com.google.common.collect.Lists;
import org.apache.tuweni.bytes.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class TransactionValidatorProviderTest {
@Mock private ValidatorContractController validatorContractController;
protected MutableBlockchain blockChain;
protected Block genesisBlock;
protected Block block_1;
protected Block block_2;
private Block block_3;
private ForksSchedule<QbftConfigOptions> forksSchedule;
private final BlockHeaderTestFixture headerBuilder = new BlockHeaderTestFixture();
private static final Address CONTRACT_ADDRESS = Address.fromHexString("1");
@Before
public void setup() {
forksSchedule = new ForksSchedule<>(List.of(createContractForkSpec(0L, CONTRACT_ADDRESS)));
genesisBlock = createEmptyBlock(0, Hash.ZERO);
blockChain = createInMemoryBlockchain(genesisBlock);
headerBuilder.extraData(Bytes.wrap(new byte[32]));
block_1 = createEmptyBlock(1, genesisBlock.getHeader().getHash());
block_2 = createEmptyBlock(2, block_1.getHeader().getHash());
block_3 = createEmptyBlock(3, block_2.getHeader().getHash());
blockChain.appendBlock(block_1, emptyList());
blockChain.appendBlock(block_2, emptyList());
blockChain.appendBlock(block_3, emptyList());
}
private Block createEmptyBlock(final long blockNumber, final Hash parentHash) {
headerBuilder.number(blockNumber).parentHash(parentHash).coinbase(AddressHelpers.ofValue(0));
return new Block(headerBuilder.buildHeader(), new BlockBody(emptyList(), emptyList()));
}
@Test
public void validatorsAfterBlockAreRetrievedUsingContractController() {
final List<Address> validatorsAt2 =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
final List<Address> validatorsAt3 =
Lists.newArrayList(
Address.fromHexString("5"), Address.fromHexString("6"), Address.fromHexString("7"));
when(validatorContractController.getValidators(2, CONTRACT_ADDRESS)).thenReturn(validatorsAt2);
when(validatorContractController.getValidators(3, CONTRACT_ADDRESS)).thenReturn(validatorsAt3);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
assertThat(validatorProvider.getValidatorsAfterBlock(block_2.getHeader()))
.containsExactlyElementsOf(validatorsAt2);
assertThat(validatorProvider.getValidatorsAfterBlock(block_3.getHeader()))
.containsExactlyElementsOf(validatorProvider.getValidatorsAfterBlock(block_3.getHeader()));
}
@Test
public void validatorsForBlockAreRetrievedUsingContractController() {
final List<Address> validatorsAt2 =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
final List<Address> validatorsAt3 =
Lists.newArrayList(
Address.fromHexString("5"), Address.fromHexString("6"), Address.fromHexString("7"));
when(validatorContractController.getValidators(2, CONTRACT_ADDRESS)).thenReturn(validatorsAt2);
when(validatorContractController.getValidators(3, CONTRACT_ADDRESS)).thenReturn(validatorsAt3);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
assertThat(validatorProvider.getValidatorsForBlock(block_2.getHeader()))
.containsExactlyElementsOf(validatorsAt2);
assertThat(validatorProvider.getValidatorsForBlock(block_3.getHeader()))
.containsExactlyElementsOf(validatorProvider.getValidatorsForBlock(block_3.getHeader()));
}
@Test
public void validatorsAtHeadAreRetrievedUsingContractController() {
final List<Address> validators =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
when(validatorContractController.getValidators(3, CONTRACT_ADDRESS)).thenReturn(validators);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
assertThat(validatorProvider.getValidatorsAtHead()).containsExactlyElementsOf(validators);
}
@Test
public void validatorsAtHeadContractCallIsCached() {
final List<Address> validators =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
when(validatorContractController.getValidators(3, CONTRACT_ADDRESS)).thenReturn(validators);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
assertThat(validatorProvider.getValidatorsAtHead()).containsExactlyElementsOf(validators);
verify(validatorContractController).getValidators(3, CONTRACT_ADDRESS);
assertThat(validatorProvider.getValidatorsAtHead()).containsExactlyElementsOf(validators);
verifyNoMoreInteractions(validatorContractController);
}
@Test
public void validatorsAfterBlockContractCallIsCached() {
final List<Address> validators =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
when(validatorContractController.getValidators(2, CONTRACT_ADDRESS)).thenReturn(validators);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
final Collection<Address> result =
validatorProvider.getValidatorsAfterBlock(block_2.getHeader());
assertThat(result).containsExactlyElementsOf(validators);
verify(validatorContractController).getValidators(2, CONTRACT_ADDRESS);
final Collection<Address> resultCached =
validatorProvider.getValidatorsAfterBlock(block_2.getHeader());
assertThat(resultCached).containsExactlyElementsOf(validators);
verifyNoMoreInteractions(validatorContractController);
}
@Test
public void getValidatorsAfterBlock_and_getValidatorsForBlock_useDifferentCaches() {
final List<Address> validators =
Lists.newArrayList(Address.fromHexString("5"), Address.fromHexString("6"));
when(validatorContractController.getValidators(2, CONTRACT_ADDRESS)).thenReturn(validators);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
validatorProvider.getValidatorsAfterBlock(block_2.getHeader()); // cache miss
verify(validatorContractController, times(1)).getValidators(2, CONTRACT_ADDRESS);
validatorProvider.getValidatorsAfterBlock(block_2.getHeader()); // cache hit
verifyNoMoreInteractions(validatorContractController);
validatorProvider.getValidatorsForBlock(block_2.getHeader()); // cache miss
verify(validatorContractController, times(2)).getValidators(2, CONTRACT_ADDRESS);
validatorProvider.getValidatorsAfterBlock(block_2.getHeader()); // cache hit
verifyNoMoreInteractions(validatorContractController);
}
@Test
public void validatorsMustBeSorted() {
final List<Address> validators =
Lists.newArrayList(
Address.fromHexString("9"), Address.fromHexString("8"), Address.fromHexString("7"));
when(validatorContractController.getValidators(3, CONTRACT_ADDRESS)).thenReturn(validators);
final TransactionValidatorProvider validatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
final Collection<Address> result = validatorProvider.getValidatorsAtHead();
final List<Address> expectedValidators =
validators.stream().sorted().collect(Collectors.toList());
assertThat(result).containsExactlyElementsOf(expectedValidators);
}
@Test
public void voteProviderIsEmpty() {
TransactionValidatorProvider transactionValidatorProvider =
new TransactionValidatorProvider(blockChain, validatorContractController, forksSchedule);
assertThat(transactionValidatorProvider.getVoteProviderAtHead()).isEmpty();
}
}
|
bitesandbytes/Projekt-Panda
|
ClientGUI/src/clientCoreThreads/FileReceiverThread.java
|
package clientCoreThreads;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.RandomAccessFile;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import javax.swing.JOptionPane;
import javax.swing.JTextArea;
import coreClient.Global;
public class FileReceiverThread extends Thread
{
private static int listenPort = Global.clientFilePort;
private static String destFolder = Global.defaultFileSavePath;
private JTextArea msgBox;
private String filename;
public FileReceiverThread(JTextArea textBox)
{
super();
this.msgBox = textBox;
}
public void run()
{
ServerSocketChannel serverSocketChannel = null;
SocketChannel socketChannel = null;
Global.Log("Opening serverSocketChannel | NewReceiverThread");
try
{
serverSocketChannel = ServerSocketChannel.open();
serverSocketChannel.socket()
.bind(new InetSocketAddress(listenPort));
}
catch (IOException e1)
{
Global.Log("Unable to bind | NewReceiverThread.");
writeError();
return;
}
Global.Log("severSocketChannel Bind successful | NewReceiverThread");
while (true)
{
try
{
socketChannel = serverSocketChannel.accept();
getFilename(socketChannel);
readFromSocket(socketChannel, filename);
}
catch (IOException | ClassNotFoundException e)
{
Global.Log("Dropped a file transfer. Request transfer again.");
writeError();
continue;
}
writeSuccess();
}
}
private void writeSuccess()
{
JOptionPane.showMessageDialog(Global.window.frmChatServerV,
"File received : " + filename + "\nFull path: " + destFolder
+ filename);
}
private void getFilename(SocketChannel socketChannel) throws IOException,
ClassNotFoundException
{
Global.Log("Reading filename | NewFileReceiveThread.");
ObjectInputStream ois = new ObjectInputStream(socketChannel.socket()
.getInputStream());
Global.Log("Got OIS | NewFileReceiveThread.");
filename = (String) ois.readObject();
Global.Log("filename = " + filename + " | NewFileReceiveThread.");
}
private void readFromSocket(SocketChannel socketChannel, String filename)
throws IOException
{
Global.Log("Receiving file " + filename + " | NewFileReceiveThread.");
RandomAccessFile file = null;
file = new RandomAccessFile(destFolder + filename, "rw");
ByteBuffer buffer = ByteBuffer.allocate(1024);
FileChannel fileChannel = file.getChannel();
while (socketChannel.read(buffer) > 0)
{
buffer.flip();
fileChannel.write(buffer);
buffer.clear();
}
Global.Log("File receive complete| NewFileReceiveThread.");
fileChannel.close();
socketChannel.close();
file.close();
}
private void writeError()
{
synchronized (msgBox)
{
msgBox.append("\n" + filename + " transfer failed.");
}
return;
}
}
|
SCSLaboratory/BearOS
|
usr/include/sbin/libnfs/libnfs-raw-rquota.h
|
/*
* Please do not edit this file.
* It was generated using rpcgen.
*/
#ifndef _RQUOTA_H_RPCGEN
#define _RQUOTA_H_RPCGEN
#include <nfsc/libnfs-zdr.h>
#ifdef __cplusplus
extern "C" {
#endif
#define RQUOTAPATHLEN 1024
enum rquotastat {
RQUOTA_OK = 1,
RQUOTA_NOQUOTA = 2,
RQUOTA_EPERM = 3,
};
typedef enum rquotastat rquotastat;
typedef char *exportpath;
struct GETQUOTA1args {
exportpath export;
int uid;
};
typedef struct GETQUOTA1args GETQUOTA1args;
enum quotatype {
RQUOTA_TYPE_UID = 0,
RQUOTA_TYPE_GID = 1,
};
typedef enum quotatype quotatype;
struct GETQUOTA2args {
exportpath export;
quotatype type;
int uid;
};
typedef struct GETQUOTA2args GETQUOTA2args;
struct GETQUOTA1res_ok {
int bsize;
int active;
int bhardlimit;
int bsoftlimit;
int curblocks;
int fhardlimit;
int fsoftlimit;
int curfiles;
int btimeleft;
int ftimeleft;
};
typedef struct GETQUOTA1res_ok GETQUOTA1res_ok;
struct GETQUOTA1res {
rquotastat status;
union {
GETQUOTA1res_ok quota;
} GETQUOTA1res_u;
};
typedef struct GETQUOTA1res GETQUOTA1res;
#define RQUOTA_PROGRAM 100011
#define RQUOTA_V1 1
#if defined(__STDC__) || defined(__cplusplus)
#define RQUOTA1_NULL 0
extern void * rquota1_null_1(void *, CLIENT *);
extern void * rquota1_null_1_svc(void *, struct svc_req *);
#define RQUOTA1_GETQUOTA 1
extern GETQUOTA1res * rquota1_getquota_1(GETQUOTA1args *, CLIENT *);
extern GETQUOTA1res * rquota1_getquota_1_svc(GETQUOTA1args *, struct svc_req *);
#define RQUOTA1_GETACTIVEQUOTA 2
extern GETQUOTA1res * rquota1_getactivequota_1(GETQUOTA1args *, CLIENT *);
extern GETQUOTA1res * rquota1_getactivequota_1_svc(GETQUOTA1args *, struct svc_req *);
extern int rquota_program_1_freeresult (SVCXPRT *, zdrproc_t, caddr_t);
#else /* K&R C */
#define RQUOTA1_NULL 0
extern void * rquota1_null_1();
extern void * rquota1_null_1_svc();
#define RQUOTA1_GETQUOTA 1
extern GETQUOTA1res * rquota1_getquota_1();
extern GETQUOTA1res * rquota1_getquota_1_svc();
#define RQUOTA1_GETACTIVEQUOTA 2
extern GETQUOTA1res * rquota1_getactivequota_1();
extern GETQUOTA1res * rquota1_getactivequota_1_svc();
extern int rquota_program_1_freeresult ();
#endif /* K&R C */
#define RQUOTA_V2 2
#if defined(__STDC__) || defined(__cplusplus)
#define RQUOTA2_NULL 0
extern void * rquota2_null_2(void *, CLIENT *);
extern void * rquota2_null_2_svc(void *, struct svc_req *);
#define RQUOTA2_GETQUOTA 1
extern GETQUOTA1res * rquota2_getquota_2(GETQUOTA2args *, CLIENT *);
extern GETQUOTA1res * rquota2_getquota_2_svc(GETQUOTA2args *, struct svc_req *);
#define RQUOTA2_GETACTIVEQUOTA 2
extern GETQUOTA1res * rquota2_getactivequota_2(GETQUOTA2args *, CLIENT *);
extern GETQUOTA1res * rquota2_getactivequota_2_svc(GETQUOTA2args *, struct svc_req *);
extern int rquota_program_2_freeresult (SVCXPRT *, zdrproc_t, caddr_t);
#else /* K&R C */
#define RQUOTA2_NULL 0
extern void * rquota2_null_2();
extern void * rquota2_null_2_svc();
#define RQUOTA2_GETQUOTA 1
extern GETQUOTA1res * rquota2_getquota_2();
extern GETQUOTA1res * rquota2_getquota_2_svc();
#define RQUOTA2_GETACTIVEQUOTA 2
extern GETQUOTA1res * rquota2_getactivequota_2();
extern GETQUOTA1res * rquota2_getactivequota_2_svc();
extern int rquota_program_2_freeresult ();
#endif /* K&R C */
/* the zdr functions */
#if defined(__STDC__) || defined(__cplusplus)
extern bool_t zdr_rquotastat (ZDR *, rquotastat*);
extern bool_t zdr_exportpath (ZDR *, exportpath*);
extern bool_t zdr_GETQUOTA1args (ZDR *, GETQUOTA1args*);
extern bool_t zdr_quotatype (ZDR *, quotatype*);
extern bool_t zdr_GETQUOTA2args (ZDR *, GETQUOTA2args*);
extern bool_t zdr_GETQUOTA1res_ok (ZDR *, GETQUOTA1res_ok*);
extern bool_t zdr_GETQUOTA1res (ZDR *, GETQUOTA1res*);
#else /* K&R C */
extern bool_t zdr_rquotastat ();
extern bool_t zdr_exportpath ();
extern bool_t zdr_GETQUOTA1args ();
extern bool_t zdr_quotatype ();
extern bool_t zdr_GETQUOTA2args ();
extern bool_t zdr_GETQUOTA1res_ok ();
extern bool_t zdr_GETQUOTA1res ();
#endif /* K&R C */
#ifdef __cplusplus
}
#endif
#endif /* !_RQUOTA_H_RPCGEN */
|
LKG/lore
|
lore-user/user-core/src/main/java/im/heart/usercore/service/impl/FrameUserConnectServiceImpl.java
|
package im.heart.usercore.service.impl;
import com.google.common.collect.Sets;
import im.heart.core.plugins.persistence.DynamicSpecifications;
import im.heart.core.plugins.persistence.SearchFilter;
import im.heart.core.service.impl.CommonServiceImpl;
import im.heart.usercore.entity.FrameUserConnect;
import im.heart.usercore.repository.FrameUserConnectRepository;
import im.heart.usercore.service.FrameUserConnectService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.math.BigInteger;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
/**
*
* @author gg
* @Desc : 用户账号绑定关联表 Service
*/
@Service(value = FrameUserConnectService.BEAN_NAME)
@Transactional(propagation = Propagation.SUPPORTS,rollbackFor = Exception.class)
public class FrameUserConnectServiceImpl extends CommonServiceImpl<FrameUserConnect, BigInteger> implements FrameUserConnectService {
@Autowired
private FrameUserConnectRepository frameUserConnectRepository;
@Override
public Optional<FrameUserConnect> findByOpenIdAndType(String openId, String identityType) {
final Collection<SearchFilter> filters = Sets.newHashSet();
filters.add(new SearchFilter("openId", SearchFilter.Operator.EQ, openId));
filters.add(new SearchFilter("identityType", SearchFilter.Operator.EQ, identityType));
Specification<FrameUserConnect> spec = DynamicSpecifications.bySearchFilter(filters, FrameUserConnect.class);
Optional<FrameUserConnect> optional=this.frameUserConnectRepository.findOne(spec);
return optional;
}
@Override
public Page<FrameUserConnect> findAllByUserIdAndType(BigInteger userId, String identityType, Pageable pageable) {
final Collection<SearchFilter> filters = Sets.newHashSet();
filters.add(new SearchFilter("userId", SearchFilter.Operator.EQ, userId));
filters.add(new SearchFilter("identityType", SearchFilter.Operator.EQ, identityType));
Specification<FrameUserConnect> spec = DynamicSpecifications.bySearchFilter(filters, FrameUserConnect.class);
return this.frameUserConnectRepository.findAll(spec,pageable);
}
@Override
public List<FrameUserConnect> findAllByUserIdAndType(BigInteger userId, String identityType) {
final Collection<SearchFilter> filters = Sets.newHashSet();
filters.add(new SearchFilter("userId", SearchFilter.Operator.EQ, userId));
filters.add(new SearchFilter("identityType", SearchFilter.Operator.EQ, identityType));
Specification<FrameUserConnect> spec = DynamicSpecifications.bySearchFilter(filters, FrameUserConnect.class);
return this.frameUserConnectRepository.findAll(spec);
}
}
|
HugoGGuerrier/mini-ML4J
|
src/mml4j/main/typist/equation_system/nodes/ListNode.java
|
package mml4j.main.typist.equation_system.nodes;
import mml4j.main.typist.equation_system.merging_strategy.ConstructorStrategy;
import mml4j.main.typist.equation_system.nodes.abstracts.ConstructorNode;
import mml4j.main.typist.equation_system.nodes.abstracts.Node;
import mml4j.main.typist.interfaces.INodeContained;
import mml4j.main.typist.interfaces.INodeGenerator;
import mml4j.main.typist.utils.TypeTranslator;
import mml4j.main.typist.types.abstracts.Type;
import mml4j.main.typist.interfaces.INodeContainer;
import mml4j.main.typist.utils.Instantiater;
import java.util.LinkedList;
import java.util.List;
/**
* This class represent a list type before its unification
*
* @author <NAME>
*/
public class ListNode extends ConstructorNode implements INodeContainer {
// ----- Attributes -----
/** The type inside the list */
protected Node listType;
// ----- Constructors -----
/**
* Create a new list type with the wanted content type
*
* @param type The type that is inside the list
*/
public ListNode(Node type) {
super(new ConstructorStrategy());
this.listType = type;
type.addContainer(this);
}
// ----- Getters -----
public Node getListType() {
return listType;
}
/** @see ConstructorNode#getContent() */
@Override
public List<Node> getContent() {
List<Node> res = new LinkedList<>();
res.add(listType);
return res;
}
// ----- Setters -----
public void setListType(Node listType) {
listType.addContainer(this);
if(this.listType != null) this.listType.removeContainer(this);
this.listType = listType;
}
// ----- Container methods -----
/** @see INodeContainer#replaceContained(INodeContained, INodeContained) */
@Override
public void replaceContained(INodeContained oldCont, INodeContained newCont) {
if(listType == oldCont) setListType((Node) newCont);
}
// ----- Override methods -----
@Override
public String toString() {
return "[" + listType + "]";
}
// ----- Class methods -----
/** @see Node#isConstructor() */
@Override
public boolean isConstructor() {
return true;
}
/** @see Node#contains(Node) */
@Override
public boolean contains(Node other) {
return listType.contains(other);
}
/** @see Node#clone(INodeGenerator) */
@Override
public Node clone(INodeGenerator generator) {
if(generator.hasCorrespondence(this)) return this;
return new ListNode(listType.clone(generator));
}
/** @see Node#acceptTranslator(TypeTranslator) */
@Override
public Type acceptTranslator(TypeTranslator translator) {
return translator.translate(this);
}
/** @see Node#acceptInstantiater(Instantiater) */
@Override
public Node acceptInstantiater(Instantiater instantiater) {
return instantiater.instantiate(this);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.