repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
BryanRiel/pyre
tests/pyre/calc/expression_resolution.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # <NAME>. aïvázis # orthologue # (c) 1998-2018 all rights reserved # """ Verify that node resolution works """ def test(): import pyre.calc # set up the model model = pyre.calc.model() # set up an expression with an unresolved node model["price"] = model.expression(value="2*{production}") # ask for the price try: model["price"] assert False except model.UnresolvedNodeError as error: unresolved = model.retrieve(name="production") assert error.node is unresolved assert error.name == "production" # resolve the node p = 80. model["production"] = p # ask for the price again assert model["production"] == p assert model["price"] == 2*p # make a change p = 100. model["production"] = p # chek again assert model["production"] == p assert model["price"] == 2*p # force a node substitution m = 60 model["materials"] = m model["production"] = model.expression(value="2*{materials}") # chek again assert model["materials"] == m assert model["production"] == 2*m assert model["price"] == 4*m return # main if __name__ == "__main__": # skip pyre initialization since we don't rely on the executive pyre_noboot = True # run the test test() # end of file
sifbuilder/rxmods
src/mods/rxTodos/package.js
<gh_stars>1-10 const mod = { "name": "rxtodos", "description": "redux todos component built as composable module. Based on Todos example included in https://github.com/tj/frontend-boilerplate ", "authos": "sibuilder based on work by <NAME>, <NAME>", "license": "MIT", "version": "0.1.0" } export default mod
Gorac11/OOPProject
h2/src/main/org/h2/expression/ConditionIn.java
<reponame>Gorac11/OOPProject<filename>h2/src/main/org/h2/expression/ConditionIn.java<gh_stars>0 /* * Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License, * Version 1.0, and under the Eclipse Public License, Version 1.0 * (http://h2database.com/html/license.html). * Initial Developer: H2 Group */ package org.h2.expression; import java.util.ArrayList; import org.h2.engine.Database; import org.h2.engine.Session; import org.h2.index.IndexCondition; import org.h2.table.ColumnResolver; import org.h2.table.TableFilter; import org.h2.util.StatementBuilder; import org.h2.value.Value; import org.h2.value.ValueBoolean; import org.h2.value.ValueNull; /** * An 'in' condition with a list of values, as in WHERE NAME IN(...) */ public class ConditionIn extends Condition { private final Database database; private Expression left; private final ArrayList<Expression> valueList; private int queryLevel; /** * Create a new IN(..) condition. * * @param database the database * @param left the expression before IN * @param values the value list (at least one element) */ public ConditionIn(Database database, Expression left, ArrayList<Expression> values) { this.database = database; this.left = left; this.valueList = values; } @Override public Value getValue(Session session) { Value l = left.getValue(session); if (l == ValueNull.INSTANCE) { return l; } boolean result = false; boolean hasNull = false; for (Expression e : valueList) { Value r = e.getValue(session); if (r == ValueNull.INSTANCE) { hasNull = true; } else { r = r.convertTo(l.getType()); result = Comparison.compareNotNull(database, l, r, Comparison.EQUAL); if (result) { break; } } } if (!result && hasNull) { return ValueNull.INSTANCE; } return ValueBoolean.get(result); } @Override public void mapColumns(ColumnResolver resolver, int level) { left.mapColumns(resolver, level); for (Expression e : valueList) { e.mapColumns(resolver, level); } this.queryLevel = Math.max(level, this.queryLevel); } @Override public 
Expression optimize(Session session) { left = left.optimize(session); boolean constant = left.isConstant(); if (constant && left == ValueExpression.getNull()) { return left; } boolean allValuesConstant = true; boolean allValuesNull = true; int size = valueList.size(); for (int i = 0; i < size; i++) { Expression e = valueList.get(i); e = e.optimize(session); if (e.isConstant() && e.getValue(session) != ValueNull.INSTANCE) { allValuesNull = false; } if (allValuesConstant && !e.isConstant()) { allValuesConstant = false; } valueList.set(i, e); } if (constant && allValuesConstant) { return ValueExpression.get(getValue(session)); } if (size == 1) { Expression right = valueList.get(0); Expression expr = new Comparison(session, Comparison.EQUAL, left, right); expr = expr.optimize(session); return expr; } if (allValuesConstant && !allValuesNull) { int leftType = left.getType(); if (leftType == Value.UNKNOWN) { return this; } Expression expr = new ConditionInConstantSet(session, left, valueList); expr = expr.optimize(session); return expr; } return this; } @Override public void createIndexConditions(Session session, TableFilter filter) { if (!(left instanceof ExpressionColumn)) { return; } ExpressionColumn l = (ExpressionColumn) left; if (filter != l.getTableFilter()) { return; } if (session.getDatabase().getSettings().optimizeInList) { ExpressionVisitor visitor = ExpressionVisitor.getNotFromResolverVisitor(filter); for (Expression e : valueList) { if (!e.isEverything(visitor)) { return; } } filter.addIndexCondition(IndexCondition.getInList(l, valueList)); return; } } @Override public void setEvaluatable(TableFilter tableFilter, boolean b) { left.setEvaluatable(tableFilter, b); for (Expression e : valueList) { e.setEvaluatable(tableFilter, b); } } @Override public String getSQL() { StatementBuilder buff = new StatementBuilder("("); buff.append(left.getSQL()).append(" IN("); for (Expression e : valueList) { buff.appendExceptFirst(", "); buff.append(e.getSQL()); } return 
buff.append("))").toString(); } @Override public void updateAggregate(Session session) { left.updateAggregate(session); for (Expression e : valueList) { e.updateAggregate(session); } } @Override public boolean isEverything(ExpressionVisitor visitor) { if (!left.isEverything(visitor)) { return false; } return areAllValues(visitor); } private boolean areAllValues(ExpressionVisitor visitor) { for (Expression e : valueList) { if (!e.isEverything(visitor)) { return false; } } return true; } @Override public int getCost() { int cost = left.getCost(); for (Expression e : valueList) { cost += e.getCost(); } return cost; } /** * Add an additional element if possible. Example: given two conditions * A IN(1, 2) OR A=3, the constant 3 is added: A IN(1, 2, 3). * * @param other the second condition * @return null if the condition was not added, or the new condition */ Expression getAdditional(Comparison other) { Expression add = other.getIfEquals(left); if (add != null) { valueList.add(add); return this; } return null; } }
abu-bakar-nu/nautilus
include/dev/virtqueue.h
<filename>include/dev/virtqueue.h #ifndef VIRTQUEUE_H #define VIRTQUEUE_H /* An interface for efficient virtio implementation. * * This header is BSD licensed so anyone can use the definitions * to implement compatible drivers/servers. * * Copyright 2007, 2009, IBM Corporation * Copyright 2011, Red Hat, Inc * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of IBM nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ‘‘AS IS’’ AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL IBM OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. */ #include <stdint.h> typedef uint64_t le64; typedef uint32_t le32; typedef uint16_t le16; typedef uint8_t le8; /* This marks a buffer as continuing via the next field. 
*/ #define VIRTQ_DESC_F_NEXT 1 /* This marks a buffer as write-only (otherwise read-only). */ #define VIRTQ_DESC_F_WRITE 2 /* This means the buffer contains a list of buffer descriptors. */ #define VIRTQ_DESC_F_INDIRECT 4 /* The device uses this in used->flags to advise the driver: don’t kick me * when you add a buffer. It’s unreliable, so it’s simply an * optimization. */ #define VIRTQ_USED_F_NO_NOTIFY 1 /* The driver uses this in avail->flags to advise the device: don’t * interrupt me when you consume a buffer. It’s unreliable, so it’s * simply an optimization. */ #define VIRTQ_AVAIL_F_NO_INTERRUPT 1 /* Device-independent feature bits. */ /* Force the device to interrupt if a virtqueue becomes empty (legacy). */ #define VIRTIO_F_NOTIFY_ON_EMPTY 24 /* Arbitrary descriptor layouts (legacy). */ #define VIRTIO_F_ANY_LAYOUT 27 /* Support for indirect descriptors */ #define VIRTIO_F_INDIRECT_DESC 28 /* Support for avail_event and used_event fields */ #define VIRTIO_F_EVENT_IDX 29 /* Compliant with standard (non-legacy) interface. */ #define VIRTIO_F_VERSION_1 32 /* Virtqueue descriptors: 16 bytes. * These can chain together via "next". */ struct virtq_desc { /* Address (guest-physical). */ le64 addr; /* Length. */ le32 len; /* The flags as indicated above. */ le16 flags; /* We chain unused descriptors via this, too */ le16 next; }; struct virtq_avail { le16 flags; le16 idx; le16 ring[]; /* Only if VIRTIO_F_EVENT_IDX: le16 used_event; */ }; /* le32 is used here for ids for padding reasons. */ struct virtq_used_elem { /* Index of start of used descriptor chain. */ le32 id; /* Total length of the descriptor chain which was written to. 
*/ le32 len; }; struct virtq_used { le16 flags; le16 idx; struct virtq_used_elem ring[]; /* Only if VIRTIO_F_EVENT_IDX: le16 avail_event; */ }; struct virtq { uint16_t qsz; struct virtq_desc *desc; struct virtq_avail *avail; struct virtq_used *used; }; static inline int virtq_need_event(uint16_t event_idx, uint16_t new_idx, uint16_t old_idx) { return (uint16_t)(new_idx - event_idx - 1) < (uint16_t)(new_idx - old_idx); } /* Get location of event indices (only with VIRTIO_F_EVENT_IDX) */ static inline le16 *virtq_used_event(struct virtq *vq) { /* For backwards compat, used event index is at *end* of avail ring. */ return &vq->avail->ring[vq->qsz]; } static inline le16 *virtq_avail_event(struct virtq *vq) { /* For backwards compat, avail event index is at *end* of used ring. */ return (le16 *)&vq->used->ring[vq->qsz]; } #endif /* VIRTQUEUE_H */
yoshiekiura/algopeatio
vendor/bundle/ruby/2.2.0/gems/rails-i18n-4.0.1/rails/pluralization/sl.rb
<filename>vendor/bundle/ruby/2.2.0/gems/rails-i18n-4.0.1/rails/pluralization/sl.rb module RailsI18n module Pluralization module Slovenian def self.rule lambda do |n| mod100 = n % 100 if mod100 == 1 :one elsif mod100 == 2 :two elsif mod100 == 3 || mod100 == 4 :few else :other end end end end end end { :sl => { :'i18n' => { :plural => { :keys => [:one, :two, :few, :other], :rule => RailsI18n::Pluralization::Slovenian.rule }}}}
49View/event_horizon
native/core/math/quad_vertices.cpp
#include "quad_vertices.h" QuadVertices3::QuadVertices3( const Rect2f& r, const AABB& projection ) { int lda = projection.leastDominantAxis(); switch ( lda ) { case 0: mVertices[0] = { 0.0f, r.bottomRight() }; mVertices[1] = { projection.calcWidth(), r.topRight() }; mVertices[2] = { 0.0f, r.bottomLeft() }; mVertices[3] = { projection.calcWidth(), r.topLeft() }; break; case 1: mVertices[0] = { r.bottomRight().x(), 0.0f, r.bottomRight().y() }; mVertices[1] = { r.topRight().x(), projection.calcHeight(), r.topRight().y() }; mVertices[2] = { r.bottomLeft().x(), 0.0f, r.bottomLeft().y() }; mVertices[3] = { r.topLeft().x(), projection.calcHeight(), r.topLeft().y() }; break; case 2: mVertices[0] = { r.bottomRight(), 0.0f }; mVertices[1] = { r.topRight() ,projection.calcDepth() }; mVertices[2] = { r.bottomLeft() , 0.0f }; mVertices[3] = { r.topLeft() ,projection.calcDepth() }; break; default: break; } for ( auto t = 0; t < 4; t++ ) { mVertices[t] += projection.minPoint(); } } const QuadVertices4 QuadVertices4::QUAD_VERTICES( Vector4f( -1.0f, 1.0f, 0.0f, 1.0f ), Vector4f( 1.0f, 1.0f, 0.0f, 1.0f ), Vector4f( -1.0f, -1.0f, 0.0f, 1.0f ), Vector4f( 1.0f, -1.0f, 0.0f, 1.0f ) ); const QuadVertices3 QuadVertices3::QUAD_VERTICES_X( Vector3f( 0.0f, -1.0f, 1.0f ), Vector3f( 0.0f, 1.0f, 1.0f ), Vector3f( 0.0f, -1.0f, -1.0f ), Vector3f( 0.0f, 1.0f, -1.0f ) ); const QuadVertices3 QuadVertices3::QUAD_VERTICES_Y( Vector3f( -1.0f, 0.0f, 1.0f ), Vector3f( 1.0f, 0.0f, 1.0f ), Vector3f( -1.0f, 0.0f, -1.0f ), Vector3f( 1.0f, 0.0f, -1.0f ) ); const QuadVertices3 QuadVertices3::QUAD_VERTICES_Z( Vector3f( -1.0f, 1.0f, 0.0f ), Vector3f( 1.0f, 1.0f, 0.0f ), Vector3f( -1.0f, -1.0f, 0.0f ), Vector3f( 1.0f, -1.0f, 0.0f ) ); // Texture coorrds const QuadVertices2 QuadVertices2::QUAD_TEX_COORDS_CENTERED( Vector2f( -.5f, -.5f ), Vector2f( .5f, -.5f ), Vector2f( -.5f, .5f ), Vector2f( .5f, .5f ) ); const QuadVertices2 QuadVertices2::QUAD_TEX_COORDS( Vector2f( 0.0f, 0.0f ), Vector2f( 1.0f, 0.0f ), Vector2f( 
0.0f, 1.0f ), Vector2f( 1.0f, 1.0f ) ); const QuadVertices2 QuadVertices2::QUAD_INV_TEX_COORDS( Vector2f( 0.0f, 1.0f ), Vector2f( 1.0f, 1.0f ), Vector2f( 0.0f, 0.0f ), Vector2f( 1.0f, 0.0f ) ); const QuadVertices2 QuadVertices2::QUAD_TEX_COORDS_MIRROR( Vector2f( 1.0f, 0.0f ), Vector2f( 0.0f, 0.0f ), Vector2f( 1.0f, 1.0f ), Vector2f( 0.0f, 1.0f ) ); const QuadVertices2 QuadVertices2::QUAD_INV_TEX_COORDS_MIRROR( Vector2f( 1.0f, 1.0f ), Vector2f( 0.0f, 1.0f ), Vector2f( 1.0f, 0.0f ), Vector2f( 0.0f, 0.0f ) ); const QuadVertices2 QuadVertices2::QUAD_TEX_STRIP_COORDS( Vector2f( 1.0f, 0.0f ), Vector2f( 1.0f, 1.0f ), Vector2f( 0.0f, 0.0f ), Vector2f( 0.0f, 1.0f ) ); const QuadVertices2 QuadVertices2::QUAD_TEX_STRIP_INV_Y_COORDS( Vector2f( 1.0f, 1.0f ), Vector2f( 1.0f, 0.0f ), Vector2f( 0.0f, 1.0f ), Vector2f( 0.0f, 0.0f ) ); const QuadVertices2 QuadVertices2::QUAD_TEX_STRIP_INV_COORDS( Vector2f( 0.0f, 0.0f ), Vector2f( 1.0f, 0.0f ), Vector2f( 0.0f, 1.0f ), Vector2f( 1.0f, 1.0f ) );
740326093/-
YBL365/Class/Profile/Section1/b_Order/b_OrderDetail/View/YBLOrderDetailTotalMoenyCell.h
// // YBLOrderDetailTotalMoenyCell.h // YC168 // // Created by 乔同新 on 2017/3/16. // Copyright © 2017年 乔同新. All rights reserved. // #import <UIKit/UIKit.h> @interface YBLOrderDetailTotalMoenyCell : UITableViewCell @property (nonatomic, strong) UILabel *totalMoneyLabel; @property (nonatomic, strong) UILabel *yunfeiLabel; + (CGFloat)getHi; @end
justice-code/star-map
extension/src/main/java/org/eddy/extension/ExtensionConfig.java
package org.eddy.extension; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.stereotype.Component; import java.util.HashMap; import java.util.Map; import java.util.Objects; /** * 扩展点获取 */ @Component @ConfigurationProperties("star") public class ExtensionConfig { private Map<String, String> keyName = new HashMap<>(); public void setProtocolFactory(String protocolFactory) { keyName.put("protocolFactory", protocolFactory); } public void setLoadBalance(String loadBalance) { keyName.put("loadBalance", loadBalance); } public void setRegistry(String registry) { keyName.put("registry", registry); } public void setEngine(String engine) { keyName.put("engine", engine); } public void setSelector(String selector) { keyName.put("selector", selector); } public String name(String key) { Objects.requireNonNull(key); if (! keyName.containsKey(key)) { throw new RuntimeException("do not find key:" + key + ", please allocation it first"); } return keyName.get(key); } public boolean contain(String key) { Objects.requireNonNull(key); return keyName.containsKey(key); } }
pulumi/pulumi-akamai
sdk/python/pulumi_akamai/app_sec_waf_mode.py
<gh_stars>1-10 # coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from . import _utilities __all__ = ['AppSecWafModeArgs', 'AppSecWafMode'] @pulumi.input_type class AppSecWafModeArgs: def __init__(__self__, *, config_id: pulumi.Input[int], mode: pulumi.Input[str], security_policy_id: pulumi.Input[str]): """ The set of arguments for constructing a AppSecWafMode resource. :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the WAF mode settings being modified. :param pulumi.Input[str] mode: . Specifies how Kona Rule Set rules are upgraded. Allowed values are: :param pulumi.Input[str] security_policy_id: . Unique identifier of the security policy associated with the WAF mode settings being modified. """ pulumi.set(__self__, "config_id", config_id) pulumi.set(__self__, "mode", mode) pulumi.set(__self__, "security_policy_id", security_policy_id) @property @pulumi.getter(name="configId") def config_id(self) -> pulumi.Input[int]: """ . Unique identifier of the security configuration associated with the WAF mode settings being modified. """ return pulumi.get(self, "config_id") @config_id.setter def config_id(self, value: pulumi.Input[int]): pulumi.set(self, "config_id", value) @property @pulumi.getter def mode(self) -> pulumi.Input[str]: """ . Specifies how Kona Rule Set rules are upgraded. Allowed values are: """ return pulumi.get(self, "mode") @mode.setter def mode(self, value: pulumi.Input[str]): pulumi.set(self, "mode", value) @property @pulumi.getter(name="securityPolicyId") def security_policy_id(self) -> pulumi.Input[str]: """ . Unique identifier of the security policy associated with the WAF mode settings being modified. 
""" return pulumi.get(self, "security_policy_id") @security_policy_id.setter def security_policy_id(self, value: pulumi.Input[str]): pulumi.set(self, "security_policy_id", value) @pulumi.input_type class _AppSecWafModeState: def __init__(__self__, *, config_id: Optional[pulumi.Input[int]] = None, current_ruleset: Optional[pulumi.Input[str]] = None, eval_expiration_date: Optional[pulumi.Input[str]] = None, eval_ruleset: Optional[pulumi.Input[str]] = None, eval_status: Optional[pulumi.Input[str]] = None, mode: Optional[pulumi.Input[str]] = None, output_text: Optional[pulumi.Input[str]] = None, security_policy_id: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering AppSecWafMode resources. :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the WAF mode settings being modified. :param pulumi.Input[str] mode: . Specifies how Kona Rule Set rules are upgraded. Allowed values are: :param pulumi.Input[str] output_text: Text Export representation :param pulumi.Input[str] security_policy_id: . Unique identifier of the security policy associated with the WAF mode settings being modified. """ if config_id is not None: pulumi.set(__self__, "config_id", config_id) if current_ruleset is not None: pulumi.set(__self__, "current_ruleset", current_ruleset) if eval_expiration_date is not None: pulumi.set(__self__, "eval_expiration_date", eval_expiration_date) if eval_ruleset is not None: pulumi.set(__self__, "eval_ruleset", eval_ruleset) if eval_status is not None: pulumi.set(__self__, "eval_status", eval_status) if mode is not None: pulumi.set(__self__, "mode", mode) if output_text is not None: pulumi.set(__self__, "output_text", output_text) if security_policy_id is not None: pulumi.set(__self__, "security_policy_id", security_policy_id) @property @pulumi.getter(name="configId") def config_id(self) -> Optional[pulumi.Input[int]]: """ . 
Unique identifier of the security configuration associated with the WAF mode settings being modified. """ return pulumi.get(self, "config_id") @config_id.setter def config_id(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "config_id", value) @property @pulumi.getter(name="currentRuleset") def current_ruleset(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "current_ruleset") @current_ruleset.setter def current_ruleset(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "current_ruleset", value) @property @pulumi.getter(name="evalExpirationDate") def eval_expiration_date(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "eval_expiration_date") @eval_expiration_date.setter def eval_expiration_date(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "eval_expiration_date", value) @property @pulumi.getter(name="evalRuleset") def eval_ruleset(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "eval_ruleset") @eval_ruleset.setter def eval_ruleset(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "eval_ruleset", value) @property @pulumi.getter(name="evalStatus") def eval_status(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "eval_status") @eval_status.setter def eval_status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "eval_status", value) @property @pulumi.getter def mode(self) -> Optional[pulumi.Input[str]]: """ . Specifies how Kona Rule Set rules are upgraded. 
Allowed values are: """ return pulumi.get(self, "mode") @mode.setter def mode(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "mode", value) @property @pulumi.getter(name="outputText") def output_text(self) -> Optional[pulumi.Input[str]]: """ Text Export representation """ return pulumi.get(self, "output_text") @output_text.setter def output_text(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "output_text", value) @property @pulumi.getter(name="securityPolicyId") def security_policy_id(self) -> Optional[pulumi.Input[str]]: """ . Unique identifier of the security policy associated with the WAF mode settings being modified. """ return pulumi.get(self, "security_policy_id") @security_policy_id.setter def security_policy_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "security_policy_id", value) class AppSecWafMode(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[int]] = None, mode: Optional[pulumi.Input[str]] = None, security_policy_id: Optional[pulumi.Input[str]] = None, __props__=None): """ **Scopes**: Security policy Modifies the way your Kona Rule Set rules are updated. Use **KRS** mode to update the rule sets manually or **AAG** to have those rule sets automatically updated. 
**Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/security-policies/{policyId}/mode](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putmode) ## Example Usage Basic usage: ```python import pulumi import pulumi_akamai as akamai configuration = akamai.get_app_sec_configuration(name="Documentation") waf_mode = akamai.AppSecWafMode("wafMode", config_id=configuration.config_id, security_policy_id="gms1_134637", mode="KRS") pulumi.export("wafModeMode", waf_mode.mode) pulumi.export("wafModeCurrentRuleset", waf_mode.current_ruleset) pulumi.export("wafModeEvalStatus", waf_mode.eval_status) pulumi.export("wafModeEvalRuleset", waf_mode.eval_ruleset) pulumi.export("wafModeEvalExpirationDate", waf_mode.eval_expiration_date) ``` ## Output Options The following options can be used to determine the information returned, and how that returned information is formatted: - `current_ruleset` – Versioning information for the current Kona Rule Set. - `eval_ruleset`. Versioning information for the Kona Rule Set being evaluated (if applicable) . - `eval_status`. Returns **enabled** if an evaluation is currently in progress; otherwise returns **disabled**. - `eval_expiration_date`. Date on which the evaluation period ends (if applicable). - `output_text`. Tabular report showing the current rule set, WAF mode and evaluation status. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[int] config_id: . Unique identifier of the security configuration associated with the WAF mode settings being modified. :param pulumi.Input[str] mode: . Specifies how Kona Rule Set rules are upgraded. Allowed values are: :param pulumi.Input[str] security_policy_id: . Unique identifier of the security policy associated with the WAF mode settings being modified. """ ... 
@overload def __init__(__self__, resource_name: str, args: AppSecWafModeArgs, opts: Optional[pulumi.ResourceOptions] = None): """ **Scopes**: Security policy Modifies the way your Kona Rule Set rules are updated. Use **KRS** mode to update the rule sets manually or **AAG** to have those rule sets automatically updated. **Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/security-policies/{policyId}/mode](https://developer.akamai.com/api/cloud_security/application_security/v1.html#putmode) ## Example Usage Basic usage: ```python import pulumi import pulumi_akamai as akamai configuration = akamai.get_app_sec_configuration(name="Documentation") waf_mode = akamai.AppSecWafMode("wafMode", config_id=configuration.config_id, security_policy_id="gms1_134637", mode="KRS") pulumi.export("wafModeMode", waf_mode.mode) pulumi.export("wafModeCurrentRuleset", waf_mode.current_ruleset) pulumi.export("wafModeEvalStatus", waf_mode.eval_status) pulumi.export("wafModeEvalRuleset", waf_mode.eval_ruleset) pulumi.export("wafModeEvalExpirationDate", waf_mode.eval_expiration_date) ``` ## Output Options The following options can be used to determine the information returned, and how that returned information is formatted: - `current_ruleset` – Versioning information for the current Kona Rule Set. - `eval_ruleset`. Versioning information for the Kona Rule Set being evaluated (if applicable) . - `eval_status`. Returns **enabled** if an evaluation is currently in progress; otherwise returns **disabled**. - `eval_expiration_date`. Date on which the evaluation period ends (if applicable). - `output_text`. Tabular report showing the current rule set, WAF mode and evaluation status. :param str resource_name: The name of the resource. :param AppSecWafModeArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher between the two typed @overload signatures above: callers may
        # pass a bundled AppSecWafModeArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(AppSecWafModeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 config_id: Optional[pulumi.Input[int]] = None,
                 mode: Optional[pulumi.Input[str]] = None,
                 security_policy_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads. Validates options,
        # checks required properties and registers the resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by ID):
            # required inputs must be present unless resolving by URN.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AppSecWafModeArgs.__new__(AppSecWafModeArgs)

            if config_id is None and not opts.urn:
                raise TypeError("Missing required property 'config_id'")
            __props__.__dict__["config_id"] = config_id
            if mode is None and not opts.urn:
                raise TypeError("Missing required property 'mode'")
            __props__.__dict__["mode"] = mode
            if security_policy_id is None and not opts.urn:
                raise TypeError("Missing required property 'security_policy_id'")
            __props__.__dict__["security_policy_id"] = security_policy_id
            # Output-only attributes are unknown until the provider responds.
            __props__.__dict__["current_ruleset"] = None
            __props__.__dict__["eval_expiration_date"] = None
            __props__.__dict__["eval_ruleset"] = None
            __props__.__dict__["eval_status"] = None
            __props__.__dict__["output_text"] = None
        super(AppSecWafMode, __self__).__init__(
            'akamai:index/appSecWafMode:AppSecWafMode',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            config_id: Optional[pulumi.Input[int]] = None,
            current_ruleset: Optional[pulumi.Input[str]] = None,
            eval_expiration_date: Optional[pulumi.Input[str]] = None,
            eval_ruleset: Optional[pulumi.Input[str]] = None,
            eval_status: Optional[pulumi.Input[str]] = None,
            mode: Optional[pulumi.Input[str]] = None,
            output_text: Optional[pulumi.Input[str]] = None,
            security_policy_id: Optional[pulumi.Input[str]] = None) -> 'AppSecWafMode':
        """
        Get an existing AppSecWafMode resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] config_id: Unique identifier of the security configuration associated with the WAF mode settings being modified.
        :param pulumi.Input[str] current_ruleset: Versioning information for the current Kona Rule Set.
        :param pulumi.Input[str] eval_expiration_date: Date on which the evaluation period ends (if applicable).
        :param pulumi.Input[str] eval_ruleset: Versioning information for the Kona Rule Set being evaluated (if applicable).
        :param pulumi.Input[str] eval_status: **enabled** if an evaluation is currently in progress; otherwise **disabled**.
        :param pulumi.Input[str] mode: Specifies how Kona Rule Set rules are upgraded. Allowed values are: **KRS** (update the rule sets manually) and **AAG** (have the rule sets automatically updated).
        :param pulumi.Input[str] output_text: Text Export representation
        :param pulumi.Input[str] security_policy_id: Unique identifier of the security policy associated with the WAF mode settings being modified.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _AppSecWafModeState.__new__(_AppSecWafModeState)

        __props__.__dict__["config_id"] = config_id
        __props__.__dict__["current_ruleset"] = current_ruleset
        __props__.__dict__["eval_expiration_date"] = eval_expiration_date
        __props__.__dict__["eval_ruleset"] = eval_ruleset
        __props__.__dict__["eval_status"] = eval_status
        __props__.__dict__["mode"] = mode
        __props__.__dict__["output_text"] = output_text
        __props__.__dict__["security_policy_id"] = security_policy_id
        return AppSecWafMode(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Output[int]:
        """
        Unique identifier of the security configuration associated with the WAF mode settings being modified.
        """
        return pulumi.get(self, "config_id")

    @property
    @pulumi.getter(name="currentRuleset")
    def current_ruleset(self) -> pulumi.Output[str]:
        """
        Versioning information for the current Kona Rule Set.
        """
        return pulumi.get(self, "current_ruleset")

    @property
    @pulumi.getter(name="evalExpirationDate")
    def eval_expiration_date(self) -> pulumi.Output[str]:
        """
        Date on which the evaluation period ends (if applicable).
        """
        return pulumi.get(self, "eval_expiration_date")

    @property
    @pulumi.getter(name="evalRuleset")
    def eval_ruleset(self) -> pulumi.Output[str]:
        """
        Versioning information for the Kona Rule Set being evaluated (if applicable).
        """
        return pulumi.get(self, "eval_ruleset")

    @property
    @pulumi.getter(name="evalStatus")
    def eval_status(self) -> pulumi.Output[str]:
        """
        Returns **enabled** if an evaluation is currently in progress; otherwise returns **disabled**.
        """
        return pulumi.get(self, "eval_status")

    @property
    @pulumi.getter
    def mode(self) -> pulumi.Output[str]:
        """
        Specifies how Kona Rule Set rules are upgraded. Allowed values are: **KRS** (update the rule sets manually) and **AAG** (have the rule sets automatically updated).
        """
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="outputText")
    def output_text(self) -> pulumi.Output[str]:
        """
        Text Export representation
        """
        return pulumi.get(self, "output_text")

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> pulumi.Output[str]:
        """
        Unique identifier of the security policy associated with the WAF mode settings being modified.
        """
        return pulumi.get(self, "security_policy_id")
lurch/python-gpiozero
docs/examples/robot_keyboard_1.py
"""Drive a gpiozero Robot with the arrow keys.

Uses curses in half-delay mode to approximate key-down/key-up events on a
plain terminal: holding an arrow key generates a stream of repeats, and the
robot keeps moving until the repeats stop arriving.
"""
import curses
from gpiozero import Robot

# Motor driver pins: left motor on GPIO 4/14, right motor on GPIO 17/18.
robot = Robot(left=(4, 14), right=(17, 18))

# Map curses arrow-key codes to the corresponding robot motion methods.
actions = {
    curses.KEY_UP:    robot.forward,
    curses.KEY_DOWN:  robot.backward,
    curses.KEY_LEFT:  robot.left,
    curses.KEY_RIGHT: robot.right,
}

def main(window):
    """Event loop run under curses.wrapper; *window* is the curses screen."""
    next_key = None
    while True:
        # Wait up to 0.1s for the first key press (getch returns -1 on timeout).
        curses.halfdelay(1)
        if next_key is None:
            key = window.getch()
        else:
            # A different key arrived while draining repeats; handle it now.
            key = next_key
            next_key = None
        if key != -1:
            # KEY DOWN: start the motion for this key (unknown keys do nothing).
            curses.halfdelay(3)
            action = actions.get(key)
            if action is not None:
                action()
            # Drain auto-repeats of the same key; a 0.3s gap (getch -> -1) or a
            # different key means the original key was released.
            next_key = key
            while next_key == key:
                next_key = window.getch()
            # KEY UP: stop moving.
            robot.stop()

curses.wrapper(main)
uk-gov-mirror/guidance-guarantee-programme.telephone_appointment_planner
app/helpers/application_helper.rb
module ApplicationHelper
  # Wraps the pagination helper provided by the parent module (presumably
  # Kaminari, given the theme name -- TODO confirm) so that every call site
  # gets the Bootstrap 3 theme by default. reverse_merge! only fills in
  # missing keys, so an explicit :theme passed by the caller still wins.
  def paginate(objects, options = {})
    options.reverse_merge!(theme: 'twitter-bootstrap-3')
    super(objects, options)
  end

  # Count of unresolved, high-priority activities for the current user.
  # NOTE(review): runs a COUNT query on every call -- verify callers don't
  # invoke it repeatedly per request.
  def high_priority_activity_count
    Activity
      .high_priority_for(current_user)
      .unresolved
      .count
  end
end
Watch-Later/recipes
tpc/bin/roundtrip_tcp.cc
<reponame>Watch-Later/recipes #include "InetAddress.h" #include "TcpStream.h" #include <limits> #include <memory> #include <unistd.h> #include <stdlib.h> #include <stdio.h> #include <time.h> #include <vector> int64_t kNanos = 1e9; int64_t clock_diff(struct timespec x, struct timespec y) { return (x.tv_sec - y.tv_sec) * kNanos + x.tv_nsec - y.tv_nsec; } struct timespec get_time() { struct timespec now; clock_gettime(CLOCK_MONOTONIC, &now); return now; } struct Sample { int index; int64_t start_nano; int64_t rtt_nano; }; std::vector<Sample> run(const char* host, int delay_ms, int length_s, int batch, int payload_size, bool silent) { const struct timespec start = get_time(); std::vector<Sample> rtts; InetAddress addr; if (!InetAddress::resolve(host, 3007, &addr)) { printf("Unable to resolve %s\n", host); return rtts; } // printf("connecting to %s\n", addr.toIpPort().c_str()); TcpStreamPtr stream(TcpStream::connect(addr)); if (!stream) { printf("Unable to connect %s\n", addr.toIpPort().c_str()); perror(""); return rtts; } std::unique_ptr<char[]> payload(new char[payload_size]); int64_t count = 0, sum_rtt = 0; int64_t min_rtt = std::numeric_limits<int64_t>::max(), max_rtt = 0; while (true) { const struct timespec batch_start = get_time(); double elapsed_s = (double)clock_diff(batch_start, start) / kNanos; if (elapsed_s >= length_s) { if (silent && count > 0) { printf("count %ld, avg rtt %.2fus, min %.2fus, max %.2fus\n", count, sum_rtt / 1e3 / count, min_rtt / 1e3, max_rtt / 1e3); } break; } for (int i = 0; i < batch; ++i) { const struct timespec before = get_time(); int nw = stream->sendAll(payload.get(), payload_size); if (nw != payload_size) return rtts; int nr = stream->receiveAll(payload.get(), payload_size); if (nr != payload_size) return rtts; const struct timespec after = get_time(); int64_t rtt = clock_diff(after, before); ++count; sum_rtt += rtt; if (rtt > max_rtt) max_rtt = rtt; if (rtt < min_rtt) min_rtt = rtt; Sample s = { .index = i, .rtt_nano = rtt, }; if 
(i == 0) s.start_nano = clock_diff(before, start); else s.start_nano = clock_diff(before, batch_start); if (!silent) rtts.push_back(s); } if (delay_ms > 0) { ::usleep(delay_ms * 1000); } } return rtts; } int main(int argc, char* argv[]) { int opt; int delay = 0, length = 3, batch = 4, payload = 1; bool silent = false; while ((opt = getopt(argc, argv, "b:d:l:p:s")) != -1) { switch (opt) { case 'b': batch = atoi(optarg); break; case 'd': delay = atoi(optarg); break; case 'l': length = atoi(optarg); break; case 'p': payload = atoi(optarg); break; case 's': silent = true; break; default: ; } } if (optind >= argc) { fprintf(stderr, "Usage:\nroundtrip_tcp [-b batch_size] [-d delay_ms] [-l length_in_seconds] echo_server_host\n"); return 1; } if (batch < 1) { batch = 1; } if (delay < 0) { delay = 0; } if (payload < 1) { payload = 1; } std::vector<Sample> rtts = run(argv[optind], delay, length, batch, payload, silent); if (!silent) { printf("index start rtt\n"); for (Sample s : rtts) { printf("%d %ld %ld\n", s.index, s.start_nano, s.rtt_nano); } } }
MDK-Packs/Nuvoton_M480BSP
SampleCode/StdDriver/HSUSBH_USBH_HID/main.c
/**************************************************************************//**
 * @file     main.c
 * @version  V1.00
 * @brief    Use USB Host core driver and HID driver. This sample demonstrates how
 *           to submit HID class request and how to read data from interrupt pipe.
 *           This sample supports dynamic device plug/un-plug and multiple HID devices.
 *
 *
 * @copyright (C) 2017 Nuvoton Technology Corp. All rights reserved.
 *****************************************************************************/
#include <stdio.h>
#include <string.h>

#include "NuMicro.h"

#include "usbh_lib.h"
#include "usbh_hid.h"

/* Define to also demonstrate HID interrupt-out transfers. */
// #define HAVE_INT_OUT

/* Shared transfer buffer; 32-byte aligned as required by the USB host
   controller DMA engine. */
#ifdef __ICCARM__
#pragma data_alignment=32
uint8_t g_buff_pool[1024];
#else
uint8_t g_buff_pool[1024] __attribute__((aligned(32)));
#endif

/* Snapshot of currently connected HID devices, used to detect plug/un-plug. */
HID_DEV_T *g_hid_list[CONFIG_HID_MAX_DEV];

extern int kbhit(void);                   /* function in retarget.c */

volatile uint32_t g_tick_cnt;             /* SysTick counter, incremented at 100 Hz */
volatile int int_cnt = 0;                 /* interrupt-in transfers since the last report */

/* SysTick ISR: advance the 10 ms tick counter. */
void SysTick_Handler(void)
{
    g_tick_cnt++;
}

/* Start SysTick at the given rate; halts on configuration failure. */
void enable_sys_tick(int ticks_per_second)
{
    g_tick_cnt = 0;
    if (SysTick_Config(SystemCoreClock / ticks_per_second))
    {
        /* Setup SysTick Timer for 1 second interrupts */
        printf("Set system tick error!!\n");
        while (1);
    }
}

/* Current tick count (10 ms units with the 100 Hz configuration used here). */
uint32_t get_ticks()
{
    return g_tick_cnt;
}

/*
 * Busy-wait for `usec` microseconds using Timer0.
 * This function is necessary for USB Host library.
 */
void delay_us(int usec)
{
    /*
     * Configure Timer0, clock source from XTL_12M. Prescale 12
     * (12 MHz / 12 = 1 MHz, i.e. one timer tick per microsecond).
     */
    /* TIMER0 clock from HXT */
    CLK->CLKSEL1 = (CLK->CLKSEL1 & (~CLK_CLKSEL1_TMR0SEL_Msk)) | CLK_CLKSEL1_TMR0SEL_HXT;
    CLK->APBCLK0 |= CLK_APBCLK0_TMR0CKEN_Msk;
    TIMER0->CTL = 0;        /* disable timer */
    TIMER0->INTSTS = (TIMER_INTSTS_TIF_Msk | TIMER_INTSTS_TWKF_Msk);   /* write 1 to clear for safety */
    TIMER0->CMP = usec;
    TIMER0->CTL = (11 << TIMER_CTL_PSC_Pos) | TIMER_ONESHOT_MODE | TIMER_CTL_CNTEN_Msk;
    while (!TIMER0->INTSTS);
}

/* Hex-dump `nBytes` starting at `pucBuff`, 16 bytes per row with offsets. */
void dump_buff_hex(uint8_t *pucBuff, int nBytes)
{
    int nIdx, i;

    nIdx = 0;
    while (nBytes > 0)
    {
        printf("0x%04X ", nIdx);
        for (i = 0; (i < 16) && (nBytes > 0); i++)
        {
            printf("%02x ", pucBuff[nIdx + i]);
            nBytes--;
        }
        nIdx += 16;
        printf("\n");
    }
    printf("\n");
}

/* Returns 1 if `hdev` is not in the last device snapshot (i.e. newly
   plugged), 0 if it was already known. The uid check distinguishes a new
   device that reuses a recycled HID_DEV_T slot. */
int is_a_new_hid_device(HID_DEV_T *hdev)
{
    int i;
    for (i = 0; i < CONFIG_HID_MAX_DEV; i++)
    {
        if ((g_hid_list[i] != NULL) && (g_hid_list[i] == hdev) &&
                (g_hid_list[i]->uid == hdev->uid))
            return 0;
    }
    return 1;
}

/* Rebuild the device snapshot from the driver's current linked list. */
void update_hid_device_list(HID_DEV_T *hdev)
{
    int i = 0;
    memset(g_hid_list, 0, sizeof(g_hid_list));
    while ((i < CONFIG_HID_MAX_DEV) && (hdev != NULL))
    {
        g_hid_list[i++] = hdev;
        hdev = hdev->next;
    }
}

/* Completion callback for interrupt-in transfers. */
void int_read_callback(HID_DEV_T *hdev, uint16_t ep_addr, int status, uint8_t *rdata, uint32_t data_len)
{
    /*
     * The USB host HID driver notifies the user of the transfer status via
     * <status>. If <status> is 0, the USB transfer is fine. If <status> is not
     * zero, this interrupt-in transfer failed and the HID driver will stop this
     * pipe. It can be caused by a USB transfer error or device disconnect.
     */
    if (status < 0)
    {
        printf("Interrupt in transfer failed! status: %d\n", status);
        return;
    }
    printf("Device [0x%x,0x%x] ep 0x%x, %d bytes received =>\n",
           hdev->idVendor, hdev->idProduct, ep_addr, data_len);
    dump_buff_hex(rdata, data_len);
    int_cnt++;
}

#ifdef HAVE_INT_OUT
/* Callback invoked before each interrupt-out transfer: fill `wbuff` and set
   *data_len to the number of bytes to send.
   NOTE(review): parameter name "staus" is a typo for "status" (unused here). */
void int_write_callback(HID_DEV_T *hdev, uint16_t ep_addr, int staus, uint8_t *wbuff, uint32_t *data_len)
{
    int max_len = *data_len;

    printf("Device [0x%x,0x%x] ep 0x%x, ask user to fill data buffer and length.\n",
           hdev->idVendor, hdev->idProduct, ep_addr);

    memset(wbuff, 0, max_len);         /* Fill data to be sent via interrupt out pipe */

    *data_len = max_len;               /* Tell HID driver transfer length of this time */
}
#endif

/* Initialize a newly attached HID device: dump its report descriptor,
   exercise the GET/SET_PROTOCOL and GET_REPORT class requests, then start
   the interrupt-in (and optionally interrupt-out) pipes. Always returns 0;
   halts on fatal pipe-start failure. */
int init_hid_device(HID_DEV_T *hdev)
{
    uint8_t *data_buff;
    int i, ret;

    /* Cast through uint32_t is safe on this 32-bit Cortex-M target. */
    data_buff = (uint8_t *)((uint32_t)g_buff_pool);

    printf("\n\n==================================\n");
    printf(" Init HID device : 0x%x\n", (int)hdev);
    printf(" VID: 0x%x, PID: 0x%x\n\n", hdev->idVendor, hdev->idProduct);

    ret = usbh_hid_get_report_descriptor(hdev, data_buff, 1024);
    if (ret > 0)
    {
        printf("\nDump report descriptor =>\n");
        dump_buff_hex(data_buff, ret);
    }

    /*
     * Example: GET_PROTOCOL request.
     */
    ret = usbh_hid_get_protocol(hdev, data_buff);
    printf("[GET_PROTOCOL] ret = %d, protocol = %d\n", ret, data_buff[0]);

    /*
     * Example: SET_PROTOCOL request.
     */
    ret = usbh_hid_set_protocol(hdev, data_buff[0]);
    printf("[SET_PROTOCOL] ret = %d, protocol = %d\n", ret, data_buff[0]);

    /*
     * Example: GET_REPORT request on report ID 0x1, report type FEATURE.
     */
    ret = usbh_hid_get_report(hdev, RT_FEATURE, 0x1, data_buff, 64);
    if (ret > 0)
    {
        printf("[GET_REPORT] Data => ");
        for (i = 0; i < ret; i++)
            printf("%02x ", data_buff[i]);
        printf("\n");
    }

    printf("\nUSBH_HidStartIntReadPipe...\n");
    for (i = 0; i < 2; i++)
    {
        /* use ping-pong buffer transfer */
        ret = usbh_hid_start_int_read(hdev, 0, int_read_callback);
        if (ret != HID_RET_OK)
        {
            printf("usbh_hid_start_int_read failed! %d\n", ret);
            while (1);
        }
        else
            printf("Interrupt in transfer %d started...\n", i);
    }

#ifdef HAVE_INT_OUT
    ret = usbh_hid_start_int_write(hdev, 0, int_write_callback);
    if ((ret != HID_RET_OK) && (ret != HID_RET_XFER_IS_RUNNING))
        printf("usbh_hid_start_int_write failed!\n");
    else
        printf("Interrupt out transfer started...\n");
#endif

    return 0;
}

/* Configure clocks, USB PHY roles and multi-function pins for USB host
   operation (192 MHz core, 48 MHz USB host clock). */
void SYS_Init(void)
{
    /* Unlock protected registers */
    SYS_UnlockReg();

    /* Set XT1_OUT(PF.2) and XT1_IN(PF.3) to input mode */
    PF->MODE &= ~(GPIO_MODE_MODE2_Msk | GPIO_MODE_MODE3_Msk);

    /* Enable HXT clock */
    CLK_EnableXtalRC(CLK_PWRCTL_HXTEN_Msk);

    /* Wait for HXT clock ready */
    CLK_WaitClockReady(CLK_STATUS_HXTSTB_Msk);

    /* Switch HCLK clock source to HXT */
    CLK_SetHCLK(CLK_CLKSEL0_HCLKSEL_HXT,CLK_CLKDIV0_HCLK(1));

    /* Set core clock as PLL_CLOCK from PLL */
    CLK_SetCoreClock(FREQ_192MHZ);

    /* Set both PCLK0 and PCLK1 as HCLK/2 */
    CLK->PCLKDIV = CLK_PCLKDIV_APB0DIV_DIV2 | CLK_PCLKDIV_APB1DIV_DIV2;

    /* Enable UART module clock */
    CLK_EnableModuleClock(UART0_MODULE);

    /* Select UART module clock source as HXT and UART module clock divider as 1 */
    CLK_SetModuleClock(UART0_MODULE, CLK_CLKSEL1_UART0SEL_HXT, CLK_CLKDIV0_UART0(1));

    /* Enable USBH module clock */
    CLK_EnableModuleClock(USBH_MODULE);

    /* USB Host desired input clock is 48 MHz. Set as PLL divided by 4 (192/4 = 48) */
    CLK->CLKDIV0 = (CLK->CLKDIV0 & ~CLK_CLKDIV0_USBDIV_Msk) | CLK_CLKDIV0_USB(4);

    /* Enable USBD and OTG clock */
    CLK->APBCLK0 |= CLK_APBCLK0_USBDCKEN_Msk | CLK_APBCLK0_OTGCKEN_Msk;

    /* Set OTG as USB Host role */
    SYS->USBPHY = SYS_USBPHY_HSUSBEN_Msk | (0x1 << SYS_USBPHY_HSUSBROLE_Pos) |
                  SYS_USBPHY_USBEN_Msk | SYS_USBPHY_SBO_Msk | (0x1 << SYS_USBPHY_USBROLE_Pos);
    delay_us(20);
    SYS->USBPHY |= SYS_USBPHY_HSUSBACT_Msk;

    /* Update System Core Clock */
    SystemCoreClockUpdate();

    /* Set GPB multi-function pins for UART0 RXD and TXD */
    SYS->GPB_MFPH &= ~(SYS_GPB_MFPH_PB12MFP_Msk | SYS_GPB_MFPH_PB13MFP_Msk);
    SYS->GPB_MFPH |= (SYS_GPB_MFPH_PB12MFP_UART0_RXD | SYS_GPB_MFPH_PB13MFP_UART0_TXD);

    /* USB_VBUS_EN (USB 1.1 VBUS power enable pin) multi-function pin - PB.15 */
    SYS->GPB_MFPH = (SYS->GPB_MFPH & ~SYS_GPB_MFPH_PB15MFP_Msk) | SYS_GPB_MFPH_PB15MFP_USB_VBUS_EN;

    /* USB_VBUS_ST (USB 1.1 over-current detect pin) multi-function pin - PC.14 */
    SYS->GPC_MFPH = (SYS->GPC_MFPH & ~SYS_GPC_MFPH_PC14MFP_Msk) | SYS_GPC_MFPH_PC14MFP_USB_VBUS_ST;

    /* HSUSB_VBUS_EN (USB 2.0 VBUS power enable pin) multi-function pin - PB.10 */
    SYS->GPB_MFPH = (SYS->GPB_MFPH & ~SYS_GPB_MFPH_PB10MFP_Msk) | SYS_GPB_MFPH_PB10MFP_HSUSB_VBUS_EN;

    /* HSUSB_VBUS_ST (USB 2.0 over-current detect pin) multi-function pin - PB.11 */
    SYS->GPB_MFPH = (SYS->GPB_MFPH & ~SYS_GPB_MFPH_PB11MFP_Msk) | SYS_GPB_MFPH_PB11MFP_HSUSB_VBUS_ST;

    /* USB 1.1 port multi-function pin VBUS, D+, D-, and ID pins */
    SYS->GPA_MFPH &= ~(SYS_GPA_MFPH_PA12MFP_Msk | SYS_GPA_MFPH_PA13MFP_Msk |
                       SYS_GPA_MFPH_PA14MFP_Msk | SYS_GPA_MFPH_PA15MFP_Msk);
    SYS->GPA_MFPH |= SYS_GPA_MFPH_PA12MFP_USB_VBUS | SYS_GPA_MFPH_PA13MFP_USB_D_N |
                     SYS_GPA_MFPH_PA14MFP_USB_D_P | SYS_GPA_MFPH_PA15MFP_USB_OTG_ID;

    /* Lock protected registers */
    SYS_LockReg();
}

void UART0_Init(void)
{
    /* Configure UART0 and set UART0 baud rate */
    UART_Open(UART0, 115200);
}

/* Main loop: poll the USB hub ports; initialize any newly attached HID
   device; report the interrupt-in transfer rate once per second. */
int32_t main(void)
{
    HID_DEV_T *hdev, *hdev_list;
    uint32_t t0;

    SYS_Init();                        /* Init System, IP clock and multi-function I/O */

    UART0_Init();                      /* Initialize UART0 */

    enable_sys_tick(100);

    printf("\n\n");
    printf("+-------------------------------------------+\n");
    printf("| |\n");
    printf("| USB Host HID class sample demo |\n");
    printf("| |\n");
    printf("+-------------------------------------------+\n");

    usbh_core_init();
    usbh_hid_init();
    usbh_memory_used();

    memset(g_hid_list, 0, sizeof(g_hid_list));

    t0 = g_tick_cnt;
    while (1)
    {
        if (usbh_pooling_hubs())       /* USB Host port detect polling and management */
        {
            usbh_memory_used();        /* print out USB memory allocating information */

            printf("\n Has hub events.\n");
            hdev_list = usbh_hid_get_device_list();
            hdev = hdev_list;
            while (hdev != NULL)
            {
                if (is_a_new_hid_device(hdev))
                {
                    init_hid_device(hdev);
                }
                hdev = hdev->next;
            }

            update_hid_device_list(hdev_list);
            usbh_memory_used();
        }
        /* Once per second (100 ticks at 100 Hz) report and reset the
           interrupt-in transfer counter. */
        if (g_tick_cnt - t0 >= 100)
        {
            t0 = g_tick_cnt;
            printf("%d \n", int_cnt);
            int_cnt = 0;
        }
#ifndef DEBUG_ENABLE_SEMIHOST
        /* Any keystroke dumps the USB memory-pool statistics. */
        if (!kbhit())
        {
            getchar();
            usbh_memory_used();
        }
#endif
    }
}

/*** (C) COPYRIGHT 2017 Nuvoton Technology Corp. ***/
abhijain-sq/square-java-sdk
src/main/java/com/squareup/square/models/V1Refund.java
package com.squareup.square.models;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.squareup.square.http.client.HttpContext;
import java.util.List;
import java.util.Objects;


/**
 * This is a model class for V1Refund type.
 * Immutable except for {@code httpContext}, which the API client attaches
 * after the HTTP call; it is deliberately excluded from equals/hashCode.
 */
public class V1Refund {
    // Transport metadata for the response that produced this model;
    // not part of the JSON payload or of value equality.
    private HttpContext httpContext;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String type;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String reason;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedProcessingFeeMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedTaxMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedAdditiveTaxMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final List<V1PaymentTax> refundedAdditiveTax;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedInclusiveTaxMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final List<V1PaymentTax> refundedInclusiveTax;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedTipMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedDiscountMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final V1Money refundedSurchargeMoney;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final List<V1PaymentSurcharge> refundedSurcharges;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String createdAt;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String processedAt;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String paymentId;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final String merchantId;
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private final Boolean isExchange;

    /**
     * Initialization constructor.
     * @param type String value for type.
     * @param reason String value for reason.
     * @param refundedMoney V1Money value for refundedMoney.
     * @param refundedProcessingFeeMoney V1Money value for refundedProcessingFeeMoney.
     * @param refundedTaxMoney V1Money value for refundedTaxMoney.
     * @param refundedAdditiveTaxMoney V1Money value for refundedAdditiveTaxMoney.
     * @param refundedAdditiveTax List of V1PaymentTax value for refundedAdditiveTax.
     * @param refundedInclusiveTaxMoney V1Money value for refundedInclusiveTaxMoney.
     * @param refundedInclusiveTax List of V1PaymentTax value for refundedInclusiveTax.
     * @param refundedTipMoney V1Money value for refundedTipMoney.
     * @param refundedDiscountMoney V1Money value for refundedDiscountMoney.
     * @param refundedSurchargeMoney V1Money value for refundedSurchargeMoney.
     * @param refundedSurcharges List of V1PaymentSurcharge value for refundedSurcharges.
     * @param createdAt String value for createdAt.
     * @param processedAt String value for processedAt.
     * @param paymentId String value for paymentId.
     * @param merchantId String value for merchantId.
     * @param isExchange Boolean value for isExchange.
     */
    @JsonCreator
    public V1Refund(
            @JsonProperty("type") String type,
            @JsonProperty("reason") String reason,
            @JsonProperty("refunded_money") V1Money refundedMoney,
            @JsonProperty("refunded_processing_fee_money") V1Money refundedProcessingFeeMoney,
            @JsonProperty("refunded_tax_money") V1Money refundedTaxMoney,
            @JsonProperty("refunded_additive_tax_money") V1Money refundedAdditiveTaxMoney,
            @JsonProperty("refunded_additive_tax") List<V1PaymentTax> refundedAdditiveTax,
            @JsonProperty("refunded_inclusive_tax_money") V1Money refundedInclusiveTaxMoney,
            @JsonProperty("refunded_inclusive_tax") List<V1PaymentTax> refundedInclusiveTax,
            @JsonProperty("refunded_tip_money") V1Money refundedTipMoney,
            @JsonProperty("refunded_discount_money") V1Money refundedDiscountMoney,
            @JsonProperty("refunded_surcharge_money") V1Money refundedSurchargeMoney,
            @JsonProperty("refunded_surcharges") List<V1PaymentSurcharge> refundedSurcharges,
            @JsonProperty("created_at") String createdAt,
            @JsonProperty("processed_at") String processedAt,
            @JsonProperty("payment_id") String paymentId,
            @JsonProperty("merchant_id") String merchantId,
            @JsonProperty("is_exchange") Boolean isExchange) {
        this.type = type;
        this.reason = reason;
        this.refundedMoney = refundedMoney;
        this.refundedProcessingFeeMoney = refundedProcessingFeeMoney;
        this.refundedTaxMoney = refundedTaxMoney;
        this.refundedAdditiveTaxMoney = refundedAdditiveTaxMoney;
        this.refundedAdditiveTax = refundedAdditiveTax;
        this.refundedInclusiveTaxMoney = refundedInclusiveTaxMoney;
        this.refundedInclusiveTax = refundedInclusiveTax;
        this.refundedTipMoney = refundedTipMoney;
        this.refundedDiscountMoney = refundedDiscountMoney;
        this.refundedSurchargeMoney = refundedSurchargeMoney;
        this.refundedSurcharges = refundedSurcharges;
        this.createdAt = createdAt;
        this.processedAt = processedAt;
        this.paymentId = paymentId;
        this.merchantId = merchantId;
        this.isExchange = isExchange;
    }

    // Returns the HTTP context recorded for this model instance (may be null
    // if the object was not produced by an API call).
    public HttpContext getContext() {
        return httpContext;
    }

    /**
     * Getter for Type.
     * @return Returns the String
     */
    @JsonGetter("type")
    public String getType() {
        return type;
    }

    /**
     * Getter for Reason.
     * The merchant-specified reason for the refund.
     * @return Returns the String
     */
    @JsonGetter("reason")
    public String getReason() {
        return reason;
    }

    /**
     * Getter for RefundedMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_money")
    public V1Money getRefundedMoney() {
        return refundedMoney;
    }

    /**
     * Getter for RefundedProcessingFeeMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_processing_fee_money")
    public V1Money getRefundedProcessingFeeMoney() {
        return refundedProcessingFeeMoney;
    }

    /**
     * Getter for RefundedTaxMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_tax_money")
    public V1Money getRefundedTaxMoney() {
        return refundedTaxMoney;
    }

    /**
     * Getter for RefundedAdditiveTaxMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_additive_tax_money")
    public V1Money getRefundedAdditiveTaxMoney() {
        return refundedAdditiveTaxMoney;
    }

    /**
     * Getter for RefundedAdditiveTax.
     * All of the additive taxes associated with the refund.
     * @return Returns the List of V1PaymentTax
     */
    @JsonGetter("refunded_additive_tax")
    public List<V1PaymentTax> getRefundedAdditiveTax() {
        return refundedAdditiveTax;
    }

    /**
     * Getter for RefundedInclusiveTaxMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_inclusive_tax_money")
    public V1Money getRefundedInclusiveTaxMoney() {
        return refundedInclusiveTaxMoney;
    }

    /**
     * Getter for RefundedInclusiveTax.
     * All of the inclusive taxes associated with the refund.
     * @return Returns the List of V1PaymentTax
     */
    @JsonGetter("refunded_inclusive_tax")
    public List<V1PaymentTax> getRefundedInclusiveTax() {
        return refundedInclusiveTax;
    }

    /**
     * Getter for RefundedTipMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_tip_money")
    public V1Money getRefundedTipMoney() {
        return refundedTipMoney;
    }

    /**
     * Getter for RefundedDiscountMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_discount_money")
    public V1Money getRefundedDiscountMoney() {
        return refundedDiscountMoney;
    }

    /**
     * Getter for RefundedSurchargeMoney.
     * @return Returns the V1Money
     */
    @JsonGetter("refunded_surcharge_money")
    public V1Money getRefundedSurchargeMoney() {
        return refundedSurchargeMoney;
    }

    /**
     * Getter for RefundedSurcharges.
     * A list of all surcharges associated with the refund.
     * @return Returns the List of V1PaymentSurcharge
     */
    @JsonGetter("refunded_surcharges")
    public List<V1PaymentSurcharge> getRefundedSurcharges() {
        return refundedSurcharges;
    }

    /**
     * Getter for CreatedAt.
     * The time when the merchant initiated the refund for Square to process, in ISO 8601 format.
     * @return Returns the String
     */
    @JsonGetter("created_at")
    public String getCreatedAt() {
        return createdAt;
    }

    /**
     * Getter for ProcessedAt.
     * The time when Square processed the refund on behalf of the merchant, in ISO 8601 format.
     * @return Returns the String
     */
    @JsonGetter("processed_at")
    public String getProcessedAt() {
        return processedAt;
    }

    /**
     * Getter for PaymentId.
     * A Square-issued ID associated with the refund. For single-tender refunds, payment_id is the
     * ID of the original payment ID. For split-tender refunds, payment_id is the ID of the original
     * tender. For exchange-based refunds (is_exchange == true), payment_id is the ID of the
     * original payment ID even if the payment includes other tenders.
     * @return Returns the String
     */
    @JsonGetter("payment_id")
    public String getPaymentId() {
        return paymentId;
    }

    /**
     * Getter for MerchantId.
     * @return Returns the String
     */
    @JsonGetter("merchant_id")
    public String getMerchantId() {
        return merchantId;
    }

    /**
     * Getter for IsExchange.
     * Indicates whether or not the refund is associated with an exchange. If is_exchange is true,
     * the refund reflects the value of goods returned in the exchange not the total money refunded.
     * @return Returns the Boolean
     */
    @JsonGetter("is_exchange")
    public Boolean getIsExchange() {
        return isExchange;
    }

    // httpContext intentionally omitted from hashCode/equals (transport
    // metadata, not part of the model's value).
    @Override
    public int hashCode() {
        return Objects.hash(type, reason, refundedMoney, refundedProcessingFeeMoney,
                refundedTaxMoney, refundedAdditiveTaxMoney, refundedAdditiveTax,
                refundedInclusiveTaxMoney, refundedInclusiveTax, refundedTipMoney,
                refundedDiscountMoney, refundedSurchargeMoney, refundedSurcharges, createdAt,
                processedAt, paymentId, merchantId, isExchange);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof V1Refund)) {
            return false;
        }
        V1Refund other = (V1Refund) obj;
        return Objects.equals(type, other.type)
            && Objects.equals(reason, other.reason)
            && Objects.equals(refundedMoney, other.refundedMoney)
            && Objects.equals(refundedProcessingFeeMoney, other.refundedProcessingFeeMoney)
            && Objects.equals(refundedTaxMoney, other.refundedTaxMoney)
            && Objects.equals(refundedAdditiveTaxMoney, other.refundedAdditiveTaxMoney)
            && Objects.equals(refundedAdditiveTax, other.refundedAdditiveTax)
            && Objects.equals(refundedInclusiveTaxMoney, other.refundedInclusiveTaxMoney)
            && Objects.equals(refundedInclusiveTax, other.refundedInclusiveTax)
            && Objects.equals(refundedTipMoney, other.refundedTipMoney)
            && Objects.equals(refundedDiscountMoney, other.refundedDiscountMoney)
            && Objects.equals(refundedSurchargeMoney, other.refundedSurchargeMoney)
            && Objects.equals(refundedSurcharges, other.refundedSurcharges)
            && Objects.equals(createdAt, other.createdAt)
            && Objects.equals(processedAt, other.processedAt)
            && Objects.equals(paymentId, other.paymentId)
            && Objects.equals(merchantId, other.merchantId)
            && Objects.equals(isExchange, other.isExchange);
    }

    /**
     * Converts this V1Refund into string format.
     * @return String representation of this class
     */
    @Override
    public String toString() {
        return "V1Refund [" + "type=" + type + ", reason=" + reason + ", refundedMoney="
                + refundedMoney + ", refundedProcessingFeeMoney=" + refundedProcessingFeeMoney
                + ", refundedTaxMoney=" + refundedTaxMoney + ", refundedAdditiveTaxMoney="
                + refundedAdditiveTaxMoney + ", refundedAdditiveTax=" + refundedAdditiveTax
                + ", refundedInclusiveTaxMoney=" + refundedInclusiveTaxMoney
                + ", refundedInclusiveTax=" + refundedInclusiveTax + ", refundedTipMoney="
                + refundedTipMoney + ", refundedDiscountMoney=" + refundedDiscountMoney
                + ", refundedSurchargeMoney=" + refundedSurchargeMoney + ", refundedSurcharges="
                + refundedSurcharges + ", createdAt=" + createdAt + ", processedAt=" + processedAt
                + ", paymentId=" + paymentId + ", merchantId=" + merchantId + ", isExchange="
                + isExchange + "]";
    }

    /**
     * Builds a new {@link V1Refund.Builder} object.
     * Creates the instance with the state of the current model.
     * @return a new {@link V1Refund.Builder} object
     */
    public Builder toBuilder() {
        Builder builder = new Builder()
                .type(getType())
                .reason(getReason())
                .refundedMoney(getRefundedMoney())
                .refundedProcessingFeeMoney(getRefundedProcessingFeeMoney())
                .refundedTaxMoney(getRefundedTaxMoney())
                .refundedAdditiveTaxMoney(getRefundedAdditiveTaxMoney())
                .refundedAdditiveTax(getRefundedAdditiveTax())
                .refundedInclusiveTaxMoney(getRefundedInclusiveTaxMoney())
                .refundedInclusiveTax(getRefundedInclusiveTax())
                .refundedTipMoney(getRefundedTipMoney())
                .refundedDiscountMoney(getRefundedDiscountMoney())
                .refundedSurchargeMoney(getRefundedSurchargeMoney())
                .refundedSurcharges(getRefundedSurcharges())
                .createdAt(getCreatedAt())
                .processedAt(getProcessedAt())
                .paymentId(getPaymentId())
                .merchantId(getMerchantId())
                .isExchange(getIsExchange());
        return builder;
    }

    /**
     * Class to build instances of {@link V1Refund}.
     */
    public static class Builder {
        private HttpContext httpContext;
        private String type;
        private String reason;
        private V1Money refundedMoney;
        private V1Money refundedProcessingFeeMoney;
        private V1Money refundedTaxMoney;
        private V1Money refundedAdditiveTaxMoney;
        private List<V1PaymentTax> refundedAdditiveTax;
        private V1Money refundedInclusiveTaxMoney;
        private List<V1PaymentTax> refundedInclusiveTax;
        private V1Money refundedTipMoney;
        private V1Money refundedDiscountMoney;
        private V1Money refundedSurchargeMoney;
        private List<V1PaymentSurcharge> refundedSurcharges;
        private String createdAt;
        private String processedAt;
        private String paymentId;
        private String merchantId;
        private Boolean isExchange;



        /**
         * Setter for httpContext.
         * @param httpContext HttpContext value for httpContext.
         * @return Builder
         */
        public Builder httpContext(HttpContext httpContext) {
            this.httpContext = httpContext;
            return this;
        }

        /**
         * Setter for type.
         * @param type String value for type.
         * @return Builder
         */
        public Builder type(String type) {
            this.type = type;
            return this;
        }

        /**
         * Setter for reason.
         * @param reason String value for reason.
         * @return Builder
         */
        public Builder reason(String reason) {
            this.reason = reason;
            return this;
        }

        /**
         * Setter for refundedMoney.
         * @param refundedMoney V1Money value for refundedMoney.
         * @return Builder
         */
        public Builder refundedMoney(V1Money refundedMoney) {
            this.refundedMoney = refundedMoney;
            return this;
        }

        /**
         * Setter for refundedProcessingFeeMoney.
         * @param refundedProcessingFeeMoney V1Money value for refundedProcessingFeeMoney.
         * @return Builder
         */
        public Builder refundedProcessingFeeMoney(V1Money refundedProcessingFeeMoney) {
            this.refundedProcessingFeeMoney = refundedProcessingFeeMoney;
            return this;
        }

        /**
         * Setter for refundedTaxMoney.
         * @param refundedTaxMoney V1Money value for refundedTaxMoney.
         * @return Builder
         */
        public Builder refundedTaxMoney(V1Money refundedTaxMoney) {
            this.refundedTaxMoney = refundedTaxMoney;
            return this;
        }

        /**
         * Setter for refundedAdditiveTaxMoney.
         * @param refundedAdditiveTaxMoney V1Money value for refundedAdditiveTaxMoney.
         * @return Builder
         */
        public Builder refundedAdditiveTaxMoney(V1Money refundedAdditiveTaxMoney) {
            this.refundedAdditiveTaxMoney = refundedAdditiveTaxMoney;
            return this;
        }

        /**
         * Setter for refundedAdditiveTax.
         * @param refundedAdditiveTax List of V1PaymentTax value for refundedAdditiveTax.
         * @return Builder
         */
        public Builder refundedAdditiveTax(List<V1PaymentTax> refundedAdditiveTax) {
            this.refundedAdditiveTax = refundedAdditiveTax;
            return this;
        }

        /**
         * Setter for refundedInclusiveTaxMoney.
         * @param refundedInclusiveTaxMoney V1Money value for refundedInclusiveTaxMoney.
         * @return Builder
         */
        public Builder refundedInclusiveTaxMoney(V1Money refundedInclusiveTaxMoney) {
            this.refundedInclusiveTaxMoney = refundedInclusiveTaxMoney;
            return this;
        }

        /**
         * Setter for refundedInclusiveTax.
         * @param refundedInclusiveTax List of V1PaymentTax value for refundedInclusiveTax.
         * @return Builder
         */
        public Builder refundedInclusiveTax(List<V1PaymentTax> refundedInclusiveTax) {
            this.refundedInclusiveTax = refundedInclusiveTax;
            return this;
        }

        /**
         * Setter for refundedTipMoney.
         * @param refundedTipMoney V1Money value for refundedTipMoney.
         * @return Builder
         */
        public Builder refundedTipMoney(V1Money refundedTipMoney) {
            this.refundedTipMoney = refundedTipMoney;
            return this;
        }

        /**
         * Setter for refundedDiscountMoney.
         * @param refundedDiscountMoney V1Money value for refundedDiscountMoney.
         * @return Builder
         */
        public Builder refundedDiscountMoney(V1Money refundedDiscountMoney) {
            this.refundedDiscountMoney = refundedDiscountMoney;
            return this;
        }

        /**
         * Setter for refundedSurchargeMoney.
         * @param refundedSurchargeMoney V1Money value for refundedSurchargeMoney.
         * @return Builder
         */
        public Builder refundedSurchargeMoney(V1Money refundedSurchargeMoney) {
            this.refundedSurchargeMoney = refundedSurchargeMoney;
            return this;
        }

        /**
         * Setter for refundedSurcharges.
         * @param refundedSurcharges List of V1PaymentSurcharge value for refundedSurcharges.
         * @return Builder
         */
        public Builder refundedSurcharges(List<V1PaymentSurcharge> refundedSurcharges) {
            this.refundedSurcharges = refundedSurcharges;
            return this;
        }

        /**
         * Setter for createdAt.
         * @param createdAt String value for createdAt.
         * @return Builder
         */
        public Builder createdAt(String createdAt) {
            this.createdAt = createdAt;
            return this;
        }

        /**
         * Setter for processedAt.
         * @param processedAt String value for processedAt.
         * @return Builder
         */
        public Builder processedAt(String processedAt) {
            this.processedAt = processedAt;
            return this;
        }

        /**
         * Setter for paymentId.
         * @param paymentId String value for paymentId.
         * @return Builder
         */
        public Builder paymentId(String paymentId) {
            this.paymentId = paymentId;
            return this;
        }

        /**
         * Setter for merchantId.
         * @param merchantId String value for merchantId.
         * @return Builder
         */
        public Builder merchantId(String merchantId) {
            this.merchantId = merchantId;
            return this;
        }

        /**
         * Setter for isExchange.
         * @param isExchange Boolean value for isExchange.
         * @return Builder
         */
        public Builder isExchange(Boolean isExchange) {
            this.isExchange = isExchange;
            return this;
        }

        /**
         * Builds a new {@link V1Refund} object using the set fields.
         * @return {@link V1Refund}
         */
        public V1Refund build() {
            V1Refund model =
                    new V1Refund(type, reason, refundedMoney, refundedProcessingFeeMoney,
                            refundedTaxMoney, refundedAdditiveTaxMoney, refundedAdditiveTax,
                            refundedInclusiveTaxMoney, refundedInclusiveTax, refundedTipMoney,
                            refundedDiscountMoney, refundedSurchargeMoney, refundedSurcharges,
                            createdAt, processedAt, paymentId, merchantId, isExchange);
            // Carry the transport metadata over to the built instance.
            model.httpContext = httpContext;
            return model;
        }
    }
}
linroid/my_practice
cpp/chapter6/separate_compilation/Chapter6.h
// Computes the factorial of `val` (val!).
// NOTE(review): the definition lives in the corresponding .cpp file, which is
// not visible here — behavior for negative `val` and overflow for large `val`
// (int is typically 32-bit, so val > 12 overflows) should be confirmed there.
int fact(int val);
piejanssens/openui5
src/sap.ui.core/test/sap/ui/core/qunit/testsuites/testsuite.supportability.qunit.js
sap.ui.define([], function() {
	"use strict";

	// Testsuite configuration consumed by the UI5 test starter.
	// Each key under `tests` names a QUnit module resolved through the
	// `defaults.module` pattern ({name} is substituted), unless the entry
	// supplies an explicit `page` to load instead.
	return {
		name: "TestSuite for Topic: Supportability",
		defaults: {
			module: "test-resources/sap/ui/core/qunit/{name}.qunit"
		},
		tests: {
			"util/jQuery.sap.measure": {
				title: "jQuery.sap.measure"
			},
			"util/jquery.sap.trace": {
				title: "jQuery.sap.trace",
				// Runs before the UI5 bootstrap so tracing hooks are installed early.
				beforeBootstrap: "test-resources/sap/ui/core/qunit/util/beforeBootstrap/jQuery.sap.trace"
			},
			AppCacheBuster: {
				/**
				 * Page kept because test assumes a specific baseURI
				 */
				page: "test-resources/sap/ui/core/qunit/AppCacheBuster.qunit.html",
				title: "sap.ui.core.AppCacheBuster"
			},
			"performance/BeaconRequest": {
				title: "sap.ui.performance.BeaconRequest",
				// Maps the bare "performance" module prefix used inside the test.
				loader: {
					paths: {
						performance: "test-resources/sap/ui/core/qunit/performance"
					}
				}
			},
			"performance/trace/FESR": {
				title: "sap.ui.performance.FESR"
			},
			// The InitFESR_* entries cover the different ways FESR can be
			// (de)activated: meta tag, meta tag with beacon URL, not at all,
			// and URL parameter.
			"performance/trace/InitFESR_metatag": {
				page: "test-resources/sap/ui/core/qunit/performance/trace/InitFESR_metatag.qunit.html",
				title: "sap.ui.performance.trace.FESR: Activation of FESR via meta-tag"
			},
			"performance/trace/InitFESR_metatag_beaconurl": {
				page: "test-resources/sap/ui/core/qunit/performance/trace/InitFESR_metatag_beaconurl.qunit.html",
				title: "sap.ui.performance.trace.FESR: Activation of FESR via meta-tag with beacon URL"
			},
			"performance/trace/InitFESR_notactive": {
				title: "sap.ui.performance.trace.FESR: Inactivity of FESR"
			},
			"performance/trace/InitFESR_urlparam": {
				page: "test-resources/sap/ui/core/qunit/performance/trace/InitFESR_urlparam.qunit.html?sap-ui-fesr=true",
				title: "sap.ui.performance.trace.FESR: Activation of FESR via url-param"
			},
			"performance/trace/Interaction": {
				title: "sap.ui.performance.Interaction"
			},
			"performance/trace/Passport": {
				title: "sap.ui.performance.Passport"
			},
			"performance/XHRInterceptor": {
				title: "sap.ui.performance.XHRInterceptor"
			}
		}
	};
});
pbirkle/petrolator-backend
src/main/java/de/itbirkle/petrolator/PetrolatorBackendApplication.java
package de.itbirkle.petrolator;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Application entry point for the Petrolator backend.
 *
 * <p>{@code @SpringBootApplication} enables component scanning,
 * auto-configuration and externalized configuration rooted at this package.
 */
@SpringBootApplication
public class PetrolatorBackendApplication {

    /**
     * Boots the Spring application context (and embedded server, if one is on
     * the classpath).
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        SpringApplication.run(PetrolatorBackendApplication.class, args);
    }

}
ldruschk/matterbridge
vendor/github.com/yaegashi/msgraph.go/beta/ModelUpdate.go
// Code generated by msgraph-generate.go DO NOT EDIT.

package msgraph

// UpdateRecordingStatusOperation undocumented
type UpdateRecordingStatusOperation struct {
	// CommsOperation is the base model of UpdateRecordingStatusOperation
	CommsOperation
}

// UpdateWindow undocumented
//
// NOTE(review): the start/end times appear to bound a recurring update
// (maintenance) window — confirm against the Microsoft Graph beta reference
// before relying on that interpretation.
type UpdateWindow struct {
	// Object is the base model of UpdateWindow
	Object
	// UpdateWindowStartTime undocumented
	UpdateWindowStartTime *TimeOfDay `json:"updateWindowStartTime,omitempty"`
	// UpdateWindowEndTime undocumented
	UpdateWindowEndTime *TimeOfDay `json:"updateWindowEndTime,omitempty"`
}

// UpdateWindowsDeviceAccountActionParameter undocumented
//
// NOTE(review): all fields are pointers with omitempty so that unset values
// are omitted from the request payload; field semantics are defined by the
// Graph API, not locally.
type UpdateWindowsDeviceAccountActionParameter struct {
	// Object is the base model of UpdateWindowsDeviceAccountActionParameter
	Object
	// DeviceAccount undocumented
	DeviceAccount *WindowsDeviceAccount `json:"deviceAccount,omitempty"`
	// PasswordRotationEnabled undocumented
	PasswordRotationEnabled *bool `json:"passwordRotationEnabled,omitempty"`
	// CalendarSyncEnabled undocumented
	CalendarSyncEnabled *bool `json:"calendarSyncEnabled,omitempty"`
	// DeviceAccountEmail undocumented
	DeviceAccountEmail *string `json:"deviceAccountEmail,omitempty"`
	// ExchangeServer undocumented
	ExchangeServer *string `json:"exchangeServer,omitempty"`
	// SessionInitiationProtocalAddress undocumented
	SessionInitiationProtocalAddress *string `json:"sessionInitiationProtocalAddress,omitempty"`
}
yadavanuj1996/freecodecamp-solutions
Javascript Algorithms And Data Structures Certification/Basic Javascript/45-shopping-list.js
// Shopping list: each entry is an [itemName, quantity] pair.
// Fixed: the list was first initialized to [] and then immediately
// reassigned (a dead store); initialize it with its data in one declaration.
var myList = [
  ["item-1", 1],
  ["item-2", 2],
  ["item-3", 3],
  ["item-4", 4],
  ["item-5", 5]
];
thewires2/Leetcode
HashTable/1894. Find the Student that Will Replace the Chalk.py
class Solution:
    def chalkReplacer(self, chalk: List[int], k: int) -> int:
        """Return the index of the student who must replace the chalk.

        Students 0..n-1 take turns in order; student i uses chalk[i] pieces
        per turn. Starting with k pieces, the first student whose need
        exceeds the remaining chalk is the one who replaces it.

        :param chalk: chalk[i] >= 1, amount used by student i each turn
        :param k: initial number of chalk pieces (k >= 0)
        :return: index of the replacing student

        Improvements over the original: ``k %= sum(chalk)`` replaces the
        manual rounds/subtraction dance (same math, one step), and
        ``enumerate`` replaces index arithmetic; the unused ``rounds``
        local is gone.
        """
        # Whole passes over all students never change who runs out first,
        # so only the remainder of k modulo one full pass matters.
        k %= sum(chalk)
        for i, used in enumerate(chalk):
            if used > k:
                return i
            k -= used
ndevenish/dxtbx
ext.py
from __future__ import absolute_import, division, print_function

import boost.python

# Load the compiled dxtbx C++ extension module through boost.python's
# extension loader (raises if the shared library is missing/incompatible).
ext = boost.python.import_ext("dxtbx_ext")

# Re-export every public name from the extension at this module's top level,
# so callers can write `from dxtbx.ext import <name>`.
from dxtbx_ext import *  # isort:skip # noqa: F401,F403,E402
IamMayankThakur/test-bigdata
adminmgr/media/code/A2/python/task/BD_026_109_110_sdrKORi.py
from __future__ import print_function
import re
import sys
from operator import add
from pyspark.sql import SparkSession


def computeContribs(urls, rank):
    """Yield (url, share) pairs: `rank` split evenly over `urls`."""
    num_urls = len(urls)
    for url in urls:
        yield (url, rank / num_urls)


def parseNeighbors(urls):
    """Parse one CSV line into a (source, target) edge pair.

    Splits on runs of commas and keeps only the first two fields.
    """
    parts = re.split(r'\,+', urls)
    return parts[0], parts[1]


def diff(r):
    """For a leftOuterJoin row (key, (old, new)), return (key, |old - new|).

    Returns None (implicitly) when either rank is missing; callers filter
    those out before comparing against the convergence threshold.
    """
    if r[1][0] is not None:
        if r[1][1] is not None:
            d = abs(r[1][0]-r[1][1])
            return r[0], d


if __name__ == "__main__":
    # Usage: pagerank <file> <iterations> <weight-percent>
    # iterations == 0 means "run until convergence"; weight-percent == 0
    # means "use the default damping factor 0.80".
    if len(sys.argv) != 4:
        print("Usage: pagerank <file> <iterations>", file=sys.stderr)
        sys.exit(-1)

    # Initialize the spark context.
    spark = SparkSession\
        .builder\
        .appName("PythonPageRank")\
        .getOrCreate()

    # Each input line appears to be CSV: src,dst,<numerator>,<denominator>,...
    # (fields 2 and 3 seed the initial rank of dst) — TODO confirm against
    # the dataset this assignment ships with.
    lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
    # Edge list as "src,dst" strings.
    nodes = lines.map(lambda x: (x.split(",")[0]+","+x.split(",")[1]))
    #print(nodes.collect())
    # for line in lines.collect():
    #     print(line.split(",")[0])
    # Initial per-node rank contribution from fields 2/3 of each line.
    get_ranks = lines.map(lambda x: (x.split(",")[1],int(x.split(",")[2])/int(x.split(",")[3])))
    #print(get_ranks.collect())
    # Damping factor supplied as an integer percentage in argv[3].
    weight = int(sys.argv[3]) * 0.01
    y = get_ranks.reduceByKey(lambda x, y: x + y)
    # Clamp initial ranks to at least 1.0.
    ranks = y.map(lambda x : (x[0], max(x[1],1.0)))
    # Adjacency lists: src -> iterable of dst, deduplicated and cached
    # because they are reused every iteration.
    links = nodes.map(lambda urls: parseNeighbors(urls)).distinct().groupByKey().cache()
    if (int(sys.argv[2])) == 0:
        # Convergence mode: iterate until every rank moves by < 1e-4,
        # capped at 2000 iterations.
        flag = 0
        iterations=0
        while flag == 0 and iterations < 2000:
            iterations= iterations+1
            pre_ranks=ranks
            contribs = links.join(ranks).flatMap(
                lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
            if (int(sys.argv[3])) == 0:
                # Default damping factor 0.80 when no weight was supplied.
                ranks = contribs.reduceByKey(add).mapValues(lambda rank: rank * 0.80 + 0.2)
            else:
                ranks = contribs.reduceByKey(add).mapValues(lambda rank: (rank * weight) + (1-weight))
            r1=pre_ranks
            r2=ranks
            flag=1
            # Compare old vs new ranks; any delta >= 1e-4 forces another pass.
            c= r1.leftOuterJoin(r2).map(lambda r:diff(r))
            for x in c.collect():
                if x is not None:
                    if x[1]>= 0.0001:
                        flag=0
    else:
        # Fixed-iteration mode.
        for iteration in range(int(sys.argv[2])):
            contribs = links.join(ranks).flatMap(
                lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
            if (int(sys.argv[3])) == 0:
                ranks = contribs.reduceByKey(add).mapValues(lambda rank: rank * 0.80 + 0.2)
            else:
                ranks = contribs.reduceByKey(add).mapValues(lambda rank: (rank * weight) + (1-weight))

    # Emit "node,rank" ordered by descending rank, then node name.
    ranks_final = ranks.sortBy(lambda x : (-x[1],x[0]))
    for (link, rank) in ranks_final.collect():
        #print("%s, %s" % (link, rank))
        print(link, "{:.12f}".format(rank),sep = ",")
        #print()

    spark.stop()
valitydev/fraudbusters
src/test/java/dev/vality/fraudbusters/converter/TrustConditionToConditionConverterTest.java
package dev.vality.fraudbusters.converter;

import dev.vality.fraudo.model.TrustCondition;
import dev.vality.trusted.tokens.Condition;
import dev.vality.trusted.tokens.YearsOffset;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static dev.vality.fraudbusters.factory.TestObjectsFactory.createTrustCondition;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link TrustConditionToConditionConverter}: verifies that a
 * fraudo {@code TrustCondition} is mapped field-for-field onto a
 * trusted-tokens {@code Condition}, with the years offset delegated to a
 * mocked {@link IntegerToYearsOffsetConverter}.
 */
@ExtendWith(MockitoExtension.class)
class TrustConditionToConditionConverterTest {

    private TrustConditionToConditionConverter converter;

    @Mock
    private IntegerToYearsOffsetConverter integerToYearsOffsetConverter;

    @BeforeEach
    void setUp() {
        converter = new TrustConditionToConditionConverter(integerToYearsOffsetConverter);
    }

    /** Single condition: every field is copied and the offset converter is called once. */
    @Test
    void convert() {
        when(integerToYearsOffsetConverter.convert(anyInt())).thenReturn(YearsOffset.current_with_last_years);
        TrustCondition trustCondition = createTrustCondition(2);

        Condition condition = converter.convert(trustCondition);

        verify(integerToYearsOffsetConverter, times(1))
                .convert(trustCondition.getTransactionsYearsOffset());
        assertEquals(trustCondition.getTransactionsCount().intValue(), condition.getCount());
        assertEquals(trustCondition.getTransactionsCurrency(), condition.getCurrencySymbolicCode());
        assertEquals(trustCondition.getTransactionsYearsOffset(), condition.getYearsOffset().getValue());
        assertEquals(trustCondition.getTransactionsSum().longValue(), condition.getSum());
    }

    /**
     * Batch conversion: both inputs are mapped; a condition built from a
     * TrustCondition without a sum must leave {@code Condition.sum} unset.
     */
    @Test
    void convertBatch() {
        when(integerToYearsOffsetConverter.convert(anyInt())).thenReturn(YearsOffset.current_with_last_years);
        TrustCondition trustCondition = createTrustCondition(2);
        TrustCondition trustConditionNoSum = createTrustCondition(null);

        // Index results by transaction count so assertions don't depend on order.
        Map<Integer, Condition> conditions =
                converter.convertBatch(List.of(trustCondition, trustConditionNoSum)).stream()
                        .collect(Collectors.toMap(
                                Condition::getCount,
                                condition -> condition
                        ));

        verify(integerToYearsOffsetConverter, times(2))
                .convert(trustCondition.getTransactionsYearsOffset());
        Condition condition = conditions.get(trustCondition.getTransactionsCount());
        assertEquals(trustCondition.getTransactionsCount().intValue(), condition.getCount());
        assertEquals(trustCondition.getTransactionsCurrency(), condition.getCurrencySymbolicCode());
        assertEquals(trustCondition.getTransactionsYearsOffset(), condition.getYearsOffset().getValue());
        assertEquals(trustCondition.getTransactionsSum().longValue(), condition.getSum());
        condition = conditions.get(trustConditionNoSum.getTransactionsCount());
        assertEquals(trustConditionNoSum.getTransactionsCount().intValue(), condition.getCount());
        assertEquals(trustConditionNoSum.getTransactionsCurrency(), condition.getCurrencySymbolicCode());
        assertEquals(trustConditionNoSum.getTransactionsYearsOffset(), condition.getYearsOffset().getValue());
        assertFalse(condition.isSetSum());
    }
}
DEBANMC/dragonfly
server/cmd/doc.go
// Package cmd implements a Minecraft specific command system, which may be used simply by 'plugging' it in
// and sending commands registered in an AvailableCommandsPacket.
//
// The cmd package handles commands in a specific way: It requires a struct to be passed to the cmd.New()
// function, which implements the Runnable interface. For every exported field in the struct, executing the
// command will result in the parsing of the arguments using the types of the fields of the struct, in the
// order that they appear in.
//
// A Runnable may have exported fields only of the following types:
// int8, int16, int32, int64, int, uint8, uint16, uint32, uint64, uint,
// float32, float64, string, bool, mgl64.Vec3, Varargs, []Target
// or a type that implements the cmd.Parameter, cmd.Enum or cmd.SubCommand interface. cmd.Enum implementations
// must be of the type string.
// Fields in the Runnable struct may have the `optional:""` struct tag to mark them as an optional parameter,
// the `suffix:"$suffix"` struct tag to add a suffix to the parameter in the usage, and the `name:"name"` tag
// to specify a name different from the field name for the parameter.
//
// Commands may be registered using the cmd.Register() function. By itself, this function will not ensure that
// the client will be able to use the command: The user of the cmd package must handle commands itself and run
// the appropriate one using the cmd.ByAlias function.
package cmd
frroliveira/sqlipa
src/test/sqlipa/parser/TestASTParser.java
<gh_stars>0 package test.sqlipa.parser; import static org.junit.Assert.*; import java.io.StringReader; import java.util.List; import main.sqlipa.ast.ColumnDef; import main.sqlipa.ast.IndexedColumn; import main.sqlipa.ast.Node; import main.sqlipa.ast.TypeName; import main.sqlipa.ast.stmt.AnalyzeStmt; import main.sqlipa.ast.stmt.AttachStmt; import main.sqlipa.ast.stmt.BeginStmt; import main.sqlipa.ast.stmt.DetachStmt; import main.sqlipa.ast.stmt.alter.AddColumnStmt; import main.sqlipa.ast.stmt.alter.AlterTableStmt; import main.sqlipa.ast.stmt.alter.RenameTableStmt; import main.sqlipa.ast.stmt.create.CreateIndexStmt; import main.sqlipa.ast.stmt.create.CreateTableStmt; import main.sqlipa.ast.stmt.create.CreateTableStmtWithColumns; import main.sqlipa.ast.stmt.create.CreateTableStmtWithSelect; import main.sqlipa.ast.stmt.create.CreateTriggerStmt; import main.sqlipa.ast.stmt.create.CreateViewStmt; import main.sqlipa.ast.stmt.drop.DropIndexStmt; import main.sqlipa.ast.stmt.drop.DropTableStmt; import main.sqlipa.ast.stmt.drop.DropTriggerStmt; import main.sqlipa.ast.stmt.drop.DropViewStmt; import main.sqlipa.ast.stmt.event.delete.DeleteStmt; import main.sqlipa.ast.stmt.event.insert.InsertStmt; import main.sqlipa.ast.stmt.event.insert.InsertStmtDefault; import main.sqlipa.ast.stmt.event.insert.InsertStmtWithValues; import main.sqlipa.ast.stmt.event.select.SelectStmt; import main.sqlipa.ast.stmt.event.update.UpdateStmt; import main.sqlipa.parser.ASTParser; import main.sqlipa.parser.ParseException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class TestASTParser { @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void testAlterTableStmt_rename() throws ParseException { // Without database specification. 
// -- ALTER TABLE table_name RENAME TO new_table_name AlterTableStmt stmt = stmt("alter1").alterTableStmt(); assertTrue(stmt instanceof RenameTableStmt); assertNull(stmt.database); assertEquals("table_name", stmt.table.name); assertEquals("new_table_name", ((RenameTableStmt) stmt).newTable.name); // With database specification. // -- ALTER TABLE database_name.table_name RENAME TO new_table_name stmt = stmt("alter2").alterTableStmt(); assertTrue(stmt instanceof RenameTableStmt); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.table.name); assertEquals("new_table_name", ((RenameTableStmt) stmt).newTable.name); } @Test public void testAlterTableStmt_addColumn() throws ParseException { // Without database specification and with COLUMN. // -- ALTER TABLE table_name ADD COLUMN new_column INTEGER NOT NULL AlterTableStmt stmt = stmt("alter3").alterTableStmt(); assertTrue(stmt instanceof AddColumnStmt); assertNull(stmt.database); assertEquals("table_name", stmt.table.name); assertNotNull(((AddColumnStmt) stmt).column); // With database specification and without COLUMN. // -- ALTER TABLE database_name.table_name ADD new_column INTEGER NOT // NULL stmt = stmt("alter4").alterTableStmt(); assertTrue(stmt instanceof AddColumnStmt); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.table.name); assertNotNull(((AddColumnStmt) stmt).column); } @Test public void testAnalyzeStmt() throws ParseException { // One name in ANALYZE. // -- ANALYZE table_name AnalyzeStmt stmt = stmt("analyze1").analyzeStmt(); assertEquals("table_name", stmt.first.name); assertNull(stmt.second); // Two names in ANALYZE. // -- ANALYZE database_name.index_name stmt = stmt("analyze2").analyzeStmt(); assertEquals("database_name", stmt.first.name); assertEquals("index_name", stmt.second.name); } @Test public void testAttachStmt() throws ParseException { // Without DATABASE. 
// -- ATTACH file_name AS database_name AttachStmt stmt = stmt("attach1").attachStmt(); assertNotNull(stmt.expr); assertEquals("database_name", stmt.database.name); // With DATABASE. // -- ATTACH DATABASE file_name AS database_name stmt = stmt("attach2").attachStmt(); assertNotNull(stmt.expr); assertEquals("database_name", stmt.database.name); } @Test public void testBeginStmt() throws ParseException { // -- BEGIN assertNull(stmt("begin1").beginStmt().type); // -- BEGIN TRANSACTION assertNull(stmt("begin2").beginStmt().type); // -- BEGIN DEFERRED assertEquals(BeginStmt.Type.DEFERRED, stmt("begin3").beginStmt().type); // -- BEGIN IMMEDIATE assertEquals(BeginStmt.Type.IMMEDIATE, stmt("begin4").beginStmt().type); // -- BEGIN EXCLUSIVE TRANSACTION assertEquals(BeginStmt.Type.EXCLUSIVE, stmt("begin5").beginStmt().type); } @Test public void testCommitStmt() throws ParseException { // -- END assertNotNull(stmt("commit1").commitStmt()); // -- COMMIT assertNotNull(stmt("commit2").commitStmt()); // -- END TRANSACTION assertNotNull(stmt("commit3").commitStmt()); // -- COMMIT TRANSACTION assertNotNull(stmt("commit4").commitStmt()); } @Test public void testRollbackStmt() throws ParseException { // -- ROLLBACK assertNull(stmt("rollback1").rollbackStmt().savepoint); // -- ROLLBACK TRANSACTION assertNull(stmt("rollback2").rollbackStmt().savepoint); // -- ROLLBACK TO savepoint_name assertEquals("savepoint_name", stmt("rollback3").rollbackStmt().savepoint.name); // -- ROLLBACK TO SAVEPOINT savepoint_name assertEquals("savepoint_name", stmt("rollback4").rollbackStmt().savepoint.name); // -- ROLLBACK TRANSACTION TO savepoint_name assertEquals("savepoint_name", stmt("rollback5").rollbackStmt().savepoint.name); // -- ROLLBACK TRANSACTION TO SAVEPOINT savepoint_name assertEquals("savepoint_name", stmt("rollback6").rollbackStmt().savepoint.name); } @Test public void testSavepointStmt() throws ParseException { // -- SAVEPOINT savepoint_name assertEquals("savepoint_name", 
stmt("savepoint1").savepointStmt().savepoint.name); } @Test public void testReleaseStmt() throws ParseException { // -- RELEASE savepoint_name assertEquals("savepoint_name", stmt("release1").releaseStmt().savepoint.name); // -- RELEASE SAVEPOINT savepoint_name assertEquals("savepoint_name", stmt("release2").releaseStmt().savepoint.name); } @Test public void testCreateIndexStmt() throws ParseException { // -- CREATE INDEX IF NOT EXISTS index_name ON table_name(column_name1) CreateIndexStmt stmt = stmt("create_index1").createIndexStmt(); assertFalse(stmt.hasUnique); assertTrue(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("index_name", stmt.name.name); assertEquals("table_name", stmt.table.name); assertList(stmt.columns, 1); // -- CREATE UNIQUE INDEX database_name.index_name ON // table_name(column_name1, column_name2) stmt = stmt("create_index2").createIndexStmt(); assertTrue(stmt.hasUnique); assertFalse(stmt.hasIfNotExists); assertEquals("database_name", stmt.database.name); assertEquals("index_name", stmt.name.name); assertEquals("table_name", stmt.table.name); assertList(stmt.columns, 2); // -- CREATE INDEX index_name ON table_name(column_name) stmt = stmt("create_index3").createIndexStmt(); assertFalse(stmt.hasUnique); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("index_name", stmt.name.name); assertEquals("table_name", stmt.table.name); assertList(stmt.columns, 1); // -- CREATE INDEX index_name ON table_name() thrown.expect(ParseException.class); stmt("create_index4").createIndexStmt(); } @Test public void testIndexedColumn() throws ParseException { // -- column_name COLLATE collation_name ASC IndexedColumn part = part("indexed_column1").indexedColumn(); assertEquals("column_name", part.column.name); assertEquals("collation_name", part.collation.name); assertEquals(IndexedColumn.Order.ASC, part.order); // -- column_name DESC part = part("indexed_column2").indexedColumn(); assertEquals("column_name", 
part.column.name); assertNull(part.collation); assertEquals(IndexedColumn.Order.DESC, part.order); // -- column_name part = part("indexed_column3").indexedColumn(); assertEquals("column_name", part.column.name); assertNull(part.collation); assertNull(part.order); } @Test public void testCreateTableStmt_columns() throws ParseException { // -- CREATE TABLE IF NOT EXISTS table_name( // -- column_name1 // -- ) CreateTableStmt stmt = stmt("create_table1").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithColumns); assertFalse(stmt.hasTemporary); assertTrue(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); assertList(((CreateTableStmtWithColumns) stmt).columns, 1); assertNull(((CreateTableStmtWithColumns) stmt).constraints); // -- CREATE TEMP TABLE database_name.table_name( // -- column_name1, // -- column_name2 // -- ) stmt = stmt("create_table2").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithColumns); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.name.name); assertList(((CreateTableStmtWithColumns) stmt).columns, 2); assertNull(((CreateTableStmtWithColumns) stmt).constraints); // -- CREATE TEMPORARY TABLE table_name( // -- column_name1, // -- column_name2, // -- CHECK(column_name1 == 1) // -- ) stmt = stmt("create_table3").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithColumns); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); assertList(((CreateTableStmtWithColumns) stmt).columns, 2); assertList(((CreateTableStmtWithColumns) stmt).constraints, 1); // -- CREATE TABLE table_name( // -- column_name1, // -- column_name2, // -- CHECK(column_name1 == 1), // -- CHECK(column_name2 == 2) // -- ) stmt = stmt("create_table4").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithColumns); 
assertFalse(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); assertList(((CreateTableStmtWithColumns) stmt).columns, 2); assertList(((CreateTableStmtWithColumns) stmt).constraints, 2); // -- CREATE TABLE IF NOT EXISTS table_name() thrown.expect(ParseException.class); stmt("create_table5").createTableStmt(); } @Test public void testCreateTableStmt_select() throws ParseException { // -- CREATE TABLE IF NOT EXISTS table_name AS SELECT * CreateTableStmt stmt = stmt("create_table6").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithSelect); assertFalse(stmt.hasTemporary); assertTrue(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); assertNotNull(((CreateTableStmtWithSelect) stmt).select); // -- CREATE TEMP TABLE database_name.table_name AS SELECT * stmt = stmt("create_table7").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithSelect); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.name.name); assertNotNull(((CreateTableStmtWithSelect) stmt).select); // -- CREATE TEMPORARY TABLE table_name AS SELECT * stmt = stmt("create_table8").createTableStmt(); assertTrue(stmt instanceof CreateTableStmtWithSelect); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); assertNotNull(((CreateTableStmtWithSelect) stmt).select); } @Test public void testColumnDef() throws ParseException { // -- column_name ColumnDef part = part("column_def1").columnDef(); assertEquals("column_name", part.column.name); assertNull(part.type); assertNull(part.constraints); // -- column_name type_name part = part("column_def2").columnDef(); assertEquals("column_name", part.column.name); assertNotNull(part.type); assertNull(part.constraints); // -- column_name type_name NOT NULL part 
= part("column_def3").columnDef(); assertEquals("column_name", part.column.name); assertNotNull(part.type); assertList(part.constraints, 1); // -- column_name type_name NOT NULL COLLATE collation_name part = part("column_def4").columnDef(); assertEquals("column_name", part.column.name); assertNotNull(part.type); assertList(part.constraints, 2); // -- column_name NOT NULL part = part("column_def5").columnDef(); assertEquals("column_name", part.column.name); assertNull(part.type); assertList(part.constraints, 1); // -- column_name type_name NOT NULL COLLATE collation_name part = part("column_def6").columnDef(); assertEquals("column_name", part.column.name); assertNull(part.type); assertList(part.constraints, 2); } @Test public void testTypeName() throws ParseException { // -- name TypeName part = part("type_name1").typeName(); assertEquals("name", part.name); assertNull(part.xDimension); assertNull(part.yDimension); // -- name(1) part = part("type_name2").typeName(); assertEquals("name", part.name); assertNotNull(part.xDimension); assertNull(part.yDimension); // -- name(1,2) part = part("type_name3").typeName(); assertEquals("name", part.name); assertNotNull(part.xDimension); assertNotNull(part.yDimension); // -- name1 name2 name3 part = part("type_name4").typeName(); assertEquals("name1 name2 name3", part.name); assertNull(part.xDimension); assertNull(part.yDimension); // -- name1 name2 name3(1) part = part("type_name5").typeName(); assertEquals("name1 name2 name3", part.name); assertNotNull(part.xDimension); assertNull(part.yDimension); // -- name1 name2 name3(1,2) part = part("type_name6").typeName(); assertEquals("name1 name2 name3", part.name); assertNotNull(part.xDimension); assertNotNull(part.yDimension); } @Test public void testColumnConstraint() throws ParseException { // TODO: } @Test public void testSignedNumber() throws ParseException { // TODO: } @Test public void testTableConstraint() throws ParseException { // TODO: } @Test public void 
testForeignKeyClause() throws ParseException { // TODO: } @Test public void testConflictClause() throws ParseException { // TODO: } @Test public void testCreateTriggerStmt() throws ParseException { // -- CREATE TRIGGER IF NOT EXISTS trigger_name BEFORE DELETE // -- ON table_name FOR EACH ROW // -- BEGIN // -- UPDATE table_name SET column_name = 1; // -- END; CreateTriggerStmt stmt = stmt("create_trigger1").createTriggerStmt(); assertFalse(stmt.hasTemporary); assertTrue(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); assertEquals(CreateTriggerStmt.Time.BEFORE, stmt.time); assertEquals(CreateTriggerStmt.Event.DELETE, stmt.event); assertNull(stmt.columns); assertEquals("table_name", stmt.table.name); assertTrue(stmt.hasForEachRow); assertNull(stmt.when); assertList(stmt.stmts, 1); assertTrue(stmt.stmts.get(0) instanceof UpdateStmt); // -- CREATE TEMP TRIGGER database_name.trigger_name AFTER INSERT // -- ON table_name FOR EACH ROW WHEN column_name == 1 // -- BEGIN // -- INSERT INTO table_name DEFAULT VALUES; // -- END; stmt = stmt("create_trigger2").createTriggerStmt(); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertEquals("database_name", stmt.database.name); assertEquals("trigger_name", stmt.name.name); assertEquals(CreateTriggerStmt.Time.AFTER, stmt.time); assertEquals(CreateTriggerStmt.Event.INSERT, stmt.event); assertNull(stmt.columns); assertEquals("table_name", stmt.table.name); assertTrue(stmt.hasForEachRow); assertNotNull(stmt.when); assertList(stmt.stmts, 1); assertTrue(stmt.stmts.get(0) instanceof InsertStmt); // -- CREATE TEMPORARY TRIGGER trigger_name INSTEAD OF UPDATE // -- ON table_name WHEN column_name == 1 // -- BEGIN // -- DELETE FROM table_name; // -- END; stmt = stmt("create_trigger3").createTriggerStmt(); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); 
assertEquals(CreateTriggerStmt.Time.INSTEAD_OF, stmt.time); assertEquals(CreateTriggerStmt.Event.UPDATE, stmt.event); assertNull(stmt.columns); assertEquals("table_name", stmt.table.name); assertFalse(stmt.hasForEachRow); assertNotNull(stmt.when); assertList(stmt.stmts, 1); assertTrue(stmt.stmts.get(0) instanceof DeleteStmt); // -- CREATE TEMPORARY TRIGGER trigger_name INSTEAD OF UPDATE // -- ON table_name WHEN column_name == 1 // -- BEGIN // -- INSERT INTO table_name DEFAULT VALUES; // -- DELETE FROM table_name; // -- END; stmt = stmt("create_trigger4").createTriggerStmt(); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); assertEquals(CreateTriggerStmt.Time.INSTEAD_OF, stmt.time); assertEquals(CreateTriggerStmt.Event.UPDATE, stmt.event); assertNull(stmt.columns); assertEquals("table_name", stmt.table.name); assertFalse(stmt.hasForEachRow); assertNotNull(stmt.when); assertList(stmt.stmts, 2); assertTrue(stmt.stmts.get(0) instanceof InsertStmt); assertTrue(stmt.stmts.get(1) instanceof DeleteStmt); // -- CREATE TEMPORARY TRIGGER trigger_name INSTEAD OF UPDATE // -- ON table_name WHEN column_name == 1 // -- BEGIN // -- END; thrown.expect(ParseException.class); stmt("create_trigger5").createTriggerStmt(); // -- CREATE TRIGGER trigger_name UPDATE OF column_name1 // -- ON table_name // -- BEGIN // -- SELECT *; // -- END; stmt = stmt("create_trigger6").createTriggerStmt(); assertFalse(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); assertNull(stmt.time); assertEquals(CreateTriggerStmt.Event.UPDATE, stmt.event); assertList(stmt.columns, 1); assertEquals("table_name", stmt.table.name); assertFalse(stmt.hasForEachRow); assertNull(stmt.when); assertList(stmt.stmts, 1); assertTrue(stmt.stmts.get(0) instanceof SelectStmt); // -- CREATE TRIGGER trigger_name UPDATE OF column_name1, column_name2 // -- ON table_name 
// -- BEGIN // -- SELECT *; // -- END; stmt = stmt("create_trigger7").createTriggerStmt(); assertFalse(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); assertNull(stmt.time); assertEquals(CreateTriggerStmt.Event.UPDATE, stmt.event); assertList(stmt.columns, 2); assertEquals("table_name", stmt.table.name); assertFalse(stmt.hasForEachRow); assertNull(stmt.when); assertList(stmt.stmts, 1); assertTrue(stmt.stmts.get(0) instanceof SelectStmt); // -- CREATE TRIGGER trigger_name UPDATE OF // -- ON table_name // -- BEGIN // -- SELECT *; // -- END; thrown.expect(ParseException.class); stmt("create_trigger8").createTriggerStmt(); } @Test public void testCreateViewStmt() throws ParseException { // -- CREATE VIEW IF NOT EXISTS view_name AS SELECT * CreateViewStmt stmt = stmt("create_view1").createViewStmt(); assertFalse(stmt.hasTemporary); assertTrue(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("view_name", stmt.name.name); assertNotNull(stmt.select); // -- CREATE TEMP VIEW database_name.view_name AS SELECT * stmt = stmt("create_view2").createViewStmt(); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertEquals("database_name", stmt.database.name); assertEquals("view_name", stmt.name.name); assertNotNull(stmt.select); // -- CREATE TEMPORARY VIEW view_name AS SELECT * stmt = stmt("create_view3").createViewStmt(); assertTrue(stmt.hasTemporary); assertFalse(stmt.hasIfNotExists); assertNull(stmt.database); assertEquals("view_name", stmt.name.name); assertNotNull(stmt.select); // -- CREATE TEMPORARY VIEW view_name AS thrown.expect(ParseException.class); stmt("create_view4").createViewStmt(); } @Test public void testCreateVirtualTableStmt() throws ParseException { // TODO: } @Test public void testDeleteStmt() throws ParseException { // -- DELETE FROM table_name DeleteStmt stmt = stmt("delete1").deleteStmt(); assertNotNull(stmt.qualifiedTable); assertNull(stmt.where); 
assertNull(stmt.constraint); // -- DELETE FROM table_name WHERE column_name == 1 stmt = stmt("delete2").deleteStmt(); assertNotNull(stmt.qualifiedTable); assertNotNull(stmt.where); assertNull(stmt.constraint); // -- DELETE FROM table_name LIMIT 10 stmt = stmt("delete3").deleteStmt(); assertNotNull(stmt.qualifiedTable); assertNull(stmt.where); assertNotNull(stmt.constraint); // -- DELETE FROM table_name WHERE column_name == 1 LIMIT 10 stmt = stmt("delete4").deleteStmt(); assertNotNull(stmt.qualifiedTable); assertNotNull(stmt.where); assertNotNull(stmt.constraint); } @Test public void testEventConstraint() throws ParseException { // TODO: } @Test public void testDetachStmt() throws ParseException { // -- DETACH database_name DetachStmt stmt = stmt("detach1").detachStmt(); assertEquals("database_name", stmt.database.name); assertFalse(stmt.hasDatabase); // -- DETACH DATABASE database_name stmt = stmt("detach2").detachStmt(); assertEquals("database_name", stmt.database.name); assertTrue(stmt.hasDatabase); } @Test public void testDropIndexStmt() throws ParseException { // -- DROP INDEX index_name DropIndexStmt stmt = stmt("drop_index1").dropIndexStmt(); assertFalse(stmt.hasIfExists); assertNull(stmt.database); assertEquals("index_name", stmt.name.name); // -- DROP INDEX database_name.index_name stmt = stmt("drop_index2").dropIndexStmt(); assertFalse(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("index_name", stmt.name.name); // -- DROP INDEX IF EXISTS database_name.index_name stmt = stmt("drop_index3").dropIndexStmt(); assertTrue(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("index_name", stmt.name.name); } @Test public void testDropTableStmt() throws ParseException { // -- DROP TABLE table_name DropTableStmt stmt = stmt("drop_table1").dropTableStmt(); assertFalse(stmt.hasIfExists); assertNull(stmt.database); assertEquals("table_name", stmt.name.name); // -- DROP TABLE database_name.table_name 
stmt = stmt("drop_table2").dropTableStmt(); assertFalse(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.name.name); // -- DROP TABLE IF EXISTS database_name.table_name stmt = stmt("drop_table3").dropTableStmt(); assertTrue(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.name.name); } @Test public void testDropTriggerStmt() throws ParseException { // -- DROP TRIGGER trigger_name DropTriggerStmt stmt = stmt("drop_trigger1").dropTriggerStmt(); assertFalse(stmt.hasIfExists); assertNull(stmt.database); assertEquals("trigger_name", stmt.name.name); // -- DROP TRIGGER database_name.trigger_name stmt = stmt("drop_trigger2").dropTriggerStmt(); assertFalse(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("trigger_name", stmt.name.name); // -- DROP TRIGGER IF EXISTS database_name.trigger_name stmt = stmt("drop_trigger3").dropTriggerStmt(); assertTrue(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("trigger_name", stmt.name.name); } @Test public void testDropViewStmt() throws ParseException { // -- DROP VIEW view_name DropViewStmt stmt = stmt("drop_view1").dropViewStmt(); assertFalse(stmt.hasIfExists); assertNull(stmt.database); assertEquals("view_name", stmt.name.name); // -- DROP VIEW database_name.view_name stmt = stmt("drop_view2").dropViewStmt(); assertFalse(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("view_name", stmt.name.name); // -- DROP VIEW IF EXISTS database_name.view_name stmt = stmt("drop_view3").dropViewStmt(); assertTrue(stmt.hasIfExists); assertEquals("database_name", stmt.database.name); assertEquals("view_name", stmt.name.name); } @Test public void testExpression() throws ParseException { // TODO: } @Test public void testLiteral() throws ParseException { // TODO: } @Test public void testInsertStmt_default() throws ParseException { // -- INSERT 
INTO database_name.table_name DEFAULT VALUES InsertStmt stmt = stmt("insert1").insertStmt(); assertTrue(stmt instanceof InsertStmtDefault); assertEquals(InsertStmt.Type.INSERT, stmt.type); assertEquals("database_name", stmt.database.name); assertEquals("table_name", stmt.table.name); // -- REPLACE INTO table_name DEFAULT VALUES stmt = stmt("insert2").insertStmt(); assertTrue(stmt instanceof InsertStmtDefault); assertEquals(InsertStmt.Type.REPLACE, stmt.type); assertNull(stmt.database); assertEquals("table_name", stmt.table.name); } @Test public void testInsertStmt_withValues() throws ParseException { // -- INSERT OR ROLLBACK INTO table_name(column_name1) VALUES(1) InsertStmt stmt = stmt("insert3").insertStmt(); assertTrue(stmt instanceof InsertStmtWithValues); assertEquals(InsertStmt.Type.INSERT_OR_ROLLBACK, stmt.type); assertNull(stmt.database); assertEquals("table_name", stmt.table.name); assertList(((InsertStmtWithValues) stmt).columns, 1); } @Test public void testPragmaStmt() throws ParseException { // TODO: } @Test public void testReindexStmt() throws ParseException { // TODO: } @Test public void testSelectStmt() throws ParseException { // TODO: } @Test public void testUpdateStmt() throws ParseException { // TODO: } @Test public void testQualifiedTableName() throws ParseException { // TODO: } @Test public void testVacuumStmt() throws ParseException { // TODO: } private TestHelper mHelper = new TestHelper(); private ASTParser parser(final String sql) { return new ASTParser(new StringReader(sql)); } private ASTParser stmt(final String part) { return parser(mHelper.getStmt(part)); } private ASTParser part(final String part) { return parser(mHelper.getPart(part)); } private void assertList(List<? extends Node> nodes, int size) { assertEquals(size, nodes.size()); for (Node node : nodes) { assertNotNull(node); } } private void assertMatrix(List<List<? extends Node>> nodes, int firstSize, int secondSize) { assertEquals(firstSize, nodes.size()); for (List<? 
extends Node> nodeList : nodes) { assertEquals(secondSize, nodeList.size()); for (Node node : nodeList) { assertNotNull(node); } } } }
SWayfarer/swl2
SWayfarer's Lib II/src/ru/swayfarer/swl2/asm/transfomer/fieldaccessors/FieldAccessorClassVisitor.java
<reponame>SWayfarer/swl2
package ru.swayfarer.swl2.asm.transfomer.fieldaccessors;

import ru.swayfarer.swl2.asm.AsmUtils;
import ru.swayfarer.swl2.asm.informated.ClassInfo;
import ru.swayfarer.swl2.asm.informated.FieldInfo;
import ru.swayfarer.swl2.asm.informated.MethodInfo;
import ru.swayfarer.swl2.asm.informated.visitor.InformatedClassVisitor;
import ru.swayfarer.swl2.collections.CollectionsSWL;
import ru.swayfarer.swl2.collections.extended.IExtendedList;
import ru.swayfarer.swl2.markers.InternalElement;
import ru.swayfarer.swl2.z.dependencies.org.objectweb.asm.ClassVisitor;
import ru.swayfarer.swl2.z.dependencies.org.objectweb.asm.MethodVisitor;

/**
 * Class visitor for {@link FieldAccessorsClassTransformer}.
 * <p>
 * After the whole class has been visited, appends public getter/setter
 * methods for every field of the visited class that was registered in the
 * transformer, skipping accessors the class already declares.
 * @author swayfarer
 */
public class FieldAccessorClassVisitor extends InformatedClassVisitor {

	/** Transformer that created this {@link FieldAccessorClassVisitor} */
	@InternalElement
	public FieldAccessorsClassTransformer classTransformer;

	/** Names of the fields that must receive accessors (may be null or empty) */
	@InternalElement
	public IExtendedList<String> fields;

	public FieldAccessorClassVisitor(ClassVisitor classVisitor, ClassInfo classInfo, FieldAccessorsClassTransformer classTransformer)
	{
		super(classVisitor, classInfo);
		this.classTransformer = classTransformer;
		fields = classTransformer.registeredFields.get(classInfo.getInternalName());
	}

	@Override
	public MethodVisitor visitMethodInformated(MethodInfo info, int access, String name, String descriptor, String signature, String[] exceptions)
	{
		// No per-method transformation is performed; kept as an extension point.
		MethodVisitor mv = super.visitMethodInformated(info, access, name, descriptor, signature, exceptions);
		return mv;
	}

	@Override
	public void visitEnd()
	{
		// Accessors are appended only once the whole class has been visited,
		// so canGenerate() can see every method the class already declares.
		generateAccessors();
		super.visitEnd();
	}

	/** Generate accessors for every registered field of the visited class. */
	public void generateAccessors()
	{
		if (CollectionsSWL.isNullOrEmpty(fields))
			return;

		classInfo.fields.dataStream()
				.filter((f) -> fields.contains(f.getName()))
				.each(this::generateAccessors);
	}

	/** Generate both accessors (getter and setter) for a single field. */
	public void generateAccessors(FieldInfo info)
	{
		// Bug fix: this method used to call generateSetter() twice, so
		// generateGetter() was never invoked and transformed classes
		// received no getters at all.
		generateGetter(info);
		generateSetter(info);
	}

	/**
	 * Generate a public getter ({@code ()<fieldType>}) for the given field,
	 * unless the class already declares a method with that name and descriptor.
	 */
	public void generateGetter(FieldInfo info)
	{
		String methodName = classTransformer.fieldsGetterNameFun.apply(info);
		String methodDesc = "()" + info.descriptor;

		if (canGenerate(methodName, methodDesc))
		{
			MethodVisitor mv = visitMethod(ACC_PUBLIC, methodName, methodDesc, "", null);
			mv.visitCode();
			// return this.<field>;
			mv.visitVarInsn(ALOAD, 0);
			AsmUtils.getField(mv, info);
			AsmUtils.invokeReturn(mv, info.getType());
			mv.visitEnd();
		}
	}

	/**
	 * Generate a public fluent setter ({@code (<fieldType>)Ljava/lang/Object;},
	 * returning {@code this}) for the given field, unless the class already
	 * declares a method with that name and descriptor.
	 */
	public void generateSetter(FieldInfo info)
	{
		// NOTE(review): the setter name comes from fieldsGetterNameFun — this
		// looks like a copy/paste slip; confirm whether the transformer
		// exposes a dedicated setter-name function. (The name clash is legal
		// in bytecode because the descriptors differ.)
		String methodName = classTransformer.fieldsGetterNameFun.apply(info);
		String methodDesc = "(" + info.descriptor + ")Ljava/lang/Object;";

		if (canGenerate(methodName, methodDesc))
		{
			MethodVisitor mv = visitMethod(ACC_PUBLIC, methodName, methodDesc, "", null);
			mv.visitCode();
			// this.<field> = arg; return this;
			mv.visitVarInsn(ALOAD, 0);
			AsmUtils.invokeLoad(mv, info.getType(), 1);
			AsmUtils.putField(mv, info);
			mv.visitVarInsn(ALOAD, 0);
			mv.visitInsn(ARETURN);
			mv.visitEnd();
		}
	}

	/** Whether the visited class does NOT already declare a method with this name and descriptor */
	public boolean canGenerate(String methodName, String desc)
	{
		return classInfo.getMethod(methodName, desc) == null;
	}
}
AsyncVoid/Industrial-Foregoing
src/main/java/com/buuz135/industrial/proxy/client/render/ContributorsCatEarsRender.java
<gh_stars>0
package com.buuz135.industrial.proxy.client.render;

import com.buuz135.industrial.proxy.client.ClientProxy;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.AbstractClientPlayer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.entity.layers.LayerRenderer;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.entity.player.EnumPlayerModelParts;
import net.minecraft.init.Blocks;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.lwjgl.opengl.GL11;

import java.util.Arrays;
import java.util.Calendar;

/**
 * Player render layer that draws a cosmetic head decoration ("cat ears",
 * {@link ClientProxy#ears_baked}) on contributors, with seasonal model swaps
 * in October and December.
 * <p>
 * The set of eligible players is the UUID list in the static
 * {@link Contributors} holder, which must be populated externally before
 * rendering (rendering is skipped while it is null).
 */
public class ContributorsCatEarsRender implements LayerRenderer<AbstractClientPlayer> {

    // Populated elsewhere (presumably from a remote/packaged contributor list
    // — TODO confirm); null means "no one gets ears yet".
    public static Contributors contributors;

    /**
     * Renders the decoration on top of the player's head, following head yaw
     * and pitch. Skips rendering unless the player is a listed contributor
     * AND has the cape model part enabled (used here as an opt-out toggle).
     */
    @SideOnly(Side.CLIENT)
    @Override
    public void doRenderLayer(AbstractClientPlayer entitylivingbaseIn, float limbSwing, float limbSwingAmount, float partialTicks, float ageInTicks, float netHeadYaw, float headPitch, float scale) {
        if (contributors == null) return;
        if (!Arrays.asList(contributors.uuid).contains(entitylivingbaseIn.getUniqueID().toString())) return;
        if (!entitylivingbaseIn.isWearing(EnumPlayerModelParts.CAPE)) return;
        // Save GL state; everything below is undone by popMatrix/state resets.
        GlStateManager.pushMatrix();
        GlStateManager.enableBlend();
        GlStateManager.disableCull();
        RenderHelper.disableStandardItemLighting();
        if (Minecraft.isAmbientOcclusionEnabled()) {
            GlStateManager.shadeModel(GL11.GL_SMOOTH);
        } else {
            GlStateManager.shadeModel(GL11.GL_FLAT);
        }
        // Small vertical offsets so the model sits on the head...
        GlStateManager.translate(0, -0.015f, 0);
        // ...raised slightly more when armor slot 3 is occupied (index 3 is
        // presumably the helmet slot — TODO confirm against vanilla layout)...
        if (!entitylivingbaseIn.inventory.armorInventory.get(3).isEmpty()) GlStateManager.translate(0, -0.02f, 0);
        // ...and lowered to follow the sneaking pose.
        if (entitylivingbaseIn.isSneaking()) GlStateManager.translate(0, 0.27, 0);
        // Orient the model, then track the player's head rotation.
        GlStateManager.rotate(90, 0, 1, 0);
        GlStateManager.rotate(180, 1, 0, 0);
        GlStateManager.rotate(netHeadYaw, 0, -1, 0);
        GlStateManager.rotate(headPitch, 0, 0, -1);
        // All models used below are baked from block textures, so bind the
        // block texture atlas.
        Minecraft.getMinecraft().getTextureManager().bindTexture(TextureMap.LOCATION_BLOCKS_TEXTURE);
        Calendar calendar = Calendar.getInstance();
        if (calendar.get(Calendar.MONTH) == Calendar.OCTOBER) {
            // Halloween variant: blinking pumpkins.
            spookyScarySkeletons();
        } else if (calendar.get(Calendar.MONTH) == Calendar.DECEMBER) {
            // December variant.
            itsSnowyHere();
        } else {
            // Default: render the baked "ears" model.
            Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelRenderer().renderModelBrightnessColor(ClientProxy.ears_baked, 0.5f, 255, 255, 255);
        }
        RenderHelper.enableStandardItemLighting();
        GlStateManager.depthMask(true);
        GlStateManager.popMatrix();
    }

    /**
     * October variant: renders two pumpkins, one per side of the head,
     * toggling between lit and unlit every 100 world ticks (200-tick cycle).
     */
    @SideOnly(Side.CLIENT)
    public void spookyScarySkeletons() {
        IBakedModel pumpkin = Minecraft.getMinecraft().getBlockRendererDispatcher().getModelForState(Minecraft.getMinecraft().world.getTotalWorldTime() % 200 < 100 ? Blocks.LIT_PUMPKIN.getDefaultState() : Blocks.PUMPKIN.getDefaultState());
        GlStateManager.rotate(90, 0, -1, 0);
        GlStateManager.translate(0.08, 0.485, -0.1);
        GlStateManager.scale(2 / 16D, 3 / 16D, 2 / 16D);
        // First pumpkin, then shift across the head and draw its mirror twin.
        Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelRenderer().renderModelBrightnessColor(pumpkin, 0.5f, 255, 255, 255);
        GlStateManager.translate(-0.08 * 28, 0, 0);
        Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelRenderer().renderModelBrightnessColor(pumpkin, 0.5f, 255, 255, 255);
    }

    /**
     * December variant: same two-sided placement as the October variant but
     * using the tallgrass model with metadata 2 (presumably the fern variant
     * — TODO confirm). Note the local is still named "pumpkin" — copy/paste
     * from spookyScarySkeletons().
     */
    public void itsSnowyHere() {
        IBakedModel pumpkin = Minecraft.getMinecraft().getBlockRendererDispatcher().getModelForState(Blocks.TALLGRASS.getStateFromMeta(2));
        GlStateManager.rotate(90, 0, -1, 0);
        GlStateManager.translate(0.08, 0.485, -0.1);
        GlStateManager.scale(2 / 16D, 2 / 16D, 2 / 16D);
        Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelRenderer().renderModelBrightnessColor(pumpkin, 0.5f, 255, 255, 255);
        GlStateManager.translate(-0.08 * 28, 0, 0);
        Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelRenderer().renderModelBrightnessColor(pumpkin, 0.5f, 255, 255, 255);
    }

    @Override
    public boolean shouldCombineTextures() {
        return false;
    }

    /** Simple holder for the contributor UUID whitelist (string UUIDs). */
    public static class Contributors {
        public String[] uuid = new String[0];
    }
}
isuthermography/spatialnde
demos/testregistration_projectionmismatch.py
<reponame>isuthermography/spatialnde
# Demo/regression script: project a landmarked 2D camera image of specimen
# CR03-SPAR-01H onto its 3D (X3D) surface parameterization, sanity-check the
# recovered camera pose against a known landmark, then verify that repeated
# threaded projections give bit-identical results (hence the filename:
# testregistration_projectionmismatch).
import sys
import copy
import json
from threading import Thread

# StringIO import that works on both Python 2 and Python 3
try:
    from cStringIO import StringIO # python 2.x
    pass
except ImportError:
    from io import StringIO # python 3.x
    pass

import numpy as np

import dg_file as dgf
import dg_metadata as dgm
import dataguzzler as dg

from spatialnde.ndeobj import ndepart
from spatialnde.coordframes import coordframe,concrete_affine
from spatialnde.imageprojectionmodel import ImageProjectionModel
from spatialnde.dataset import SurfaceStructuredGridDataSet
from spatialnde.imageprojection import imageprojection_prepare_float,imageprojection_float,imgbufzero,validitybufzero
from spatialnde.dataguzzler.dg_3d import ndepartparams_from_landmarked3d,blank_uv_from_landmarked3d

# Reference copy of the parameterization parameters (landmark u,v positions and
# UV scaling), kept for documentation; the live values are rebuilt below from
# the landmarked .dgs snapshot.
#ndepartparams = '[ [{"XUWKIV": [0, 0.470679, 0.778821], "JUSQOF": [0, 0.470432, 0.470844], "LUDRAZ": [0, 0.000597, 0.778506], "LITJID": [0, 0.470547, 0.530562], "KEXQET": [0, 0.159187, 0.618888], "FIPROJ": [0, 0.000135, 0.530768], "XEZHOD": [0, 0.469243, 0.717868], "SOFBEW": [0, 0.00087, 0.716984], "KILVOK": [0, 0.00053, 0.470313]}, {}], { "UV_ScalingParamBySurfaceNum": [[-0.00038232421875000001, -0.00038085937500000001], [0.78300000000000003, 0.78000000000000003]] } ]'

#x3dname="CR03-SPAR-01H_uv.x3d"
landmarked_3d_dgs = "CR03-SPAR-01H_landmarks.dgs"
calibfile="sc6000_1009_calib.cic"

# Dimensions of texture to create
#texwidth=800
#texheight=800
#texwidth=1200
#texheight=1200

# File with an image of the specimen, and marked landmarks, to map onto the 3D object
imgdgs_withlandmarks = "CR03-SPAR-01H_specimen_640x480_landmarks.dgs" # Generated with dataguzzler_define_object_landmarks.confm4 and dataguzzler_define_landmarks_load_image.py
imgdgs_chan="Param"
landmarked_3d_texchan = "CR03_SPAR_01H_tex"
landmarked_3d_x3dchan = "CR03_SPAR_01H"

# Load the camera image snapshot (with its 2D landmark coordinates).
(imgdgs_metadata,imgdgs_wfmdict)=dgf.loadsnapshot(imgdgs_withlandmarks)

# Coordinate frames: one fixed to the object, one fixed to the camera.
objframe=coordframe()
cameraframe=coordframe()

# Load the landmarked 3D snapshot (geometry + texture channels).
(landmarked3d_metadata,landmarked_3d_wfmdict) = dgf.loadsnapshot(landmarked_3d_dgs)

# Create ndepartparams from landmarked_3d
ndepartparams=ndepartparams_from_landmarked3d(landmarked_3d_wfmdict,[ landmarked_3d_texchan ] )

# If we load the x3d directly, it doesn't have a texture URL set, so
# we can't match the texture URL to the scaling metadata from the snapshot
# (UV_ScalingParamsByTexURL). The copy within the .dgs DOES have a texture
# url set (from dataguzzler_define_landmarks_load_x3d) so
# we load it instead
#testobj = ndepart.fromx3d(objframe,ndepartparams,x3dname)
x3dbuf = StringIO(dgm.GetMetaDatumWIStr(landmarked_3d_wfmdict[landmarked_3d_x3dchan],"X3DGeom",""))
testobj = ndepart.fromx3d(objframe,ndepartparams,x3dbuf)

projmodel = ImageProjectionModel.fromcalibfiledataguzzler(imgdgs_wfmdict[imgdgs_chan],calibfile,alphaoverride=1.0) # specify alphaoverride to indicate that undistortion has already been applied

# Fudge focal length in projmodel: override the calibration intrinsics with a
# hand-tuned pinhole matrix (principal point at image center of 640x480).
focal_length=750.0 # pixels
projmodel.new_camera_mtx=np.array(((focal_length,0.0,320.0),
                                   (0.0,focal_length,240.0),
                                   (0.0,0.0,1.0)),dtype='d')

dataset = SurfaceStructuredGridDataSet.fromdataguzzlerpixelimage(imgdgs_wfmdict[imgdgs_chan],imgdgs_wfmdict)

# now we should be able to call projmodel.evaluaterelativepose
# (solves for the object->camera pose from the matched landmarks)
(projectedlandmarks,Pose) = projmodel.evaluaterelativepose(testobj,dataset)

# Check if pose works: reproject a known landmark by hand and compare with the
# pixel coordinates marked in the dataset.
PoseCopy=np.empty(Pose.shape,dtype='d')
PoseCopy[:,:]=Pose
# Fix 2nd and 3rd rows
# NOTE(review): negating rows 1 and 2 presumably converts between the internal
# camera convention and OpenCV's y-down/z-forward convention — see the worked
# example in the comments at the bottom of this file.
PoseCopy[1,:]=-Pose[1,:]
PoseCopy[2,:]=-Pose[2,:]

testlandmark="KEXQET"
# landmarkdict2d entries are (surface_index, u, v); eval_xyz_uv maps (u,v) to 3D.
testcoordsxyz=testobj.implpart.surfaces[0].intrinsicparameterization.eval_xyz_uv(testobj.implpart.surfaces[0],*testobj.landmarks.landmarkdict2d[testlandmark][1:])
# Apply the pose to the homogeneous 3D point, then project through the camera matrix.
testcoords=np.inner(PoseCopy,np.r_[testcoordsxyz,1.0])
testcoords/=testcoords[2] # normalize by z coordinate
testpixelcoords=np.inner(projmodel.new_camera_mtx,testcoords[:3])
print("testpixelcoords=%s" % (str(testpixelcoords)))
print("datasetcoords=%s" % (str(dataset.landmarkpixelcoords[testlandmark])))
#raise ValueError("exit")

# Pose, given object coordinates returns camera coordinates.

# Define relationship between coord frames
PoseXForm = concrete_affine.fromaugmented(objframe,cameraframe,Pose)

# Assume single texture for all surfaces, overwriting existing texture wfm in loaded wfmdict
paramwfm = blank_uv_from_landmarked3d(landmarked_3d_wfmdict,landmarked_3d_texchan)

#imagebuf=np.zeros((texwidth,texheight),dtype='f')
# paramwfm.data and validitybuf are where the image is being mapped to.
validitybuf=np.zeros(paramwfm.data.shape,dtype='f',order="F")
#angleofincidencebuf=np.zeros(paramwfm.data.shape,dtype='f',order="F")

# Per-surface destination buffers, keyed by surface object identity.
parameterizationdict=dict([ (id(surface),(None,paramwfm.data.T,validitybuf.T)) for surface in testobj.implpart.surfaces ])

# Imagedat is the incoming image to be mapped
imagedat=dataset.data[:,:]
#imagedat=dataset.data[::10,::10]
imagedata=np.empty((1,imagedat.shape[0],imagedat.shape[1]),dtype='f')
imagedata[0,:,:]=imagedat

# Perform z-buffering
projparams=imageprojection_prepare_float(projmodel,cameraframe,[ testobj ],parameterizationdict,imagedat,2.0,uv_weightingblur_distance=.01) # pixel blur assumed to be 2 pixels; uv weightingblur of 1cm

validitybufzero(projparams)
imgbufzero(projparams)

# Convenience aliases into the per-surface buffer tuple (for inspection).
surf_imgbuf=projparams.surfacelist[0][1][1]
surf_validitybuf=projparams.surfacelist[0][1][2]
surf_angleofincidencefactorbufuv=projparams.surfacelist[0][3]
surf_weightingbuf=projparams.surfacelist[0][2]

# Reference run: project once in a worker thread and keep the result.
#imageprojection_float(projparams,imagedata)
th=Thread(target=lambda: imageprojection_float(projparams,imagedata))
th.start()
th.join()

refres = paramwfm.data.copy()
refval = validitybuf.copy()

# Reproducibility check: repeat the projection 15 times and demand bit-exact
# agreement with the reference run (NaN==NaN treated as equal). Any deviation
# prints diagnostics (locations and relative norms of the differences) and
# aborts with "Mismatch!".
for cnt in range(15):
    validitybufzero(projparams)
    imgbufzero(projparams)
    th=Thread(target=lambda: imageprojection_float(projparams,imagedata))
    th.start()
    th.join()
    #imageprojection_float(projparams,imagedata)
    if not(((refres == paramwfm.data) | (np.isnan(refres) & np.isnan(paramwfm.data))).all()) or (refval != validitybuf).any():
        different = ~((refres == paramwfm.data) | (np.isnan(refres) & np.isnan(paramwfm.data)))
        differentvalidity = (refval != validitybuf)
        refresdiff=refres[different]
        paramwfmdiff=paramwfm.data[different]
        refvaldiff=refval[differentvalidity]
        validitybufdiff=validitybuf[differentvalidity]
        print(np.where(different))
        print(np.where(differentvalidity))
        if paramwfmdiff.shape[0] > 0:
            print(np.linalg.norm(refresdiff-paramwfmdiff)/np.linalg.norm(paramwfmdiff))
            print(np.max(np.abs(refresdiff-paramwfmdiff))/np.linalg.norm(paramwfmdiff))
            pass
        else:
            print(0.0)
            print(0.0)
            pass
        if refvaldiff.shape[0] > 0:
            print(np.linalg.norm(refvaldiff-validitybufdiff)/np.linalg.norm(refvaldiff))
            print(np.max(np.abs(refvaldiff-validitybufdiff))/np.linalg.norm(refvaldiff))
            pass
        else:
            print(0.0)
            print(0.0)
            pass
        raise ValueError("Mismatch!")
    pass

#paramwfm.data /= validitybuf

#pl.imshow(imagebuf)

# Worked example of the by-hand reprojection performed above:
# dataset.landmarkpixelcoords["KEXQET"]= (369,286.5) in pixels from upper left on dataset
# testobj.landmarks.landmarkdict2d["KEXQET"] = [0, 0.159187, 0.618888]
# testobj.implpart.surfaces[0].intrinsicparameterization.eval_xyz_uv(testobj.implpart.surfaces[0],.159187,.618888)
#  -> array([ 0.00570404,  0.002     ,  0.31110466])
# Convert to camera coordinates: np.inner(Pose,np.array([ 0.00570404, 0.002 , 0.31110466, 1.0]))
#  -> array([ 0.0151601 , -0.05353841, -6.14003631,  1.        ])
# Invert Y and Z back to OpenCV camera coordinates
#  -> array([ 0.0151601 ,  0.05353841,  6.14003631,  1.        ])
# Normalize to x', y' -> array([ 0.00246906,  0.00871956,  1.        ])
# Multiply by camera matrix
# np.inner(projmodel.new_camera_mtx,np.array([ 0.00246906,  0.00871956,  1. ]))
#  -> KILVOK:(416,351)
#  -> KEXQET:(343,329)

# Save the projected texture back into the 3D snapshot's waveform dict.
dgf.savesnapshot("/tmp/surfacemapped.dgs",landmarked_3d_wfmdict)
jessesleeping/my_peloton
src/postgres/backend/access/gist/gistxlog.cpp
<reponame>jessesleeping/my_peloton<gh_stars>1-10 /*------------------------------------------------------------------------- * * gistxlog.c * WAL replay logic for GiST. * * * Portions Copyright (c) 1996-2015, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * * IDENTIFICATION * src/backend/access/gist/gistxlog.c *------------------------------------------------------------------------- */ #include "postgres.h" #include "access/gist_private.h" #include "access/xloginsert.h" #include "access/xlogutils.h" #include "utils/memutils.h" static MemoryContext opCtx; /* working memory for operations */ /* * Replay the clearing of F_FOLLOW_RIGHT flag on a child page. * * Even if the WAL record includes a full-page image, we have to update the * follow-right flag, because that change is not included in the full-page * image. To be sure that the intermediate state with the wrong flag value is * not visible to concurrent Hot Standby queries, this function handles * restoring the full-page image as well as updating the flag. (Note that * we never need to do anything else to the child page in the current WAL * action.) */ static void gistRedoClearFollowRight(XLogReaderState *record, uint8 block_id) { XLogRecPtr lsn = record->EndRecPtr; Buffer buffer; Page page; XLogRedoAction action; /* * Note that we still update the page even if it was restored from a full * page image, because the updated NSN is not included in the image. 
*/ action = XLogReadBufferForRedo(record, block_id, &buffer); if (action == BLK_NEEDS_REDO || action == BLK_RESTORED) { page = BufferGetPage(buffer); GistPageSetNSN(page, lsn); GistClearFollowRight(page); PageSetLSN(page, lsn); MarkBufferDirty(buffer); } if (BufferIsValid(buffer)) UnlockReleaseBuffer(buffer); } /* * redo any page update (except page split) */ static void gistRedoPageUpdateRecord(XLogReaderState *record) { XLogRecPtr lsn = record->EndRecPtr; gistxlogPageUpdate *xldata = (gistxlogPageUpdate *) XLogRecGetData(record); Buffer buffer; Page page; if (XLogReadBufferForRedo(record, 0, &buffer) == BLK_NEEDS_REDO) { char *begin; char *data; Size datalen; int ninserted = 0; data = begin = XLogRecGetBlockData(record, 0, &datalen); page = (Page) BufferGetPage(buffer); /* Delete old tuples */ if (xldata->ntodelete > 0) { int i; OffsetNumber *todelete = (OffsetNumber *) data; data += sizeof(OffsetNumber) * xldata->ntodelete; for (i = 0; i < xldata->ntodelete; i++) PageIndexTupleDelete(page, todelete[i]); if (GistPageIsLeaf(page)) GistMarkTuplesDeleted(page); } /* add tuples */ if (data - begin < datalen) { OffsetNumber off = (PageIsEmpty(page)) ? FirstOffsetNumber : OffsetNumberNext(PageGetMaxOffsetNumber(page)); while (data - begin < datalen) { IndexTuple itup = (IndexTuple) data; Size sz = IndexTupleSize(itup); OffsetNumber l; data += sz; l = PageAddItem(page, (Item) itup, sz, off, false, false); if (l == InvalidOffsetNumber) elog(ERROR, "failed to add item to GiST index page, size %d bytes", (int) sz); off++; ninserted++; } } Assert(ninserted == xldata->ntoinsert); PageSetLSN(page, lsn); MarkBufferDirty(buffer); } /* * Fix follow-right data on left child page * * This must be done while still holding the lock on the target page. Note * that even if the target page no longer exists, we still attempt to * replay the change on the child page. 
*/ if (XLogRecHasBlockRef(record, 1)) gistRedoClearFollowRight(record, 1); if (BufferIsValid(buffer)) UnlockReleaseBuffer(buffer); } /* * Returns an array of index pointers. */ static IndexTuple * decodePageSplitRecord(char *begin, int len, int *n) { char *ptr; int i = 0; IndexTuple *tuples; /* extract the number of tuples */ memcpy(n, begin, sizeof(int)); ptr = begin + sizeof(int); tuples = static_cast<IndexTuple *>(palloc(*n * sizeof(IndexTuple))); for (i = 0; i < *n; i++) { Assert(ptr - begin < len); tuples[i] = (IndexTuple) ptr; ptr += IndexTupleSize((IndexTuple) ptr); } Assert(ptr - begin == len); return tuples; } static void gistRedoPageSplitRecord(XLogReaderState *record) { XLogRecPtr lsn = record->EndRecPtr; gistxlogPageSplit *xldata = (gistxlogPageSplit *) XLogRecGetData(record); Buffer firstbuffer = InvalidBuffer; Buffer buffer; Page page; int i; bool isrootsplit = false; /* * We must hold lock on the first-listed page throughout the action, * including while updating the left child page (if any). We can unlock * remaining pages in the list as soon as they've been written, because * there is no path for concurrent queries to reach those pages without * first visiting the first-listed page. 
*/ /* loop around all pages */ for (i = 0; i < xldata->npage; i++) { int flags; char *data; Size datalen; int num; BlockNumber blkno; IndexTuple *tuples; XLogRecGetBlockTag(record, i + 1, NULL, NULL, &blkno); if (blkno == GIST_ROOT_BLKNO) { Assert(i == 0); isrootsplit = true; } buffer = XLogInitBufferForRedo(record, i + 1); page = (Page) BufferGetPage(buffer); data = XLogRecGetBlockData(record, i + 1, &datalen); tuples = decodePageSplitRecord(data, datalen, &num); /* ok, clear buffer */ if (xldata->origleaf && blkno != GIST_ROOT_BLKNO) flags = F_LEAF; else flags = 0; GISTInitBuffer(buffer, flags); /* and fill it */ gistfillbuffer(page, tuples, num, FirstOffsetNumber); if (blkno == GIST_ROOT_BLKNO) { GistPageGetOpaque(page)->rightlink = InvalidBlockNumber; GistPageSetNSN(page, xldata->orignsn); GistClearFollowRight(page); } else { if (i < xldata->npage - 1) { BlockNumber nextblkno; XLogRecGetBlockTag(record, i + 2, NULL, NULL, &nextblkno); GistPageGetOpaque(page)->rightlink = nextblkno; } else GistPageGetOpaque(page)->rightlink = xldata->origrlink; GistPageSetNSN(page, xldata->orignsn); if (i < xldata->npage - 1 && !isrootsplit && xldata->markfollowright) GistMarkFollowRight(page); else GistClearFollowRight(page); } PageSetLSN(page, lsn); MarkBufferDirty(buffer); if (i == 0) firstbuffer = buffer; else UnlockReleaseBuffer(buffer); } /* Fix follow-right data on left child page, if any */ if (XLogRecHasBlockRef(record, 0)) gistRedoClearFollowRight(record, 0); /* Finally, release lock on the first page */ UnlockReleaseBuffer(firstbuffer); } static void gistRedoCreateIndex(XLogReaderState *record) { XLogRecPtr lsn = record->EndRecPtr; Buffer buffer; Page page; buffer = XLogInitBufferForRedo(record, 0); Assert(BufferGetBlockNumber(buffer) == GIST_ROOT_BLKNO); page = (Page) BufferGetPage(buffer); GISTInitBuffer(buffer, F_LEAF); PageSetLSN(page, lsn); MarkBufferDirty(buffer); UnlockReleaseBuffer(buffer); } void gist_redo(XLogReaderState *record) { uint8 info = 
XLogRecGetInfo(record) & ~XLR_INFO_MASK; MemoryContext oldCxt; /* * GiST indexes do not require any conflict processing. NB: If we ever * implement a similar optimization we have in b-tree, and remove killed * tuples outside VACUUM, we'll need to handle that here. */ oldCxt = MemoryContextSwitchTo(opCtx); switch (info) { case XLOG_GIST_PAGE_UPDATE: gistRedoPageUpdateRecord(record); break; case XLOG_GIST_PAGE_SPLIT: gistRedoPageSplitRecord(record); break; case XLOG_GIST_CREATE_INDEX: gistRedoCreateIndex(record); break; default: elog(PANIC, "gist_redo: unknown op code %u", info); } MemoryContextSwitchTo(oldCxt); MemoryContextReset(opCtx); } void gist_xlog_startup(void) { opCtx = createTempGistContext(); } void gist_xlog_cleanup(void) { MemoryContextDelete(opCtx); } /* * Write WAL record of a page split. */ XLogRecPtr gistXLogSplit(RelFileNode node, BlockNumber blkno, bool page_is_leaf, SplitedPageLayout *dist, BlockNumber origrlink, GistNSN orignsn, Buffer leftchildbuf, bool markfollowright) { gistxlogPageSplit xlrec; SplitedPageLayout *ptr; int npage = 0; XLogRecPtr recptr; int i; for (ptr = dist; ptr; ptr = ptr->next) npage++; xlrec.origrlink = origrlink; xlrec.orignsn = orignsn; xlrec.origleaf = page_is_leaf; xlrec.npage = (uint16) npage; xlrec.markfollowright = markfollowright; XLogBeginInsert(); /* * Include a full page image of the child buf. (only necessary if a * checkpoint happened since the child page was split) */ if (BufferIsValid(leftchildbuf)) XLogRegisterBuffer(0, leftchildbuf, REGBUF_STANDARD); /* * NOTE: We register a lot of data. The caller must've called * XLogEnsureRecordSpace() to prepare for that. We cannot do it here, * because we're already in a critical section. If you change the number * of buffer or data registrations here, make sure you modify the * XLogEnsureRecordSpace() calls accordingly! 
*/ XLogRegisterData((char *) &xlrec, sizeof(gistxlogPageSplit)); i = 1; for (ptr = dist; ptr; ptr = ptr->next) { XLogRegisterBuffer(i, ptr->buffer, REGBUF_WILL_INIT); XLogRegisterBufData(i, (char *) &(ptr->block.num), sizeof(int)); XLogRegisterBufData(i, (char *) ptr->list, ptr->lenlist); i++; } recptr = XLogInsert(RM_GIST_ID, XLOG_GIST_PAGE_SPLIT); return recptr; } /* * Write XLOG record describing a page update. The update can include any * number of deletions and/or insertions of tuples on a single index page. * * If this update inserts a downlink for a split page, also record that * the F_FOLLOW_RIGHT flag on the child page is cleared and NSN set. * * Note that both the todelete array and the tuples are marked as belonging * to the target buffer; they need not be stored in XLOG if XLogInsert decides * to log the whole buffer contents instead. */ XLogRecPtr gistXLogUpdate(RelFileNode node, Buffer buffer, OffsetNumber *todelete, int ntodelete, IndexTuple *itup, int ituplen, Buffer leftchildbuf) { gistxlogPageUpdate xlrec; int i; XLogRecPtr recptr; xlrec.ntodelete = ntodelete; xlrec.ntoinsert = ituplen; XLogBeginInsert(); XLogRegisterData((char *) &xlrec, sizeof(gistxlogPageUpdate)); XLogRegisterBuffer(0, buffer, REGBUF_STANDARD); XLogRegisterBufData(0, (char *) todelete, sizeof(OffsetNumber) * ntodelete); /* new___ tuples */ for (i = 0; i < ituplen; i++) XLogRegisterBufData(0, (char *) (itup[i]), IndexTupleSize(itup[i])); /* * Include a full page image of the child buf. (only necessary if a * checkpoint happened since the child page was split) */ if (BufferIsValid(leftchildbuf)) XLogRegisterBuffer(1, leftchildbuf, REGBUF_STANDARD); recptr = XLogInsert(RM_GIST_ID, XLOG_GIST_PAGE_UPDATE); return recptr; }
bardurdam/EventStore.JVM
src/main/scala/eventstore/util/DefaultFormats.scala
package eventstore package util import akka.util.{ ByteIterator, ByteStringBuilder } import java.nio.ByteOrder.{ BIG_ENDIAN, LITTLE_ENDIAN } object DefaultFormats extends DefaultFormats trait DefaultFormats { implicit object UuidFormat extends BytesFormat[Uuid] { private val length = 16 // This is necessary because of an issue with Protobuf in .NET. // See also https://github.com/EventStore/EventStore.JVM/issues/78 def inverseBitMagic(mostSignificant: Long): Long = { val a: Long = (mostSignificant >> 16) & 0xFFFF val b: Long = (mostSignificant >> 48) & 0xFFFF val c: Long = (mostSignificant >> 32) & 0xFFFF val d: Long = mostSignificant & 0xFFFF (a << 48) | (d << 32) | (c << 16) | b } def bitMagic(mostSignificant: Long): Long = { val a: Long = mostSignificant & 0xFFFF val b: Long = (mostSignificant >> 16) & 0xFFFF val c: Long = (mostSignificant >> 48) & 0xFFFF val d: Long = (mostSignificant >> 32) & 0xFFFF (a << 48) | (b << 32) | (c << 16) | d } def write(x: Uuid, builder: ByteStringBuilder) = { val mostSignificant = bitMagic(x.getMostSignificantBits) val leastSignificant = x.getLeastSignificantBits builder.putLong(mostSignificant)(LITTLE_ENDIAN) builder.putLong(leastSignificant)(BIG_ENDIAN) } def read(bi: ByteIterator) = { val length = bi.len require(length >= this.length, s"cannot parse uuid, actual length: $length, expected: ${this.length}") val mostSignificant = inverseBitMagic(bi.getLong(LITTLE_ENDIAN)) val leastSignificant = bi.getLong(BIG_ENDIAN) new Uuid(mostSignificant, leastSignificant) } } }
JET31ZHANG/CppInOneHourADay
lesson4/exp_4_1.cpp
/* Declare an array of integers and accessing its elements */ #include <iostream> using namespace std; int main() { int myNumbers [5] = {34, 56, -21, 5002, 365}; cout << "First element at index 0: " << myNumbers [0] << endl; cout << "Second element at index 1: " << myNumbers [1] << endl; cout << "Third element at index 2: " << myNumbers [2] << endl; cout << "Fourth element at index 3: " << myNumbers [3] << endl; cout << "Fifth element at index 4: " << myNumbers [4] << endl; return 0; }
dyna-dot/WebGL
sdk/demos/intel/fluid/demo.js
<filename>sdk/demos/intel/fluid/demo.js /* ** Copyright (c) 2019 Intel Corporation ** ** Permission is hereby granted, free of charge, to any person obtaining a ** copy of this software and/or associated documentation files (the ** "Materials"), to deal in the Materials without restriction, including ** without limitation the rights to use, copy, modify, merge, publish, ** distribute, sublicense, and/or sell copies of the Materials, and to ** permit persons to whom the Materials are furnished to do so, subject to ** the following conditions: ** ** The above copyright notice and this permission notice shall be included ** in all copies or substantial portions of the Materials. ** ** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, ** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF ** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. ** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, ** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE ** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. 
*/ 'use strict'; const VS = 0; const FS = 1; const CS_DEN_SIM = 2; const CS_FOR_SIM = 3; const CS_INT = 4; const CS_DEN_SHA = 5; const CS_FOR_SHA = 6; const CS_BUILD_GRID = 7; const CS_BITONIC_SORT = 8; const CS_TRANSPOSE = 9; const CS_CLEAR_GRID_IND = 10; const CS_BUILD_GRID_IND = 11; const CS_REARRANGE = 12; const CS_DEN_GRID = 13; const CS_FOR_GRID = 14; const CS_INT_GRID = 15; const BITONIC_BLOCK_SIZE = 512; const TRANSPOSE_BLOCK_SIZE = 16; const SIMULATION_BLOCK_SIZE = 256; const NUM_GRID_INDICES = 65536; let gNumParticles = 8 * 1024; let gThreads = 256; let gSimMode = 'simple'; let gDispatchNum = null; let gCanvas = null; let gl = null; let gShaderStr = []; let gPrograms = []; let gParticleBuffers = []; let gUpdateParams = null; let gUpdateSortCB = []; let gFInitialParticleSpacing = 0.0045; let gMapHeight = 1.2; let gMapWidth = (4.0 / 3.0) * gMapHeight; let gSmoothlen = 0.012; let gPressureStiffness = 200.0; let gRestDensity = 1000.0; let gParticleMass = 0.0002; let gViscosity = 0.1; let gWallStiffness = 3000.0; let gGravityX = 0.0; let gGravityY = -0.5; let gViewProjection = new Matrix4x4(); let gPointSize = 0; let gFPSCounter = null; let gFpsElem = null; let gScheduledRAF = false; function loadShader(type, shaderSrc) { let shader = gl.createShader(type); // Load the shader source gl.shaderSource(shader, shaderSrc); // Compile the shader gl.compileShader(shader); // Check the compile status if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS) && !gl.isContextLost()) { let infoLog = gl.getShaderInfoLog(shader); alert('Error compiling shader:\n' + infoLog); gl.deleteShader(shader); return null; } return shader; } function getUrlString(name) { let reg = new RegExp('(^|&)'+ name +'=([^&]*)(&|$)'); let r = window.location.search.substr(1).match(reg); if (r != null) { return unescape(r[2]); } return null; } function createElement(element, attribute, inner) { if (typeof(element) === 'undefined') { return false; } if (typeof(inner) === 'undefined') { inner = ''; } 
let el = document.createElement(element); if (typeof(attribute) === 'object') { for (let key in attribute) { el.setAttribute(key, attribute[key]); } } if (!Array.isArray(inner)) { inner = [inner]; } for (let k = 0; k < inner.length; k++) { if (inner[k].tagName) { el.appendChild(inner[k]); } else { el.appendChild(document.createTextNode(inner[k])); } } return el; } function initController() { let fpsContainer = createElement('div', {'class': 'fpsContainer'}); document.body.appendChild(fpsContainer); let lableFps = createElement('label', {}, 'FPS: '); let sFps = createElement('span', {'id': 'fps'}); fpsContainer.appendChild(lableFps); fpsContainer.appendChild(sFps); gFpsElem = document.getElementById('fps'); let container = createElement('div', {'class': 'container'}); document.body.appendChild(container); let tButton = createElement( 'button', { 'class': 'button', 'onClick': 'resetParticles()', }, 'Reset Particles' ); let dButton = createElement('div', {}, tButton); let option8k = createElement('option', {'value': '8', 'selected': 'selected'}, '8K Particles'); let option16k = createElement('option', {'value': '16'}, '16K Particles'); let option32k = createElement('option', {'value': '32'}, '32K Particles'); let option64k = createElement('option', {'value': '64'}, '64K Particles'); let tSelectP = createElement( 'select', {'id': 'selP', 'onchange': 'restart(this.value)'}, [option8k, option16k, option32k, option64k] ); let dSelectP = createElement('div', {}, tSelectP); let optionDown = createElement('option', {'value': 'down'}, 'Gravity Down'); let optionUp = createElement('option', {'value': 'up'}, 'Gravity Up'); let optionLeft = createElement('option', {'value': 'left'}, 'Gravity Left'); let optionRight = createElement('option', {'value': 'right'}, 'Gravity Right'); let tSelectG = createElement( 'select', {'id': 'selG', 'onchange': 'changeGravity(this.value)'}, [optionDown, optionUp, optionLeft, optionRight] ); let dSelectG = createElement('div', {}, tSelectG); let 
radioSimple = createElement('input', {'type': 'radio', 'name': 'mode', 'id': 'radio1', 'value': 'simple', 'onchange': 'changeMode(this.value)', 'checked': 'checked'}); let lableSimple = createElement('label', {}, 'Simple N^2'); let dRadioSimple = createElement('div', {}, [radioSimple, lableSimple]); let radioShared = createElement('input', {'type': 'radio', 'name': 'mode', 'id': 'radio1', 'onchange': 'changeMode(this.value)', 'value': 'shared'}); let lableShared = createElement('label', {}, 'Shared Memory N^2'); let dRadioShared = createElement('div', {}, [radioShared, lableShared]); let radioGrid = createElement('input', {'type': 'radio', 'name': 'mode', 'id': 'radio1', 'onchange': 'changeMode(this.value)', 'value': 'grid'}); let lableGrid = createElement('label', {}, 'Grid + Sort'); let dRadioGrid = createElement('div', {}, [radioGrid, lableGrid]); container.appendChild(dButton); container.appendChild(dSelectP); container.appendChild(dSelectG); container.appendChild(dRadioSimple); container.appendChild(dRadioShared); container.appendChild(dRadioGrid); } function changeMode(o) { gSimMode = o; gFPSCounter.reset(); gFpsElem.innerHTML = 'calculating frames per second...'; } function changeGravity(g) { let data1 = new ArrayBuffer(8); let view1 = new Float32Array(data1); switch (g) { case 'up': gGravityX = view1[0] = 0.0; gGravityY = view1[1] = 0.5; break; case 'down': gGravityX = view1[0] = 0.0; gGravityY = view1[1] = -0.5; break; case 'left': gGravityX = view1[0] = -0.5; gGravityY = view1[1] = 0.0; break; case 'right': gGravityX = view1[0] = 0.5; gGravityY = view1[1] = 0.0; break; } gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateParams); gl.bufferSubData(gl.UNIFORM_BUFFER, 12*4, view1, 0); gl.bindBufferBase(gl.UNIFORM_BUFFER, 0, gUpdateParams); } function resetParticles() { destroyBuffers(); initBuffers(); changeMode(gSimMode); initRender(); } function restart(num) { gNumParticles = num * 1024; gDispatchNum = Math.ceil(gNumParticles / gThreads); resetParticles(); } function 
gpuSort() { let NUM_ELEMENTS = gNumParticles; let MATRIX_WIDTH = BITONIC_BLOCK_SIZE; let MATRIX_HEIGHT = NUM_ELEMENTS / BITONIC_BLOCK_SIZE; // Sort the data // First sort the rows for the levels <= to the block size gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[0]); for ( let level = 2; level <= BITONIC_BLOCK_SIZE; level <<= 1 ) { let sortCB = new Int32Array(4); sortCB[0] = level; sortCB[1] = level; sortCB[2] = MATRIX_HEIGHT; sortCB[3] = MATRIX_WIDTH; gl.bufferSubData(gl.UNIFORM_BUFFER, 0, sortCB, 0); gl.bindBufferBase(gl.UNIFORM_BUFFER, 1, gUpdateSortCB[0]); // Sort the row data computePass(CS_BITONIC_SORT, gNumParticles / BITONIC_BLOCK_SIZE, 1, 1); } // Then sort the rows and columns for the levels > than the block size // Transpose. Sort the Columns. Transpose. Sort the Rows. for ( let level = (BITONIC_BLOCK_SIZE << 1); level <= NUM_ELEMENTS; level <<= 1 ) { let sortCB1 = new Int32Array(4); sortCB1[0] = level / BITONIC_BLOCK_SIZE; sortCB1[1] = (level & ~NUM_ELEMENTS) / BITONIC_BLOCK_SIZE; sortCB1[2] = MATRIX_WIDTH; sortCB1[3] = MATRIX_HEIGHT; gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[1]); gl.bufferSubData(gl.UNIFORM_BUFFER, 0, sortCB1, 0); gl.bindBufferBase(gl.UNIFORM_BUFFER, 1, gUpdateSortCB[1]); // Transpose the data from buffer 1 into buffer 2 computePass(CS_TRANSPOSE, MATRIX_WIDTH / TRANSPOSE_BLOCK_SIZE, MATRIX_HEIGHT / TRANSPOSE_BLOCK_SIZE, 1); // Sort the transposed column data gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 3, gParticleBuffers[4]); computePass(CS_BITONIC_SORT, gNumParticles / BITONIC_BLOCK_SIZE, 1, 1); let sortCB2 = new Int32Array(4); sortCB2[0] = BITONIC_BLOCK_SIZE; sortCB2[1] = level; sortCB2[2] = MATRIX_HEIGHT; sortCB2[3] = MATRIX_WIDTH; gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[2]); gl.bufferSubData(gl.UNIFORM_BUFFER, 0, sortCB2, 0); // Transpose the data from buffer 2 back into buffer 1 gl.bindBufferBase(gl.UNIFORM_BUFFER, 1, gUpdateSortCB[2]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 4, gParticleBuffers[3]); 
computePass(CS_TRANSPOSE, MATRIX_HEIGHT / TRANSPOSE_BLOCK_SIZE, MATRIX_WIDTH / TRANSPOSE_BLOCK_SIZE, 1); // Sort the row data gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 3, gParticleBuffers[3]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 4, gParticleBuffers[4]); computePass(CS_BITONIC_SORT, gNumParticles / BITONIC_BLOCK_SIZE, 1, 1); } } function initBuffers() { let iStartingWidth = Math.round(Math.sqrt(gNumParticles)); let particles = new Float32Array(4 * gNumParticles); for ( let i = 0, n = 0; i < 4 * gNumParticles;) { let x = n % iStartingWidth; let y = Math.round(n / iStartingWidth); particles[i] = gFInitialParticleSpacing * x; particles[i+1] = gFInitialParticleSpacing * y; i += 4; n++; } gParticleBuffers[0] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[0]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, particles, gl.DYNAMIC_DRAW); let particleDensities = new Float32Array(4 * gNumParticles); gParticleBuffers[1] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[1]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, particleDensities, gl.DYNAMIC_DRAW); let particleForces = new Float32Array(4 * gNumParticles); gParticleBuffers[2] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[2]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, particleForces, gl.DYNAMIC_DRAW); let grids = new Int32Array(4 * gNumParticles); gParticleBuffers[3] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[3]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, grids, gl.DYNAMIC_DRAW); let gridsTemp = new Int32Array(4 * gNumParticles); gParticleBuffers[4] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[4]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, gridsTemp, gl.DYNAMIC_DRAW); let gridIndices = new Int32Array(4 * NUM_GRID_INDICES); gParticleBuffers[5] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[5]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, gridIndices, 
gl.DYNAMIC_DRAW); gParticleBuffers[6] = gl.createBuffer(); gl.bindBuffer(gl.SHADER_STORAGE_BUFFER, gParticleBuffers[6]); gl.bufferData(gl.SHADER_STORAGE_BUFFER, particles, gl.DYNAMIC_DRAW); let SimCons = { 'iNumParticles': gNumParticles, 'fTimeStep': 0.005, 'fSmoothlen': gSmoothlen, 'fPressureStiffness': gPressureStiffness, 'fRestDensity': gRestDensity, 'fDensityCoef': gParticleMass * 315.0 / (64.0 * Math.PI * Math.pow(gSmoothlen, 9)), 'fGradPressureCoef': gParticleMass * -45.0 / (Math.PI * Math.pow(gSmoothlen, 6)), 'fLapViscosityCoef': gParticleMass * gViscosity * 45.0 / (Math.PI * Math.pow(gSmoothlen, 6)), 'fWallStiffness': gWallStiffness, 'vGravity': [gGravityX, gGravityY, 0.0, 0.0], 'vGridDim': [1.0 / gSmoothlen, 1.0 / gSmoothlen, 0.0, 0.0], 'vPlanes': [ [1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [-1.0, 0.0, gMapWidth, 0.0], [0.0, -1.0, gMapHeight, 0.0], ], }; let data = new ArrayBuffer(1 * 4 + 8 * 4 + 3 * 4 + 2 * 4 * 4 + 4 * 4 * 4); let view = new Int32Array(data, 0, 1); view[0] = SimCons.iNumParticles; view = new Float32Array(data, 4, 35); view[0] = SimCons.fTimeStep; view[1] = SimCons.fSmoothlen; view[2] = SimCons.fPressureStiffness; view[3] = SimCons.fRestDensity; view[4] = SimCons.fDensityCoef; view[5] = SimCons.fGradPressureCoef; view[6] = SimCons.fLapViscosityCoef; view[7] = SimCons.fWallStiffness; view[8] = 0.0; view[9] = 0.0; view[10] = 0.0; view[11] = SimCons.vGravity[0]; view[12] = SimCons.vGravity[1]; view[13] = SimCons.vGravity[2]; view[14] = SimCons.vGravity[3]; view[15] = SimCons.vGridDim[0]; view[16] = SimCons.vGridDim[1]; view[17] = SimCons.vGridDim[2]; view[18] = SimCons.vGridDim[3]; view[19] = SimCons.vPlanes[0][0]; view[20] = SimCons.vPlanes[0][1]; view[21] = SimCons.vPlanes[0][2]; view[22] = SimCons.vPlanes[0][3]; view[23] = SimCons.vPlanes[1][0]; view[24] = SimCons.vPlanes[1][1]; view[25] = SimCons.vPlanes[1][2]; view[26] = SimCons.vPlanes[1][3]; view[27] = SimCons.vPlanes[2][0]; view[28] = SimCons.vPlanes[2][1]; view[29] = 
SimCons.vPlanes[2][2]; view[30] = SimCons.vPlanes[2][3]; view[31] = SimCons.vPlanes[3][0]; view[32] = SimCons.vPlanes[3][1]; view[33] = SimCons.vPlanes[3][2]; view[34] = SimCons.vPlanes[3][3]; gUpdateParams = gl.createBuffer(); gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateParams); gl.bufferData(gl.UNIFORM_BUFFER, new Uint8Array(data), gl.STATIC_DRAW); gl.bindBufferBase(gl.UNIFORM_BUFFER, 0, gUpdateParams); let sortData = new ArrayBuffer(4 * 4); gUpdateSortCB[0] = gl.createBuffer(); gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[0]); gl.bufferData(gl.UNIFORM_BUFFER, sortData, gl.STATIC_DRAW); gUpdateSortCB[1] = gl.createBuffer(); gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[1]); gl.bufferData(gl.UNIFORM_BUFFER, sortData, gl.STATIC_DRAW); gUpdateSortCB[2] = gl.createBuffer(); gl.bindBuffer(gl.UNIFORM_BUFFER, gUpdateSortCB[2]); gl.bufferData(gl.UNIFORM_BUFFER, sortData, gl.STATIC_DRAW); let orthographic = function(left, right, bottom, top, near, far) { let o = new Matrix4x4(); o.elements = [ 2 / (right - left), 0, 0, 0, 0, 2 / (top - bottom), 0, 0, 0, 0, 1 / (near - far), 0, (left + right) / (left - right), (bottom + top) / (bottom - top), near / (near - far), 1, ]; return o; }; let mView = new Matrix4x4(); mView.elements = [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, -gMapWidth/2.0, -gMapHeight / 2.0, 0, 1, ]; let mProjection = orthographic(-gMapWidth/2.0, gMapWidth/2.0, -gMapHeight / 2.0, gMapHeight / 2.0, 0, 1); gViewProjection.loadIdentity(); gViewProjection.multiply(mView); gViewProjection.multiply(mProjection); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 0, gParticleBuffers[0]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 1, gParticleBuffers[1]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 2, gParticleBuffers[2]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 3, gParticleBuffers[3]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 4, gParticleBuffers[4]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 5, gParticleBuffers[5]); gl.bindBufferBase(gl.SHADER_STORAGE_BUFFER, 6, 
gParticleBuffers[6]); } function initRender() { gCanvas.width = window.innerWidth; gCanvas.height = window.innerHeight; gl.viewport(0, 0, gCanvas.width, gCanvas.height); gl.clearColor(0.0, 0.0, 0.0, 1.0); gPointSize = 4.0 * gCanvas.width * gCanvas.height / (1024 * 1024); let vertexShader = loadShader(gl.VERTEX_SHADER, gShaderStr[VS]); let fragmentShader = loadShader(gl.FRAGMENT_SHADER, gShaderStr[FS]); gPrograms[VS] = gl.createProgram(); gl.attachShader(gPrograms[VS], vertexShader); gl.attachShader(gPrograms[VS], fragmentShader); // Bind a_particlePos to attribute 0 // Bind a_particleVel to attribute 1 gl.bindAttribLocation(gPrograms[VS], 0, 'a_particlePos'); gl.bindAttribLocation(gPrograms[VS], 1, 'a_particleDen'); // Load the vertex data gl.bindBuffer(gl.ARRAY_BUFFER, gParticleBuffers[0]); gl.enableVertexAttribArray(0); gl.vertexAttribPointer(0, 2, gl.FLOAT, gl.FALSE, 16, 0); gl.vertexAttribDivisor(0, 1); gl.bindBuffer(gl.ARRAY_BUFFER, gParticleBuffers[1]); gl.enableVertexAttribArray(1); gl.vertexAttribPointer(1, 1, gl.FLOAT, gl.FALSE, 16, 0); gl.vertexAttribDivisor(1, 1); // Link the program gl.linkProgram(gPrograms[VS]); gl.useProgram(gPrograms[VS]); // Set uniforms let mViewProjectionLoc = gl.getUniformLocation(gPrograms[VS], 'u_viewProjection'); gl.uniformMatrix4fv(mViewProjectionLoc, false, gViewProjection.elements); let fPointSizeLoc = gl.getUniformLocation(gPrograms[VS], 'u_fPointSize'); gl.uniform1f(fPointSizeLoc, gPointSize); } function initCS() { [CS_DEN_SIM, CS_DEN_SHA, CS_FOR_SIM, CS_FOR_SHA, CS_INT, CS_BUILD_GRID, CS_BITONIC_SORT, CS_TRANSPOSE, CS_CLEAR_GRID_IND, CS_BUILD_GRID_IND, CS_REARRANGE, CS_DEN_GRID, CS_FOR_GRID, CS_INT_GRID] .forEach((v) => { let cs = loadShader(gl.COMPUTE_SHADER, gShaderStr[v]); gPrograms[v] = gl.createProgram(); gl.attachShader(gPrograms[v], cs); gl.linkProgram(gPrograms[v]); }); } function destroyBuffers() { gl.deleteBuffer(gParticleBuffers[0]); gl.deleteBuffer(gParticleBuffers[1]); gl.deleteBuffer(gParticleBuffers[2]); 
gl.deleteBuffer(gParticleBuffers[3]); gl.deleteBuffer(gParticleBuffers[4]); gl.deleteBuffer(gParticleBuffers[5]); gl.deleteBuffer(gParticleBuffers[6]); gl.deleteBuffer(gUpdateParams); gl.deleteBuffer(gUpdateSortCB[0]); gl.deleteBuffer(gUpdateSortCB[1]); gl.deleteBuffer(gUpdateSortCB[2]); } function renderPass() { gl.clear(gl.COLOR_BUFFER_BIT); gl.useProgram(gPrograms[VS]); gl.drawArraysInstanced(gl.POINTS, 0, 1, gNumParticles); } function init() { let threads = getUrlString('t'); if (threads) { gThreads = threads; } gDispatchNum = Math.ceil(gNumParticles / gThreads); for (let i in gShaderStr) { gShaderStr[i] = gShaderStr[i].replace(/NUM_THREADS/g, gThreads); }; initController(); initBuffers(); initCS(); initRender(); gFPSCounter = new FPSCounter(gFpsElem, 16); // make canvas auto fit window window.addEventListener('resize', function() { onResize(); }, false); function onResize() { gCanvas.width = window.innerWidth * window.devicePixelRatio; gCanvas.height = window.innerHeight * window.devicePixelRatio; gl.viewport(0, 0, gCanvas.width, gCanvas.height); // Reset point size gPointSize = 4.0 * gCanvas.width * gCanvas.height / (1024 * 1024); let fPointSizeLoc = gl.getUniformLocation(gPrograms[VS], 'u_fPointSize'); gl.uniform1f(fPointSizeLoc, gPointSize); } } function computePass(shader, x, y, z) { gl.useProgram(gPrograms[shader]); gl.dispatchCompute(x, y, z); gl.memoryBarrier(gl.SHADER_STORAGE_BARRIER_BIT); } function simulateFluidSimple() { // Density computePass(CS_DEN_SIM, gDispatchNum, 1, 1); // Force computePass(CS_FOR_SIM, gDispatchNum, 1, 1); // Integrate computePass(CS_INT, gDispatchNum, 1, 1); } function simulateFluidShared() { // Density computePass(CS_DEN_SHA, gDispatchNum, 1, 1); // Force computePass(CS_FOR_SHA, gDispatchNum, 1, 1); // Integrate computePass(CS_INT, gDispatchNum, 1, 1); } function simulateFluidGrid() { // Build Grid computePass(CS_BUILD_GRID, gNumParticles / gThreads, 1, 1); // Sort Grid gpuSort(); // Clear and build Grid Indices 
computePass(CS_CLEAR_GRID_IND, NUM_GRID_INDICES / gThreads, 1, 1); computePass(CS_BUILD_GRID_IND, gNumParticles / gThreads, 1, 1); // Rearrange computePass(CS_REARRANGE, gNumParticles / gThreads, 1, 1); // Density computePass(CS_DEN_GRID, gNumParticles / gThreads, 1, 1); // Force computePass(CS_FOR_GRID, gNumParticles / gThreads, 1, 1); // Integrate computePass(CS_INT_GRID, gNumParticles / gThreads, 1, 1); } function frame() { if (gScheduledRAF) { return; } gScheduledRAF = true; gFPSCounter.update(); switch (gSimMode) { case 'simple': simulateFluidSimple(); break; case 'shared': simulateFluidShared(); break; case 'grid': simulateFluidGrid(); } renderPass(); requestAnimationFrame(() => { gScheduledRAF = false; frame(); }); } function start() { gCanvas = document.getElementById('example'); gl = WebGLUtils.setupWebGL2Compute(gCanvas); if (!gl) { return; } init(); requestAnimationFrame(frame); } function loadShaderFromFile(filename, index, onLoadShader) { let request = new XMLHttpRequest(); request.onreadystatechange = function () { if (request.readyState === 4 && request.status === 200) { onLoadShader(index, request.responseText); } }; request.open('GET', './shaders/' + filename, true); request.send(); } (function () { let shaders = [ 'shader.vert', 'shader.frag', 'shaderDensitySimple.comp', 'shaderForceSimple.comp', 'shaderIntegrate.comp', 'shaderDensityShared.comp', 'shaderForceShared.comp', 'shaderBuildGrid.comp', 'shaderBitonicSort.comp', 'shaderTranspose.comp', 'shaderClearGridIndices.comp', 'shaderBuildGridIndices.comp', 'shaderRearrange.comp', 'shaderDensityGrid.comp', 'shaderForceGrid.comp', 'shaderIntegrateGrid.comp', ]; let counter = 0; for (let i = 0; i < shaders.length; i++) { loadShaderFromFile(shaders[i], i, (index, str) => { gShaderStr[index] = str; if (++counter === shaders.length) start(); }); } })();
paolomococci/business-automation-workshop
Quarkus/sample-planning/src/main/java/local/mocaccino/planning/SamplePlanning.java
<gh_stars>1-10 package local.mocaccino.planning; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; @Path("/sample") public class SamplePlanning { @GET @Produces(MediaType.TEXT_PLAIN) public String sample() { return "Hello from SamplePlanning"; } }
Keneral/aframeworksbase1
tools/aapt2/util/BigBuffer.cpp
<filename>tools/aapt2/util/BigBuffer.cpp<gh_stars>0 /* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "util/BigBuffer.h" #include <algorithm> #include <memory> #include <vector> namespace aapt { void* BigBuffer::nextBlockImpl(size_t size) { if (!mBlocks.empty()) { Block& block = mBlocks.back(); if (block.mBlockSize - block.size >= size) { void* outBuffer = block.buffer.get() + block.size; block.size += size; mSize += size; return outBuffer; } } const size_t actualSize = std::max(mBlockSize, size); Block block = {}; // Zero-allocate the block's buffer. block.buffer = std::unique_ptr<uint8_t[]>(new uint8_t[actualSize]()); assert(block.buffer); block.size = size; block.mBlockSize = actualSize; mBlocks.push_back(std::move(block)); mSize += size; return mBlocks.back().buffer.get(); } } // namespace aapt
atlassubbed/atlas-relax
test/effects/Tracker.js
// Tracker is used to log mutation events in order. // * many edit permuations may lead to a correct outcome // use this effect when the order matters // * when testing final trees, use Renderer instead module.exports = class Tracker { constructor(events){ this.events = events; } log(type, f, t=f.temp){ const e = {[type]: t && t.data.id}; this.events.push(e); } temp(f, t){this.log("mWR", f, t)} move(f){this.log("mWM", f)} add(f, p, s, t){this.log("mWA", f, t)} remove(f, p, s, t){this.log("mWP", f, t)} }
igor-sfdc/aura
aura-components/src/main/components/ui/outputSelect/outputSelectTest.js
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ /** * Verify setting value attribute to '' blank. */ testEmptyValue:{ attributes : {value: ''}, test: function(component){ aura.test.assertEquals('', $A.test.getText(component.find('span').getElement()), "When value is initialized to an empty string, nothing should be shown."); } }, /** * positive test case: Verify outputSelect basic functionality. */ testValue: { attributes : {value : 'holy guacamole!'}, test: function(component){ aura.test.assertEquals('holy guacamole!', $A.test.getText(component.find('span').getElement()), "Select text not correct"); } } })
chibaye/epass
src/ui/icons/caret.js
import React from 'react' const style = { fill: 'none', stroke: '#000', strokeWidth: 32, strokeMiterlimit: 10 } const data = { down: { viewBox: '0 0 512 512', paths: [ {d: 'M342.43,238.23,268.3,327.32a16,16,0,0,1-24.6,0l-74.13-89.09A16,16,0,0,1,181.86,212H330.14A16,16,0,0,1,342.43,238.23Z'}, {d: 'M448,256c0-106-86-192-192-192S64,150,64,256s86,192,192,192S448,362,448,256Z', style} ] }, up: { viewBox: '0 0 512 512', paths: [ {d: 'M342.43,273.77,268.3,184.68a16,16,0,0,0-24.6,0l-74.13,89.09A16,16,0,0,0,181.86,300H330.14A16,16,0,0,0,342.43,273.77Z'}, {d: 'M448,256c0-106-86-192-192-192S64,150,64,256s86,192,192,192S448,362,448,256Z', style} ] } } const CaretIcon = ({name, size=28, ...rest}) => <svg xmlns='http://www.w3.org/2000/svg' viewBox={data[name].viewBox} width={size} height={size} {...rest}> {data[name].paths.map(({d, style}, i) => <path key={i} d={d} style={{...style, stroke: rest.fill}}/>)} </svg> export default CaretIcon
testingisdocumenting/webtau
webtau-cli/src/main/java/org/testingisdocumenting/webtau/cli/CliForegroundCommand.java
<reponame>testingisdocumenting/webtau
/*
 * Copyright 2020 webtau maintainers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.testingisdocumenting.webtau.cli;

import org.testingisdocumenting.webtau.cli.expectation.CliValidationExitCodeOutputHandler;
import org.testingisdocumenting.webtau.cli.expectation.CliValidationOutputOnlyHandler;
import org.testingisdocumenting.webtau.expectation.ActualPath;
import org.testingisdocumenting.webtau.expectation.ExpectationHandler;
import org.testingisdocumenting.webtau.expectation.ExpectationHandlers;
import org.testingisdocumenting.webtau.expectation.ValueMatcher;
import org.testingisdocumenting.webtau.reporter.StepReportOptions;
import org.testingisdocumenting.webtau.reporter.WebTauStep;

import java.util.function.Consumer;

import static org.testingisdocumenting.webtau.Matchers.equal;
import static org.testingisdocumenting.webtau.reporter.IntegrationTestsMessageBuilder.action;
import static org.testingisdocumenting.webtau.reporter.IntegrationTestsMessageBuilder.stringValue;
import static org.testingisdocumenting.webtau.reporter.TokenizedMessage.tokenizedMessage;

/**
 * Executes a CLI command in the foreground as a reported webtau step,
 * applying the caller-supplied validation handler to the captured
 * output (and, optionally, the exit code).
 */
public class CliForegroundCommand {
    CliForegroundCommand() {
    }

    /**
     * Runs the command and validates stdout/stderr with the given handler.
     * If the handler never checks the exit code, it is validated to equal 0
     * (see {@code validateExitCode}).
     *
     * @param command command line to run
     * @param config  process configuration (env, working dir, timeout, ...)
     * @param handler validation callback receiving out/err
     * @return run result with exit code and captured streams
     */
    public CliRunResult run(String command, CliProcessConfig config, CliValidationOutputOnlyHandler handler) {
        return cliStep(command, config,
                (validationResult) -> handler.handle(
                        validationResult.getOut(),
                        validationResult.getErr()));
    }

    /**
     * Same as {@link #run(String, CliProcessConfig, CliValidationOutputOnlyHandler)}
     * but the handler also receives the exit code to validate explicitly.
     */
    public CliRunResult run(String command, CliProcessConfig config,
                            CliValidationExitCodeOutputHandler handler) {
        return cliStep(command, config,
                (validationResult) -> handler.handle(
                        validationResult.getExitCode(),
                        validationResult.getOut(),
                        validationResult.getErr()));
    }

    /**
     * Wraps the run/validate cycle in a reported {@link WebTauStep}.
     * The documentation artifact is registered in a finally block so it is
     * captured even when the step (or its validation) fails.
     */
    private CliRunResult cliStep(String command, CliProcessConfig config,
                                 Consumer<CliValidationResult> validationCode) {
        CliValidationResult validationResult = new CliValidationResult(command);
        validationResult.setConfig(config);

        WebTauStep step = WebTauStep.createStep(
                tokenizedMessage(action("running cli command "), stringValue(command)),
                () -> tokenizedMessage(action("ran cli command"), stringValue(command)),
                () -> runAndValidate(validationResult, command, config, validationCode));

        try {
            step.setInput(config.createStepInput());
            step.setOutputSupplier(() -> validationResult);
            step.execute(StepReportOptions.REPORT_ALL);

            return new CliRunResult(command,
                    validationResult.getExitCode().get(),
                    validationResult.getOut().get(),
                    validationResult.getErr().get());
        } finally {
            // always record the doc artifact, even on failure
            Cli.cli.setLastDocumentationArtifact(validationResult.createDocumentationArtifact());
        }
    }

    /**
     * Runs the process, records timing and captured streams, then invokes the
     * validation callback under an extra expectation handler that copies every
     * mismatch into {@code validationResult} (while still passing it on, so
     * the normal failure reporting is unaffected).
     * <p>
     * Throws on timeout and on stream-reading errors; assertion errors
     * propagate unchanged, any other throwable is wrapped in {@link CliException}
     * after its message is stored on the validation result.
     */
    private void runAndValidate(CliValidationResult validationResult,
                                String command,
                                CliProcessConfig config,
                                Consumer<CliValidationResult> validationCode) {
        try {
            long startTime = System.currentTimeMillis();
            ProcessRunResult runResult = ProcessUtils.run(command, config);
            long endTime = System.currentTimeMillis();

            // exit code is only meaningful when the process actually finished
            if (!runResult.isTimeOut()) {
                validationResult.setExitCode(exitCode(runResult.getExitCode()));
            }

            validationResult.setOut(runResult.getOutput());
            validationResult.setErr(runResult.getError());
            validationResult.setStartTime(startTime);
            validationResult.setElapsedTime(endTime - startTime);

            if (runResult.isTimeOut()) {
                throw new RuntimeException("process timed-out");
            }

            if (runResult.getErrorReadingException() != null) {
                throw runResult.getErrorReadingException();
            }

            if (runResult.getOutputReadingException() != null) {
                throw runResult.getOutputReadingException();
            }

            // record mismatches on the validation result but let the default
            // handlers still see them (PassToNext)
            ExpectationHandler recordAndThrowHandler = new ExpectationHandler() {
                @Override
                public Flow onValueMismatch(ValueMatcher valueMatcher, ActualPath actualPath,
                                            Object actualValue, String message) {
                    validationResult.addMismatch(message);
                    return ExpectationHandler.Flow.PassToNext;
                }
            };

            ExpectationHandlers.withAdditionalHandler(recordAndThrowHandler, () -> {
                validationCode.accept(validationResult);
                validateExitCode(validationResult);
                return null;
            });
        } catch (AssertionError e) {
            // validation failures propagate as-is
            throw e;
        } catch (Throwable e) {
            validationResult.setErrorMessage(e.getMessage());
            throw new CliException(e.getMessage(), e);
        }
    }

    /**
     * Default exit-code validation: if the user's handler did not check the
     * exit code, require it to be zero.
     */
    private static void validateExitCode(CliValidationResult validationResult) {
        if (validationResult.getExitCode().isChecked()) {
            return;
        }

        validationResult.getExitCode().should(equal(0));
    }

    private CliExitCode exitCode(int exitCode) {
        return new CliExitCode(exitCode);
    }
}
McLeodMoores/starling
examples/examples-simulated/src/main/java/com/mcleodmoores/examples/simulated/loader/legalentity/ExamplesLegalEntityLoader.java
<reponame>McLeodMoores/starling<gh_stars>1-10
/**
 * Copyright (C) 2017 - present McLeod Moores Software Limited. All rights reserved.
 */
package com.mcleodmoores.examples.simulated.loader.legalentity;

import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.opengamma.component.tool.AbstractTool;
import com.opengamma.core.legalentity.Rating;
import com.opengamma.core.obligor.CreditRating;
import com.opengamma.financial.tool.ToolContext;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.master.legalentity.LegalEntityDocument;
import com.opengamma.master.legalentity.LegalEntityMaster;
import com.opengamma.master.legalentity.LegalEntitySearchRequest;
import com.opengamma.master.legalentity.LegalEntitySearchResult;
import com.opengamma.master.legalentity.ManageableLegalEntity;
import com.opengamma.masterdb.legalentity.DbLegalEntityBeanMaster;
import com.opengamma.scripts.Scriptable;
import com.opengamma.util.ArgumentChecker;

/**
 * Simple implementation of a legal entity loader: populates the legal entity
 * master with example government entities.
 */
@Scriptable
public class ExamplesLegalEntityLoader extends AbstractTool<ToolContext> {
  /** Logger for progress reporting. */
  private static final Logger LOGGER = LoggerFactory.getLogger(ExamplesLegalEntityLoader.class);

  /**
   * Main method to run this loader.
   *
   * @param args
   *          The program arguments
   */
  public static void main(final String[] args) {
    new ExamplesLegalEntityLoader().invokeAndTerminate(args);
  }

  @Override
  protected void doRun() {
    addGovernments();
  }

  /** Builds the example US government entity with a AAA Fitch rating and stores it. */
  private void addGovernments() {
    final ExternalIdBundle identifiers =
        ExternalIdBundle.of(DbLegalEntityBeanMaster.IDENTIFIER_SCHEME_DEFAULT, "USGVT");
    final ManageableLegalEntity usGovernment = new ManageableLegalEntity("US Government", identifiers);
    usGovernment.setRatings(Arrays.asList(new Rating("Fitch", CreditRating.AAA, null)));
    storeLegalEntity(getToolContext().getLegalEntityMaster(), usGovernment);
  }

  /**
   * Stores a legal entity in the legal entity database. An existing entity with the
   * same name is updated in place; otherwise a new document is added.
   *
   * @param master
   *          the master in which to store the legal entity, not null
   * @param entity
   *          The legal entity
   */
  public static void storeLegalEntity(final LegalEntityMaster master, final ManageableLegalEntity entity) {
    ArgumentChecker.notNull(master, "master");
    final LegalEntitySearchRequest searchRequest = new LegalEntitySearchRequest();
    searchRequest.setName(entity.getName());
    final LegalEntitySearchResult searchResult = master.search(searchRequest);
    final LegalEntityDocument existing = searchResult.getFirstDocument();
    if (existing == null) {
      LOGGER.info("Adding {}", entity.getName());
      master.add(new LegalEntityDocument(entity));
    } else {
      LOGGER.info("Updating {}", entity.getName());
      existing.setLegalEntity(entity);
      master.update(existing);
    }
  }
}
34Audiovisual/Progetti_JUCE
TLDisplay/Source/PluginProcessor.cpp
<reponame>34Audiovisual/Progetti_JUCE
/*
  ==============================================================================

    This file contains the basic framework code for a JUCE plugin processor.

    TLDisplay: a MIDI-output-only processor that persists display/position/
    text-row state in a juce::ValueTree and re-sends it ("retrig") after the
    host restores plugin state.

  ==============================================================================
*/

#include "PluginProcessor.h"
#include "PluginEditor.h"

//==============================================================================
// Constructor: seeds the state ValueTree with defaults, opens the hardware
// MIDI output named "TLLINK" if present, and starts the periodic timer
// (30 ms; the timer callback is declared elsewhere in the header).
ControlerMidiAudioProcessor::ControlerMidiAudioProcessor()
#ifndef JucePlugin_PreferredChannelConfigurations
     : AudioProcessor (BusesProperties()
                     #if ! JucePlugin_IsMidiEffect
                      #if ! JucePlugin_IsSynth
                       .withInput  ("Input",  juce::AudioChannelSet::stereo(), true)
                      #endif
                       .withOutput ("Output", juce::AudioChannelSet::stereo(), true)
                     #endif
                       )
#endif
{
    // Default state: numeric display/position, two text rows, and a
    // "sent" flag consumed by retrig().
    valuesTreeMidi.setProperty("displayValue", 0, nullptr);
    valuesTreeMidi.setProperty("positionValue", 0, nullptr);
    valuesTreeMidi.setProperty("riga1Value", "", nullptr);
    valuesTreeMidi.setProperty("riga2Value", "", nullptr);
    valuesTreeMidi.setProperty("sentValue", 0, nullptr);

    // Scan available MIDI outputs and open the TLLINK device when found.
    auto midiDevice = juce::MidiOutput::getAvailableDevices();
    for ( auto m : midiDevice)
    {
        //DEBUG
        juce::String dev = "Device name: " + m.name + " Device ID: " + m.identifier;
        DBG(dev);
        //DEBUG
        // alternative device names kept for local debugging:
        // if (m.name == "TLLINK")
        //if (m.name == "MIDI Monitor (Untitled)")
        if (m.name == "TLLINK")
            midiDev = juce::MidiOutput::openDevice(m.identifier);
    }
    startTimer(30);
}

ControlerMidiAudioProcessor::~ControlerMidiAudioProcessor()
{
    stopTimer();
}

//==============================================================================
const juce::String ControlerMidiAudioProcessor::getName() const
{
    return JucePlugin_Name;
}

// This processor does not consume MIDI input...
bool ControlerMidiAudioProcessor::acceptsMidi() const
{
    return false;
}

// ...but it does produce MIDI output.
bool ControlerMidiAudioProcessor::producesMidi() const
{
    return true;
}

bool ControlerMidiAudioProcessor::isMidiEffect() const
{
   #if JucePlugin_IsMidiEffect
    return true;
   #else
    return false;
   #endif
}

double ControlerMidiAudioProcessor::getTailLengthSeconds() const
{
    return 0.0;
}

int ControlerMidiAudioProcessor::getNumPrograms()
{
    return 1;   // NB: some hosts don't cope very well if you tell them there are 0 programs,
                // so this should be at least 1, even if you're not really implementing programs.
}

int ControlerMidiAudioProcessor::getCurrentProgram()
{
    return 0;
}

void ControlerMidiAudioProcessor::setCurrentProgram (int index)
{
}

const juce::String ControlerMidiAudioProcessor::getProgramName (int index)
{
    return {};
}

void ControlerMidiAudioProcessor::changeProgramName (int index, const juce::String& newName)
{
}

//==============================================================================
// No audio resources are needed; audio callbacks are intentionally empty.
void ControlerMidiAudioProcessor::prepareToPlay (double sampleRate, int samplesPerBlock)
{
}

void ControlerMidiAudioProcessor::releaseResources()
{
}

#ifndef JucePlugin_PreferredChannelConfigurations
bool ControlerMidiAudioProcessor::isBusesLayoutSupported (const BusesLayout& layouts) const
{
  #if JucePlugin_IsMidiEffect
    juce::ignoreUnused (layouts);
    return true;
  #else
    // Only mono or stereo output is supported.
    if (layouts.getMainOutputChannelSet() != juce::AudioChannelSet::mono()
     && layouts.getMainOutputChannelSet() != juce::AudioChannelSet::stereo())
        return false;

   #if ! JucePlugin_IsSynth
    if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
        return false;
   #endif

    return true;
  #endif
}
#endif

// Audio is passed through untouched; all MIDI is sent directly to the
// opened device elsewhere, not via this block's MidiBuffer.
void ControlerMidiAudioProcessor::processBlock (juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midiMessages)
{
}

//==============================================================================
bool ControlerMidiAudioProcessor::hasEditor() const
{
    return true; // (change this to false if you choose to not supply an editor)
}

juce::AudioProcessorEditor* ControlerMidiAudioProcessor::createEditor()
{
    return new ControlerMidiAudioProcessorEditor (*this);
}

// Serializes the state ValueTree into the host-provided block.
void ControlerMidiAudioProcessor::getStateInformation (juce::MemoryBlock& destData)
{
    juce::MemoryOutputStream mos(destData, false);
    DBG( "WRITE : " << valuesTreeMidi.toXmlString() );
    valuesTreeMidi.writeToStream(mos);
}

// Restores the state ValueTree and schedules a deferred retrig() so the
// display is re-sent once the session has settled. The delay grows with
// the stored display/position values so multiple instances fire staggered.
void ControlerMidiAudioProcessor::setStateInformation (const void* data, int sizeInBytes)
{
    auto tree = juce::ValueTree::readFromData(data, sizeInBytes);
    DBG( "READ : " << tree.toXmlString() );
    if( tree.isValid() )
    {
        valuesTreeMidi = tree;
    }
    //retrig();
    int d = (int)((valuesTreeMidi.getPropertyAsValue("displayValue", nullptr, true)).getValue());
    int p = (int)((valuesTreeMidi.getPropertyAsValue("positionValue", nullptr, true)).getValue());
    // earlier scheduling scheme kept for reference:
    // int stepTimeMs = 200;
    //auto delayTimeMs = stepTimeMs * (d + p);
    auto delayTimeMs = 4000 + (d * 1000 - 1000 + p * 100);
    std::function<void()> trig = [this] {retrig();};
    juce::Timer::callAfterDelay(delayTimeMs, trig);
}

//==============================================================================
// Re-sends the stored display content, but only if "sentValue" is truthy
// (i.e. something was previously sent). makeNoteArray/addEvent are project
// helpers declared elsewhere — presumably they encode the two text rows as
// MIDI note arrays and queue the event; confirm against PluginProcessor.h.
void ControlerMidiAudioProcessor::retrig ()
{
    auto started = valuesTreeMidi.getPropertyAsValue("sentValue", nullptr, true);
    if ((bool)(started.getValue()))
    {
        int display = (int)((valuesTreeMidi.getPropertyAsValue("displayValue", nullptr, true)).getValue());
        int position = (int)((valuesTreeMidi.getPropertyAsValue("positionValue", nullptr, true)).getValue());
        juce::String riga1 = (valuesTreeMidi.getPropertyAsValue("riga1Value", nullptr, true)).toString();
        juce::String riga2 = (valuesTreeMidi.getPropertyAsValue("riga2Value", nullptr, true)).toString();
        auto array1 = makeNoteArray(riga1);
        auto array2 = makeNoteArray(riga2);
        addEvent(display, position - 1, array1, array2);
    };
}

//==============================================================================
// This creates new instances of the plugin..
juce::AudioProcessor* JUCE_CALLTYPE createPluginFilter()
{
    return new ControlerMidiAudioProcessor();
}
yongliu-mdsol/openapi3-generator
gems/gems/ttfunk-1.5.1/lib/ttfunk/table/cmap/format00.rb
<reponame>yongliu-mdsol/openapi3-generator
require_relative '../../encoding/mac_roman'
require_relative '../../encoding/windows_1252'

module TTFunk
  class Table
    class Cmap
      # Format 0 cmap subtable: a flat 256-entry byte-to-glyph index table.
      # Only character codes 0..255 can be mapped.
      module Format00
        attr_reader :language
        attr_reader :code_map

        # Expects a hash mapping character codes to glyph ids (where the
        # glyph ids are from the original font). Returns a hash including
        # a new map (:charmap) that maps the characters in charmap to a
        # another hash containing both the old (:old) and new (:new) glyph
        # ids. The returned hash also includes a :subtable key, which contains
        # the encoded subtable for the given charmap.
        def self.encode(charmap)
          next_id = 0
          glyph_indexes = Array.new(256, 0)
          # glyph id 0 (.notdef) always maps to itself
          glyph_map = { 0 => 0 }

          # Walk codes in ascending order so remapped glyph ids are assigned
          # deterministically; reuse an id when the same source glyph recurs.
          new_map = charmap.keys.sort.each_with_object({}) do |code, map|
            glyph_map[charmap[code]] ||= next_id += 1
            map[code] = { old: charmap[code], new: glyph_map[charmap[code]] }
            glyph_indexes[code] = glyph_map[charmap[code]]
            map
          end

          # format, length, language, indices
          # length is fixed: 3 uint16 header fields (6 bytes) + 256 index
          # bytes = 262. 'nnnC*' = three big-endian uint16s then raw bytes.
          subtable = [0, 262, 0, *glyph_indexes].pack('nnnC*')

          { charmap: new_map, subtable: subtable, max_glyph_id: next_id + 1 }
        end

        # Glyph id for a character code; unmapped codes return 0 (.notdef).
        def [](code)
          @code_map[code] || 0
        end

        def supported?
          true
        end

        private

        # Reads the parsed fields: skips 2 bytes (length), reads the
        # big-endian language word, then the 256 glyph index bytes.
        # (`read` is provided by the including parser class.)
        def parse_cmap!
          @language = read(4, 'x2n')
          @code_map = read(256, 'C*')
        end
      end
    end
  end
end
DougBurke/xspeclmodels
xspec/XSUtil/Utils/ProcessManager.h
//C++
#ifndef PROCESSMANAGER_H
#define PROCESSMANAGER_H 1
#include <xsTypes.h>
#include <map>
#include <vector>
#include <stdio.h>
#include <unistd.h>
#include <string>

// Bundle of int/double/string payloads exchanged between the parent and a
// child process over the pipe; status carries the per-call error flag
// (negative values indicate failure — see ParallelFunc::execute).
struct TransferStruct
{
   TransferStruct(): iValues(), dValues(), sValues(), status(0) {}
   std::vector<std::vector<int> > iValues;
   std::vector<std::vector<double> > dValues;
   std::vector<std::string> sValues;
   int status;
};

// Interface for the unit of work executed (possibly in a child process)
// for each input TransferStruct.
class ParallelFunc
{
   public:
      ParallelFunc() {}
      virtual ~ParallelFunc() {}
      // This function is allowed to throw, though YellowAlerts will not
      // propagate out of ProcessManager. If this also wishes to prevent
      // the remainder of the execution loop from occuring, it should
      // set the output.status flag to a negative value prior to throwing.
      virtual void execute(const bool isParallel, const TransferStruct& input, TransferStruct& output) = 0;
};

// Parent-side bookkeeping for one spawned child process.
struct Process
{
   // The child process id, as retained by the parent.
   pid_t pid;
   // File descriptor for the parent's end of the pipe (or socket).
   int fd;
   // Pointer to "file" opened on fd.
   FILE* fp;
   // This flag will be 'true' from the time the child process is sent
   // the signal to execute until all expected output values have been
   // retrieved. It should be 'false' at ALL other times.
   bool resultsPending;
};

// Fans a vector of TransferStruct inputs out to child processes (or runs
// them serially in-process when parallelism is disabled for the context)
// and collects the outputs keyed by input index.
class ProcessManager
{
   public:
      // ProcessManager assumes ownership of childFunc.
      ProcessManager(ParallelFunc* childFunc, const string& contextName);
      ~ProcessManager();

      typedef std::map<int, TransferStruct> ParallelResults;

      // nRequestedProcs is the number of procs the calling code is
      // requesting. It may not get all of them, depending on the user's
      // setting in s_maxProcs. This may throw, but only after it kills
      // all the child processes it created, and calls waitpid on each of them.
      void createProcesses(const int nRequestedProcs);
      // This can only throw a RedAlert. But if the execute function has thrown
      // AND set its corresponding output.status to a negative value, this will
      // prevent the rest of the execution calls from occuring and set each of
      // the remaining output.status flags negative.
      void run(const std::vector<TransferStruct>& input, ParallelResults& output);
      // Terminates all child processes created by createProcesses.
      void killProcesses();
      const string& contextName() const;
      bool isParallel() const;
      static std::map<string,int>& maxProcs();
      static void initMaxProcsMap();

   private:
      // Non-copyable: declared but not defined.
      ProcessManager(const ProcessManager& right);
      ProcessManager& operator=(const ProcessManager& right);

      // Multi-process execution path used by run() when m_doParallel is true.
      void multiProcesses(const std::vector<TransferStruct>& input);
      // This gets executed when doParallel=false is passed to run().
      void singleProcess(const std::vector<TransferStruct>& input);
      // See description for m_processAssignments below.
      void setProcessAssignments(const size_t inputVecSize);
      // Pass execution info from m_processAssignments to the
      // individual child processes. This releases them from
      // the waiting state which they entered in createProcs.
      void startChildProcs();
      // Gathers outputs from all children with resultsPending set.
      void getResults(ParallelResults& results);
      // Serialize/deserialize one TransferStruct over the pipe stream.
      static void transferSend(const TransferStruct& outgoing, FILE* fp);
      static void transferReceive(TransferStruct& incoming, FILE* fp);

      // This vector should remain empty when in single-process mode,
      // ie. when m_doParallel is false.
      std::vector<Process> m_processes;
      ParallelFunc* const m_childFunc;
      ParallelResults m_singleProcResults;
      // When s_maxProcs is less than the vector size of input TransferStructs,
      // processes will have to handle multiple calls to the execution
      // function. m_processAssignments will be resized to nProcs+1 where
      // nProcs is the number of child processes. Each element contains
      // the 0-based starting index of the TransferStruct (with the nProc
      // element containing one past the last index).
      // It will not be resized or set when m_doParallel = false.
      std::vector<size_t> m_processAssignments;
      const string m_context;
      bool m_doParallel;

      // These values are sent through sockets via non-const pointers,
      // which is why they are not consts (or enums for that matter).
      static size_t s_READY;
      static size_t s_QUIT;
      // Map entries <context>:<maxProcs> will be filled by xsParallel handler
      // If context is not found in map (as will happen if 'parallel' has never
      // been called by user), we will assume its maxProcs = 1.
      // The context names entered into this map ought to match the
      // names inserted into m_context for individual ProcessManager objects.
      static std::map<string,int> s_maxProcs;
};

inline const string& ProcessManager::contextName() const
{
   return m_context;
}

inline bool ProcessManager::isParallel() const
{
   return m_doParallel;
}

inline std::map<string,int>& ProcessManager::maxProcs()
{
   return s_maxProcs;
}

#endif
Tom-Pritchard/importer
src/main/java/com/norconex/importer/handler/ExternalHandler.java
/* Copyright 2018-2020 Norconex Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.norconex.importer.handler; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ReflectionToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.norconex.commons.lang.EqualsUtil; import com.norconex.commons.lang.exec.SystemCommand; import com.norconex.commons.lang.exec.SystemCommandException; import com.norconex.commons.lang.io.ICachedStream; import com.norconex.commons.lang.io.InputStreamLineListener; import com.norconex.commons.lang.map.Properties; import com.norconex.commons.lang.map.PropertySetter; import com.norconex.commons.lang.text.RegexFieldValueExtractor; import com.norconex.commons.lang.xml.IXMLConfigurable; import 
com.norconex.commons.lang.xml.XML; import com.norconex.importer.ImporterRuntimeException; import com.norconex.importer.handler.tagger.impl.ExternalTagger; import com.norconex.importer.handler.transformer.impl.ExternalTransformer; import com.norconex.importer.parser.impl.ExternalParser; /** * <p> * Class executing an external application * to extract data from and/or manipulate a document. * </p> * * <h3>Command-line arguments:</h3> * <p> * When constructing the command to launch the external application, it * will look for specific tokens to be replaced by file paths * arguments (in addition to other arguments you may have). * The path arguments are created by this class. They are case-sensitive and * the file they represent are temporary (will be deleted after * they have been dealt with). It is possible to omit one or more tokens to use * standard streams instead where applicable. * </p> * <p> * Tokens supported by this class are: * </p> * <dl> * * <dt><code>${INPUT}</code></dt> * <dd>Path to document to be handled by the external application. * When omitted, the document content * is sent to the external application using the standard input * stream (STDIN).</dd> * * <dt><code>${INPUT_META}</code></dt> * <dd>Path to file containing metadata information available * so far for the document to be handled by the external application. * By default in JSON format. When omitted, no metadata will be made * available to the external application.</dd> * * <dt><code>${OUTPUT}</code></dt> * <dd>Path to document resulting from this external handler. * When omitted, the output content will be read from the external * application standard output (STDOUT).</dd> * * <dt><code>${OUTPUT_META}</code></dt> * <dd>Path to file containing new metadata for the document. * By default, the expected format is JSON. * When omitted, any metadata extraction patterns defined will be * applied against both the external program standard output (STDOUT) * and standard error (STDERR). 
If no patterns are defined, it is * assumed no new metadata resulted from the external application.</dd> * * <dt><code>${REFERENCE}</code></dt> * <dd>Unique reference to the document being handled * (URL, original file system location, etc.). When omitted, * the document reference will not be made available * to the external application.</dd> * * </dl> * * <h3>Metadata file format:</h3> * * <p> * If <code>${INPUT_META}</code> is part of the command, metadata can be * provided to the external application in JSON (default), XML or * Properties format. Those * formats can also be used if <code>${OUTPUT_META}</code> is part of the * command. The formats are: * </p> * * <h4>JSON</h4> * <pre><code class="language-json"> * { * "field1" : [ "value1a", "value1b", "value1c" ], * "field2" : [ "value2" ], * "field3" : [ "value3a", "value3b" ] * } * </code></pre> * * <h4>XML</h4> * <p>Java Properties XML file format, with the exception that * metadata with multiple values are supported, and will have their values * joined by the symbol for record separator (U+241E). * Example: * </p> * <pre><code class="language-xml"> * &lt;?xml version="1.0" encoding="UTF-8"?&gt; * &lt;!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd"&gt; * &lt;properties&gt; * &lt;comment&gt;My Comment&lt;/comment&gt; * &lt;entry key="field1"&gt;value1a\u241Evalue1b\u241Evalue1c&lt;/entry&gt; * &lt;entry key="field2"&gt;value2&lt;/entry&gt; * &lt;entry key="field3"&gt;value3a\u241Evalue3b&lt;/entry&gt; * &lt;/properties&gt; * </code></pre> * * <h4>Properties</h4> * <p>Java Properties standard file format, with the exception that * metadata with multiple values are supported, and will have their values * joined by the symbol for record separator (U+241E). Refer to Java * {@link Properties#loadFromProperties(java.io.Reader)} for * general syntax information. 
* Example: * </p> * <pre><code class="language-properties"> * # My Comment * field1 = value1a\u241Evalue1b\u241Evalue1c * field2 = value2 * field3 = value3a\u241Evalue3b * </code></pre> * * <h3>Metadata extraction patterns:</h3> * <p> * It is possible to specify metadata extraction patterns that will be * applied either on the returned metadata file or from the standard output and * error streams. If <code>${OUTPUT_META}</code> is found in the command, * the output format will be * used to parse the outgoing metadata file. Leave the format to * <code>null</code> to rely on extraction patterns for parsing the output file. * </p> * <p> * When <code>${OUTPUT_META}</code> is omitted, extraction patterns will be * applied to * the external application standard output and standard error streams. If * there are no <code>${OUTPUT_META}</code> and no metadata extraction patterns * are defined, it is assumed the external application did not produce any new * metadata. * </p> * <p> * When using metadata extraction patterns with standard streams, each pattern * is applied on each line returned from STDOUT and STDERR. With each pattern, * there could be a matadata field name supplied. If the pattern does not * contain any match group, the entire matched expression will be used as the * metadata field value. * </p> * <p> * Field names and values can be obtained by using the same regular * expression. This is done by using * match groups in your regular expressions (parenthesis). For each pattern * you define, you can specify which match group hold the field name and * which one holds the value. * Specifying a field match group is optional if a <code>field</code> * is provided. If no match groups are specified, a <code>field</code> * is expected. * </p> * * <h3>Storing values in an existing field</h3> * <p> * If a target field with the same name already exists for a document, * values will be added to the end of the existing value list. 
* It is possible to change this default behavior by supplying a * {@link PropertySetter}. * </p> * * <h3>Environment variables:</h3> * * <p> * Execution environment variables can be set to replace environment variables * defined for the current process. * </p> * * <p> * To extract raw text from files, it is recommended to use an * {@link com.norconex.importer.parser.impl.ExternalParser} instead. * </p> * * {@nx.xml.usage * <command> * /Apps/myapp.exe ${INPUT} ${OUTPUT} ${INPUT_META} ${OUTPUT_META} ${REFERENCE} * </command> * * <metadata * inputFormat="[json|xml|properties]" * outputFormat="[json|xml|properties]" * {@nx.include com.norconex.commons.lang.map.PropertySetter#attributes}> * <!-- Pattern only used when no output format is specified. * Repeat as needed. --> * <pattern {@nx.include com.norconex.commons.lang.text.RegexFieldValueExtractor#attributes}> * (regular expression) * </pattern> * </metadata> * * <environment> * <!-- repeat variable tag as needed --> * <variable name="(environment variable name)"> * (environment variable value) * </variable> * </environment> * * <tempDir> * (Optional directory where to store temporary files used * by this class.) 
* </tempDir> * } * <p>Consuming classes implementing {@link IXMLConfigurable} can use * the XML save/load methods of this class to inherit the above * (which they can support differently).</p> * * @author <NAME> * @see ExternalTagger * @see ExternalTransformer * @see ExternalParser * @since 3.0.0 */ @SuppressWarnings("javadoc") public class ExternalHandler { private static final Logger LOG = LoggerFactory.getLogger(ExternalHandler.class); public static final String TOKEN_INPUT = "${INPUT}"; public static final String TOKEN_OUTPUT = "${OUTPUT}"; public static final String TOKEN_INPUT_META = "${INPUT_META}"; public static final String TOKEN_OUTPUT_META = "${OUTPUT_META}"; public static final String TOKEN_REFERENCE = "${REFERENCE}"; public static final String META_FORMAT_JSON = "json"; public static final String META_FORMAT_XML = "xml"; public static final String META_FORMAT_PROPERTIES = "properties"; private String command; private final List<RegexFieldValueExtractor> patterns = new ArrayList<>(); // Null means inherit from those of java process private Map<String, String> environmentVariables = null; private String metadataInputFormat = META_FORMAT_JSON; private String metadataOutputFormat = META_FORMAT_JSON; private Path tempDir; private PropertySetter onSet; /** * Gets the command to execute. * @return the command */ public String getCommand() { return command; } /** * Sets the command to execute. Make sure to escape spaces in * executable path and its arguments as well as other special command * line characters. * @param command the command */ public void setCommand(String command) { this.command = command; } /** * Gets directory where to store temporary files sent to the external * handler as file paths. * @return temporary directory */ public Path getTempDir() { return tempDir; } /** * Sets directory where to store temporary files sent to the external * handler as file paths. 
* @param tempDir temporary directory */ public void setTempDir(Path tempDir) { this.tempDir = tempDir; } /** * Gets metadata extraction patterns. See class documentation. * @return map of patterns and field names */ public List<RegexFieldValueExtractor> getMetadataExtractionPatterns() { return Collections.unmodifiableList(patterns); } /** * Adds a metadata extraction pattern that will extract the whole text * matched into the given field. * @param field target field to store the matching pattern. * @param pattern the pattern */ public void addMetadataExtractionPattern(String field, String pattern) { if (StringUtils.isAnyBlank(pattern, field)) { return; } addMetadataExtractionPatterns( new RegexFieldValueExtractor(pattern).setToField(field)); } /** * Adds a metadata extraction pattern, which will extract the value from * the specified group index upon matching. * @param field target field to store the matching pattern. * @param pattern the pattern * @param valueGroup which pattern group to return. */ public void addMetadataExtractionPattern( String field, String pattern, int valueGroup) { if (StringUtils.isAnyBlank(pattern, field)) { return; } addMetadataExtractionPatterns(new RegexFieldValueExtractor( pattern).setToField(field).setValueGroup(valueGroup)); } /** * Adds a metadata extraction pattern that will extract matching field * names/values. * @param patterns extraction pattern */ public void addMetadataExtractionPatterns( RegexFieldValueExtractor... patterns) { if (ArrayUtils.isNotEmpty(patterns)) { this.patterns.addAll(Arrays.asList(patterns)); } } /** * Sets metadata extraction patterns. Clears any previously assigned * patterns. * @param patterns extraction pattern */ public void setMetadataExtractionPatterns(RegexFieldValueExtractor... patterns) { this.patterns.clear(); addMetadataExtractionPatterns(patterns); } /** * Gets environment variables. 
* @return environment variables or <code>null</code> if using the current * process environment variables */ public Map<String, String> getEnvironmentVariables() { return environmentVariables; } /** * Sets the environment variables. Clearing any prevously assigned * environment variables. Set <code>null</code> to use * the current process environment variables (default). * @param environmentVariables environment variables */ public void setEnvironmentVariables( Map<String, String> environmentVariables) { this.environmentVariables = environmentVariables; } /** * Adds the environment variables, keeping environment variables previously * assigned. Existing variables of the same name * will be overwritten. To clear all previously assigned variables and use * the current process environment variables, pass * <code>null</code> to * {@link ExternalTransformer#setEnvironmentVariables(Map)}. * @param environmentVariables environment variables */ public void addEnvironmentVariables( Map<String, String> environmentVariables) { if (this.environmentVariables != null) { this.environmentVariables.putAll(environmentVariables); } else { this.environmentVariables = environmentVariables; } } /** * Adds an environment variables to the list of previously * assigned variables (if any). Existing variables of the same name * will be overwritten. Setting a variable with a * <code>null</code> name has no effect while <code>null</code> * values are converted to empty strings. * @param name environment variable name * @param value environment variable value */ public void addEnvironmentVariable(String name, String value) { if (this.environmentVariables == null) { this.environmentVariables = new HashMap<>(); } environmentVariables.put(name, value); } /** * Gets the format of the metadata input file sent to the external * application. One of "json" (default), "xml", or "properties" is expected. * Only applicable when the <code>${INPUT}</code> token * is part of the command. 
* @return metadata input format */ public String getMetadataInputFormat() { return metadataInputFormat; } /** * Sets the format of the metadata input file sent to the external * application. One of "json" (default), "xml", or "properties" is expected. * Only applicable when the <code>${INPUT}</code> token * is part of the command. * @param metadataInputFormat format of the metadata input file */ public void setMetadataInputFormat(String metadataInputFormat) { this.metadataInputFormat = metadataInputFormat; } /** * Gets the format of the metadata output file from the external * application. By default no format is set, and metadata extraction * patterns are used to extract metadata information. * One of "json", "xml", or "properties" is expected. * Only applicable when the <code>${OUTPUT}</code> token * is part of the command. * @return metadata output format */ public String getMetadataOutputFormat() { return metadataOutputFormat; } /** * Sets the format of the metadata output file from the external * application. One of "json" (default), "xml", or "properties" is expected. * Set to <code>null</code> for relying metadata extraction * patterns instead. * Only applicable when the <code>${OUTPUT}</code> token * is part of the command. * @param metadataOutputFormat format of the metadata output file */ public void setMetadataOutputFormat(String metadataOutputFormat) { this.metadataOutputFormat = metadataOutputFormat; } /** * Gets the property setter to use when a metadata value is set. * @return property setter * @since 3.0.0 */ public PropertySetter getOnSet() { return onSet; } /** * Sets the property setter to use when a metadata value is set. * @param onSet property setter * @since 3.0.0 */ public void setOnSet(PropertySetter onSet) { this.onSet = onSet; } /** * Invoke the external application on a document. 
* @param doc document * @param input document content * @param output processed document output stream * @throws ImporterHandlerException failed to handle the document */ public void handleDocument( HandlerDoc doc, InputStream input, OutputStream output) throws ImporterHandlerException { //TODO eliminate output an set it back on doc??? validate(); String cmd = command; final ArgFiles files = new ArgFiles(); Properties externalMeta = new Properties(); //--- Resolve command tokens --- LOG.debug("Command before token replacement: {}", cmd); try { cmd = resolveInputToken(cmd, files, input); cmd = resolveInputMetaToken(cmd, files, input, doc.getMetadata()); cmd = resolveOutputToken(cmd, files, output); cmd = resolveOutputMetaToken(cmd, files, output); cmd = resolveReferenceToken(cmd, doc.getReference()); LOG.debug("Command after token replacement: {}", cmd); //--- Execute Command --- executeCommand(cmd, files, externalMeta, input, output); try { if (files.hasOutputFile() && output != null) { FileUtils.copyFile(files.outputFile.toFile(), output); output.flush(); } if (files.hasOutputMetaFile()) { try (Reader outputMetaReader = Files.newBufferedReader( files.outputMetaFile)) { String format = getMetadataOutputFormat(); if (META_FORMAT_PROPERTIES.equalsIgnoreCase(format)) { externalMeta.loadFromProperties(outputMetaReader); } else if (META_FORMAT_XML.equals(format)) { externalMeta.loadFromXML(outputMetaReader); } else if (META_FORMAT_JSON.equals(format)) { externalMeta.loadFromJSON(outputMetaReader); } else { extractMetaFromFile(outputMetaReader, externalMeta); } } } } catch (IOException e) { throw new ImporterHandlerException( "Could not read command output file. 
Command: " + command, e); } // Set extracted metadata on actual metadata externalMeta.forEach((k, v) -> { PropertySetter.orAppend(onSet).apply(doc.getMetadata(), k, v); }); } finally { files.deleteAll(); } } private int executeCommand( final String cmd, final ArgFiles files, final Properties metadata, final InputStream input, final OutputStream output) throws ImporterHandlerException { SystemCommand systemCommand = new SystemCommand(cmd); systemCommand.setEnvironmentVariables(environmentVariables); systemCommand.addOutputListener(new InputStreamLineListener() { @Override protected void lineStreamed(String type, String line) { if (!files.hasOutputFile() && output != null) { writeLine(line, output); } if (!files.hasOutputMetaFile()) { extractMetaFromLine(line, metadata); } } }); systemCommand.addErrorListener(new InputStreamLineListener() { @Override protected void lineStreamed(String type, String line) { if (!files.hasOutputMetaFile()) { extractMetaFromLine(line, metadata); } } }); try { int exitValue; if (files.hasInputFile() || input == null) { exitValue = systemCommand.execute(); } else { exitValue = systemCommand.execute(input); } if (exitValue != 0) { LOG.error("Bad command exit value: {}", exitValue); } return exitValue; } catch (SystemCommandException e) { throw new ImporterHandlerException( "External transformer failed. 
Command: " + command, e); } } private void writeLine(String line, OutputStream output) { try { output.write(line.getBytes()); output.write('\n'); output.flush(); } catch (IOException e) { throw new ImporterRuntimeException( "Could not write to output", e); } } private synchronized void extractMetaFromFile( Reader reader, Properties metadata) { Iterator<String> it = IOUtils.lineIterator(reader); while (it.hasNext()) { extractMetaFromLine(it.next(), metadata); } } private synchronized void extractMetaFromLine( String line, Properties metadata) { RegexFieldValueExtractor.extractFieldValues(metadata, line, patterns.toArray(RegexFieldValueExtractor.EMPTY_ARRAY)); } private Path createTempFile( Object stream, String name, String suffix) throws ImporterHandlerException { Path tempDirectory; if (tempDir != null) { tempDirectory = tempDir; } else if (stream instanceof ICachedStream) { tempDirectory = ((ICachedStream) stream).getCacheDirectory(); } else { tempDirectory = FileUtils.getTempDirectory().toPath(); } Path file = null; try { if (!tempDirectory.toFile().exists()) { Files.createDirectories(tempDirectory); } file = Files.createTempFile(tempDirectory, name, suffix); return file; } catch (IOException e) { ArgFiles.delete(file); throw new ImporterHandlerException( "Could not create temporary input file.", e); } } private String resolveInputToken(String cmd, ArgFiles files, InputStream is) throws ImporterHandlerException { if (!cmd.contains(TOKEN_INPUT) || is == null) { return cmd; } String newCmd = cmd; files.inputFile = createTempFile(is, "input", ".tmp"); newCmd = StringUtils.replace(newCmd, TOKEN_INPUT, files.inputFile.toAbsolutePath().toString()); try { FileUtils.copyInputStreamToFile(is, files.inputFile.toFile()); return newCmd; } catch (IOException e) { ArgFiles.delete(files.inputFile); throw new ImporterHandlerException( "Could not create temporary input file.", e); } } private String resolveInputMetaToken( String cmd, ArgFiles files, InputStream is, Properties 
meta) throws ImporterHandlerException { if (!cmd.contains(TOKEN_INPUT_META)) { return cmd; } String newCmd = cmd; files.inputMetaFile = createTempFile( is, "input-meta", "." + StringUtils.defaultIfBlank( getMetadataInputFormat(), META_FORMAT_JSON)); newCmd = StringUtils.replace(newCmd, TOKEN_INPUT_META, files.inputMetaFile.toAbsolutePath().toString()); try (Writer fw = Files.newBufferedWriter(files.inputMetaFile)) { String format = getMetadataInputFormat(); if (META_FORMAT_PROPERTIES.equalsIgnoreCase(format)) { meta.storeToProperties(fw); } else if (META_FORMAT_XML.equals(format)) { meta.storeToXML(fw); } else { meta.storeToJSON(fw); } fw.flush(); return newCmd; } catch (IOException e) { ArgFiles.delete(files.inputMetaFile); throw new ImporterHandlerException( "Could not create temporary input metadata file.", e); } } private String resolveOutputToken( String cmd, ArgFiles files, OutputStream os) throws ImporterHandlerException { if (!cmd.contains(TOKEN_OUTPUT) || os == null) { return cmd; } String newCmd = cmd; files.outputFile = createTempFile(os, "output", ".tmp"); newCmd = StringUtils.replace(newCmd, TOKEN_OUTPUT, files.outputFile.toAbsolutePath().toString()); return newCmd; } private String resolveOutputMetaToken( String cmd, ArgFiles files, OutputStream os) throws ImporterHandlerException { if (!cmd.contains(TOKEN_OUTPUT_META)) { return cmd; } String newCmd = cmd; files.outputMetaFile = createTempFile( os, "output-meta", "." 
+ StringUtils.defaultIfBlank( getMetadataOutputFormat(), ".tmp")); newCmd = StringUtils.replace(newCmd, TOKEN_OUTPUT_META, files.outputMetaFile.toAbsolutePath().toString()); return newCmd; } private String resolveReferenceToken(String cmd, String reference) { if (!cmd.contains(TOKEN_REFERENCE)) { return cmd; } return StringUtils.replace(cmd, TOKEN_REFERENCE, reference); } private void validate() throws ImporterHandlerException { if (StringUtils.isBlank(command)) { throw new ImporterHandlerException("External command missing."); } } public void loadHandlerFromXML(XML xml) { setCommand(xml.getString("command", command)); setTempDir(xml.getPath("tempDir", tempDir)); setMetadataInputFormat(xml.getString( "metadata/@inputFormat", metadataInputFormat)); setMetadataOutputFormat(xml.getString( "metadata/@outputFormat", metadataOutputFormat)); setOnSet(xml.getEnum("metadata/@onSet", PropertySetter.class, onSet)); List<XML> nodes = xml.getXMLList("metadata/pattern"); for (XML node : nodes) { RegexFieldValueExtractor ex = new RegexFieldValueExtractor(); ex.loadFromXML(node); addMetadataExtractionPatterns(ex); } List<XML> xmlEnvs = xml.getXMLList("environment/variable"); if (!xmlEnvs.isEmpty()) { Map<String, String> vars = new HashMap<>(); for (XML node : xmlEnvs) { vars.put(node.getString("@name"), node.getString(".")); } setEnvironmentVariables(vars); } } public void saveHandlerToXML(XML xml) { xml.addElement("command", command); xml.addElement("tempDir", tempDir); if (!getMetadataExtractionPatterns().isEmpty()) { XML metaXML = xml.addElement("metadata") .setAttribute("inputFormat", metadataInputFormat) .setAttribute("outputFormat", metadataOutputFormat) .setAttribute("onSet", onSet); for (RegexFieldValueExtractor rfe : patterns) { rfe.saveToXML(metaXML.addElement("pattern")); } } if (getEnvironmentVariables() != null) { XML envXML = xml.addElement("environment"); for (Entry<String, String> entry : getEnvironmentVariables().entrySet()) { envXML.addElement("variable", 
entry.getValue()) .setAttribute("name", entry.getKey()); } } } @Override public boolean equals(final Object other) { if (!(other instanceof ExternalHandler)) { return false; } ExternalHandler castOther = (ExternalHandler) other; return EqualsBuilder.reflectionEquals( this, other, "environmentVariables") && EqualsUtil.equalsMap( getEnvironmentVariables(), castOther.getEnvironmentVariables()); } @Override public int hashCode() { return HashCodeBuilder.reflectionHashCode(this); } @Override public String toString() { return new ReflectionToStringBuilder( this, ToStringStyle.SHORT_PREFIX_STYLE).toString(); } static class ArgFiles { Path inputFile; Path inputMetaFile; Path outputFile; Path outputMetaFile; boolean hasInputFile() { return inputFile != null; } boolean hasInputMetaFile() { return inputMetaFile != null; } boolean hasOutputFile() { return outputFile != null; } boolean hasOutputMetaFile() { return outputMetaFile != null; } void deleteAll() { delete(inputFile); delete(inputMetaFile); delete(outputFile); delete(outputMetaFile); } static void delete(Path file) { if (file != null) { try { java.nio.file.Files.delete(file); } catch (IOException e) { LOG.warn("Could not delete temporary file: " + file.toAbsolutePath(), e); } } } } }
DataDog/istio
galley/pkg/resource/accessor.go
<filename>galley/pkg/resource/accessor.go<gh_stars>1-10 // Copyright 2018 Istio Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package resource import ( "reflect" "sync" "time" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/dynamic" "k8s.io/client-go/tools/cache" "istio.io/istio/galley/pkg/change" "istio.io/istio/galley/pkg/common" "istio.io/istio/pkg/log" ) // accessor is a data access object for a particular type of custom resource, identified by its name, // api group and version. type accessor struct { // Lock for changing the running state of the accessor stateLock sync.Mutex // name, API group and version of the resource. name string gv schema.GroupVersion resyncPeriod time.Duration // client for accessing the resources dynamically client dynamic.Interface // The dynamic resource interface for accessing custom resources dynamically. iface dynamic.ResourceInterface // metadata about the resource (i.e. name, kind, group, version etc.) apiResource *metav1.APIResource // stopCh is used to quiesce the background activity during shutdown stopCh chan struct{} // SharedIndexInformer for watching/caching resources informer cache.SharedIndexInformer // The processor function to invoke to send the incoming changes. 
processor changeProcessorFn } type changeProcessorFn func(c *change.Info) // creates a new accessor instance. func newAccessor(kube common.Kube, resyncPeriod time.Duration, name string, gv schema.GroupVersion, kind string, listKind string, processor changeProcessorFn) (*accessor, error) { log.Debugf("Creating a new resource accessor for: name='%s', gv:'%v'", name, gv) var client dynamic.Interface client, err := kube.DynamicInterface(gv, kind, listKind) if err != nil { return nil, err } apiResource := &metav1.APIResource{ Name: name, Group: gv.Group, Version: gv.Version, Namespaced: true, Kind: kind, } iface := client.Resource(apiResource, "") return &accessor{ resyncPeriod: resyncPeriod, name: name, gv: gv, iface: iface, client: client, processor: processor, apiResource: apiResource, }, nil } func (s *accessor) start() { s.stateLock.Lock() defer s.stateLock.Unlock() if s.stopCh != nil { log.Errorf("already synchronizing resources: name='%s', gv='%v'", s.name, s.gv) return } log.Debugf("Starting accessor for %s(%v)", s.name, s.gv) s.stopCh = make(chan struct{}) s.informer = cache.NewSharedIndexInformer( &cache.ListWatch{ ListFunc: func(options metav1.ListOptions) (runtime.Object, error) { return s.iface.List(options) }, WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) { options.Watch = true return s.iface.Watch(options) }, }, &unstructured.Unstructured{}, s.resyncPeriod, cache.Indexers{}) s.informer.AddEventHandler(cache.ResourceEventHandlerFuncs{ AddFunc: func(obj interface{}) { s.handleEvent(change.Add, obj) }, UpdateFunc: func(old, new interface{}) { newRes := new.(*unstructured.Unstructured) oldRes := old.(*unstructured.Unstructured) if newRes.GetResourceVersion() == oldRes.GetResourceVersion() { // Periodic resync will send update events for all known resources. // Two different versions of the same resource will always have different RVs. 
return } s.handleEvent(change.Update, new) }, DeleteFunc: func(obj interface{}) { s.handleEvent(change.Delete, obj) }, }) // start CRD shared informer background process. go s.informer.Run(s.stopCh) // Wait for CRD cache sync. if !cache.WaitForCacheSync(s.stopCh, s.informer.HasSynced) { log.Warnf("Shutting down while waiting for accessor cache sync %s(%v)", s.name, s.gv) } log.Debugf("Completed cache sync and listening. %s(%v)", s.name, s.gv) } func (s *accessor) stop() { s.stateLock.Lock() defer s.stateLock.Unlock() if s.stopCh == nil { log.Errorf("already stopped") return } close(s.stopCh) s.stopCh = nil } func (s *accessor) handleEvent(t change.Type, obj interface{}) { object, ok := obj.(metav1.Object) if !ok { var tombstone cache.DeletedFinalStateUnknown if tombstone, ok = obj.(cache.DeletedFinalStateUnknown); !ok { log.Errorf("error decoding object, invalid type: %v", reflect.TypeOf(obj)) return } if object, ok = tombstone.Obj.(metav1.Object); !ok { log.Errorf("error decoding object tombstone, invalid type: %v", reflect.TypeOf(tombstone.Obj)) return } log.Infof("Recovered deleted object '%s' from tombstone", object.GetName()) } key, err := cache.MetaNamespaceKeyFunc(object) if err != nil { log.Errorf("Error creating a MetaNamespaceKey from object: %v", object) return } info := &change.Info{ Type: t, Name: key, GroupVersion: s.gv, } s.processor(info) }
iostream04/sunbird-utils
common-util/src/test/java/org/sunbird/common/request/CommonRequestValidatorTest.java
<filename>common-util/src/test/java/org/sunbird/common/request/CommonRequestValidatorTest.java /** */ package org.sunbird.common.request; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.sunbird.common.exception.ProjectCommonException; import org.sunbird.common.models.util.JsonKey; import org.sunbird.common.models.util.ProjectUtil; import org.sunbird.common.responsecode.ResponseCode; /** @author Manzarul */ public class CommonRequestValidatorTest { @Test public void enrollCourseValidationSuccess() { Request request = new Request(); boolean response = false; Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.COURSE_ID, "do_1233343"); request.setRequest(requestObj); try { RequestValidator.validateEnrollCourse(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } assertEquals(true, response); } @Test public void enrollCourseValidationWithOutCourseId() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); request.setRequest(requestObj); request.setRequest(requestObj); try { RequestValidator.validateEnrollCourse(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.courseIdRequiredError.getErrorCode(), e.getCode()); } } @Test public void validateUpdateContentSuccess() { Request request = new Request(); boolean response = false; List<Map<String, Object>> listOfMap = new ArrayList<>(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CONTENT_ID, "do_1233343"); requestObj.put(JsonKey.STATUS, "Completed"); listOfMap.add(requestObj); Map<String, Object> innerMap = new HashMap<>(); innerMap.put(JsonKey.CONTENTS, listOfMap); request.setRequest(innerMap); try { RequestValidator.validateUpdateContent(request); response = 
true; } catch (ProjectCommonException e) { Assert.assertNull(e); } assertEquals(true, response); } @Test public void validateUpdateContentWithContentIdAsNull() { Request request = new Request(); boolean response = false; List<Map<String, Object>> listOfMap = new ArrayList<>(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CONTENT_ID, null); requestObj.put(JsonKey.STATUS, "Completed"); listOfMap.add(requestObj); Map<String, Object> innerMap = new HashMap<>(); innerMap.put(JsonKey.CONTENTS, listOfMap); request.setRequest(innerMap); try { RequestValidator.validateUpdateContent(request); response = true; } catch (ProjectCommonException e) { Assert.assertNotNull(e); } assertEquals(false, response); } @Test public void validateUpdateContentWithOutContentId() { Request request = new Request(); List<Map<String, Object>> listOfMap = new ArrayList<>(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.STATUS, "Completed"); listOfMap.add(requestObj); Map<String, Object> innerMap = new HashMap<>(); innerMap.put(JsonKey.CONTENTS, listOfMap); request.setRequest(innerMap); try { RequestValidator.validateUpdateContent(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.contentIdRequiredError.getErrorCode(), e.getCode()); } } @Test public void validateUpdateContentWithOutStatus() { Request request = new Request(); List<Map<String, Object>> listOfMap = new ArrayList<>(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CONTENT_ID, "do_1233343"); listOfMap.add(requestObj); Map<String, Object> innerMap = new HashMap<>(); innerMap.put(JsonKey.CONTENTS, listOfMap); request.setRequest(innerMap); try { RequestValidator.validateUpdateContent(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); 
assertEquals(ResponseCode.contentStatusRequired.getErrorCode(), e.getCode()); } } @Test public void validateUpdateContentWithWrongType() { Request request = new Request(); List<Map<String, Object>> listOfMap = new ArrayList<>(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CONTENT_ID, "do_1233343"); Map<String, Object> innerMap = new HashMap<>(); innerMap.put(JsonKey.CONTENTS, listOfMap); request.setRequest(innerMap); try { RequestValidator.validateUpdateContent(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.contentIdRequiredError.getErrorCode(), e.getCode()); } } @Test public void validateRegisterClientTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CLIENT_NAME, ""); request.setRequest(requestObj); try { RequestValidator.validateRegisterClient(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidClientName.getErrorCode(), e.getCode()); } } @Test public void validateRegisterClientSuccessTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CLIENT_NAME, "1234"); request.setRequest(requestObj); try { RequestValidator.validateRegisterClient(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidClientName.getErrorCode(), e.getCode()); } } @Test public void validateUpdateClientKeyTest() { try { RequestValidator.validateUpdateClientKey("1234", ""); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidRequestData.getErrorCode(), e.getCode()); } } @Test public void validateUpdateClientKeyWithSuccessTest() { try 
{ RequestValidator.validateUpdateClientKey("1234", "test123"); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidRequestData.getErrorCode(), e.getCode()); } } @Test public void validateClientIdTest() { try { RequestValidator.validateClientId(""); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidClientId.getErrorCode(), e.getCode()); } } @Test public void validateFileUploadTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.CONTAINER, ""); request.setRequest(requestObj); try { RequestValidator.validateFileUpload(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.storageContainerNameMandatory.getErrorCode(), e.getCode()); } } @Test public void validateSendMailRecipientUserTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.SUBJECT, "test123"); requestObj.put(JsonKey.BODY, "test"); List<String> data = new ArrayList<>(); data.add("<EMAIL>"); requestObj.put(JsonKey.RECIPIENT_EMAILS, data); requestObj.put(JsonKey.RECIPIENT_USERIDS, new ArrayList<>()); request.setRequest(requestObj); try { RequestValidator.validateSendMail(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.recipientAddressError.getErrorCode(), e.getCode()); } } @Test public void validateSendMailRecipientEmailTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.SUBJECT, "test123"); requestObj.put(JsonKey.BODY, "test"); requestObj.put(JsonKey.RECIPIENT_EMAILS, null); request.setRequest(requestObj); try { 
RequestValidator.validateSendMail(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.recipientAddressError.getErrorCode(), e.getCode()); } } @Test public void validateSendMailBodyTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.SUBJECT, "test123"); requestObj.put(JsonKey.BODY, ""); request.setRequest(requestObj); try { RequestValidator.validateSendMail(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.emailBodyError.getErrorCode(), e.getCode()); } } @Test public void validateSendMailSubjectTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.SUBJECT, ""); request.setRequest(requestObj); try { RequestValidator.validateSendMail(request); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.emailSubjectError.getErrorCode(), e.getCode()); } } @Test public void validateEnrollmentTypeWithEmptyType() { try { RequestValidator.validateEnrolmentType(""); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.enrolmentTypeRequired.getErrorCode(), e.getCode()); } } @Test public void validateEnrollmentTypeWithWrongType() { try { RequestValidator.validateEnrolmentType("test"); } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.enrolmentIncorrectValue.getErrorCode(), e.getCode()); } } @Test public void validateEnrollmentOpenType() { boolean response = false; try { RequestValidator.validateEnrolmentType(ProjectUtil.EnrolmentType.open.getVal()); response = true; } catch 
(ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateEnrollmentInviteType() { boolean response = false; try { RequestValidator.validateEnrolmentType(ProjectUtil.EnrolmentType.inviteOnly.getVal()); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateKeyuclaockSyncRequest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.OPERATION_FOR, "keycloak"); requestObj.put(JsonKey.OBJECT_TYPE, JsonKey.USER); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateSyncRequest(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateSyncRequestwithInvalidObjType() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.OPERATION_FOR, "not keycloack"); requestObj.put(JsonKey.OBJECT_TYPE, null); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateSyncRequest(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.dataTypeError.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateSyncRequestwithInvalidObjTypeValue() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.OPERATION_FOR, "not keycloack"); List<String> objectLsit = new ArrayList<>(); objectLsit.add("testval"); requestObj.put(JsonKey.OBJECT_TYPE, objectLsit); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateSyncRequest(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); 
assertEquals(ResponseCode.invalidObjectType.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateAddMembersSuccessTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, "Ilimi"); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); List<String> roles = new ArrayList<>(); roles.add("PUBLIC"); roles.add("CONTENT-CREATOR"); requestObj.put(JsonKey.ROLES, roles); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateAddMember(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateAddMembersSuccessWithOrgIdTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.ORGANISATION_ID, "org-test"); List<String> roles = new ArrayList<>(); roles.add("PUBLIC"); roles.add("CONTENT-CREATOR"); requestObj.put(JsonKey.ROLES, roles); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateAddMember(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateAddMembersProviderMissingTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, ""); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); List<String> roles = new ArrayList<>(); roles.add("PUBLIC"); roles.add("CONTENT-CREATOR"); requestObj.put(JsonKey.ROLES, roles); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateAddMember(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); 
assertEquals(ResponseCode.sourceAndExternalIdValidationError.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateAddMembersInvalidRoleDataTypeTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, "ilimi"); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); requestObj.put(JsonKey.ROLES, "roles"); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateAddMember(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.roleRequired.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateAddMembersUserIdMissingTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, "ilimi"); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); List<String> roles = new ArrayList<>(); roles.add("PUBLIC"); roles.add("CONTENT-CREATOR"); requestObj.put(JsonKey.ROLES, roles); requestObj.put(JsonKey.USER_ID, ""); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateAddMember(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.userIdRequired.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateSystemSettingTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PHONE_UNIQUE, true); requestObj.put(JsonKey.EMAIL_UNIQUE, true); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUpdateSystemSettingsRequest(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test 
public void validateSystemSettingFailureTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put("notListedSettingVal", true); requestObj.put(JsonKey.EMAIL_UNIQUE, true); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUpdateSystemSettingsRequest(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidPropertyError.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateUserOrgSuccessTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, "Ilimi"); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUserOrg(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateUserOrgSuccessWithOrgIdTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.ORGANISATION_ID, "org-test"); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUserOrg(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateUserOrgProviderMissingTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, ""); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); requestObj.put(JsonKey.USER_ID, "userId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUserOrg(request); response = true; } catch (ProjectCommonException e) { 
assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.sourceAndExternalIdValidationError.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateUserOrgUserIdMissingTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.PROVIDER, "ilimi"); requestObj.put(JsonKey.EXTERNAL_ID, "il-01"); requestObj.put(JsonKey.USER_ID, ""); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUserOrg(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.userIdRequired.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateupdateOrgType() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.NAME, "orgtypeName"); requestObj.put(JsonKey.ID, "orgtypeId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUpdateOrgType(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateupdateOrgTypeWithOutName() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.NAME, ""); requestObj.put(JsonKey.ID, "orgtypeId"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUpdateOrgType(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.orgTypeMandatory.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateupdateOrgTypeWithOutID() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.NAME, "orgTypeName"); 
requestObj.put(JsonKey.ID, ""); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateUpdateOrgType(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.orgTypeIdRequired.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateVerifyUserTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.LOGIN_ID, "username@provider"); request.setRequest(requestObj); boolean response = false; try { UserRequestValidator.validateVerifyUser(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateVerifyUserWithOutLoginIdTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.LOGIN_ID, ""); request.setRequest(requestObj); boolean response = false; try { UserRequestValidator.validateVerifyUser(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.loginIdRequired.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateCreateOrgTypeTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.NAME, "OrgTypeName"); request.setRequest(requestObj); boolean response = false; try { RequestValidator.validateCreateOrgType(request); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateCreateOrgTypeWithOutNameTest() { Request request = new Request(); Map<String, Object> requestObj = new HashMap<>(); requestObj.put(JsonKey.NAME, null); request.setRequest(requestObj); boolean response = false; try { 
RequestValidator.validateCreateOrgType(request); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.orgTypeMandatory.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateGetClientKeyTest() { boolean response = false; try { RequestValidator.validateGetClientKey("clientId", "clientType"); response = true; } catch (ProjectCommonException e) { Assert.assertNull(e); } Assert.assertTrue(response); } @Test public void validateGetClientKeyWithEmptyClientIdTest() { boolean response = false; try { RequestValidator.validateGetClientKey("", "clientType"); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidClientId.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } @Test public void validateGetClientKeyWithEmptyClientTypeTest() { boolean response = false; try { RequestValidator.validateGetClientKey("clientId", ""); response = true; } catch (ProjectCommonException e) { assertEquals(ResponseCode.CLIENT_ERROR.getResponseCode(), e.getResponseCode()); assertEquals(ResponseCode.invalidRequestData.getErrorCode(), e.getCode()); } Assert.assertFalse(response); } }
gbf0722/gmall
gmall-search/src/test/java/com/atguigu/gmall/search/GmallSearchApplicationTests.java
package com.atguigu.gmall.search; import com.atguigu.core.bean.QueryCondition; import com.atguigu.core.bean.Resp; import com.atguigu.gmall.pms.entity.*; import com.atguigu.gmall.search.feign.GmallPmsFeign; import com.atguigu.gmall.search.feign.GmallWmsFeign; import com.atguigu.gmall.search.pojo.Goods; import com.atguigu.gmall.search.pojo.SearchAttr; import com.atguigu.gmall.search.repository.GoodsRepository; import com.atguigu.gmall.wms.entity.WareSkuEntity; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate; import org.springframework.util.CollectionUtils; import java.util.List; import java.util.stream.Collectors; @SpringBootTest class GmallSearchApplicationTests { @Autowired private ElasticsearchRestTemplate restTemplate; @Autowired private GmallPmsFeign pmsApi; @Autowired private GmallWmsFeign wmsApi; @Autowired private GoodsRepository goodsRepository; @Test void contextLoads() { //创建索引和映射 this.restTemplate.createIndex(Goods.class); this.restTemplate.putMapping(Goods.class); } @Test //导入数据的测试 void importDate() { Long pageNum = 1L; Long pageSize = 100L; do { //1.分页查询spu QueryCondition queryCondition = new QueryCondition(); queryCondition.setPage(pageNum); queryCondition.setLimit(pageSize); Resp<List<SpuInfoEntity>> listResp = this.pmsApi.querySpusByPage(queryCondition); List<SpuInfoEntity> spus = listResp.getData(); //2.遍历spu,查询sku; spus.forEach(spuInfoEntity -> { Resp<List<SkuInfoEntity>> skuResp = pmsApi.querySkuBySpuId(spuInfoEntity.getId()); List<SkuInfoEntity> skuInfoEntities = skuResp.getData(); if (!CollectionUtils.isEmpty(skuInfoEntities)) { //把sku转化为goods对象 List<Goods> goodlist = skuInfoEntities.stream().map(skuInfoEntity -> { Goods goods = new Goods(); //查询搜索属性和值 Resp<List<ProductAttrValueEntity>> attrResp = this.pmsApi.querySearchAttrValue(spuInfoEntity.getId()); 
List<ProductAttrValueEntity> attrValueEntities = attrResp.getData(); if (!CollectionUtils.isEmpty(attrValueEntities)) { List<SearchAttr> searchAttrs = attrValueEntities.stream().map(productAttrValueEntity -> { SearchAttr searchAttr = new SearchAttr(); searchAttr.setAttrId(productAttrValueEntity.getAttrId()); searchAttr.setAttrName(productAttrValueEntity.getAttrName()); searchAttr.setAttrValue(productAttrValueEntity.getAttrValue()); return searchAttr; }).collect(Collectors.toList()); goods.setAttrs(searchAttrs); } // 查询品牌 Resp<BrandEntity> brandEntityResp = this.pmsApi.brandInfo(skuInfoEntity.getBrandId()); BrandEntity brandEntity = brandEntityResp.getData(); if (brandEntity != null) { goods.setBrandId(skuInfoEntity.getBrandId()); goods.setBrandName(brandEntity.getName()); } //查询分类 Resp<CategoryEntity> categoryEntityResp = this.pmsApi.catInfo(skuInfoEntity.getCatalogId()); CategoryEntity categoryEntity = categoryEntityResp.getData(); if (categoryEntity != null) { goods.setCategoryId(skuInfoEntity.getCatalogId()); goods.setCategoryName(categoryEntity.getName()); } goods.setCreateTime(spuInfoEntity.getCreateTime()); goods.setPic(skuInfoEntity.getSkuDefaultImg()); goods.setPrice(skuInfoEntity.getPrice().doubleValue()); goods.setSale(0L); goods.setSkuId(skuInfoEntity.getSkuId()); //查询库存信息 Resp<List<WareSkuEntity>> wareSkuResp = this.wmsApi.queryWareSkuBySkuId(skuInfoEntity.getSkuId()); List<WareSkuEntity> wareSkuEntities = wareSkuResp.getData(); if (!CollectionUtils.isEmpty(wareSkuEntities)) { Boolean flag=false; flag = wareSkuEntities.stream().anyMatch(wareSkuEntity -> { return wareSkuEntity.getStock()>0; }); goods.setStore(flag); System.out.println(goods.getStore()); } goods.setTitle(skuInfoEntity.getSkuTitle()); return goods; }).collect(Collectors.toList()); this.goodsRepository.saveAll(goodlist); } }); //导入索引库 pageSize = (long) spus.size(); pageNum++; } while (pageSize == 100); } }
jcserracampos/catarse.js
legacy/src/vms/projects-subscription-report-vm.js
import m from 'mithril'; import { commonPayment } from '../api'; import models from '../models'; const { replaceDiacritics } = window; const vm = commonPayment.filtersVM({ status: 'in', search_index: 'fts(portuguese)', reward_external_id: 'eq', payment_method: 'eq', project_id: 'eq', total_paid: 'between', paid_count: 'between' }), paramToString = function (p) { return (p || '').toString().trim(); }; vm.total_paid.gte(0); vm.total_paid.lte(0); vm.paid_count.gte(0); vm.paid_count.lte(0); vm.status(''); vm.payment_method(''); vm.order({ created_at: 'desc' }); vm.search_index.toFilter = function () { const filter = paramToString(vm.search_index()); return filter && replaceDiacritics(filter) || undefined; }; vm.getAllSubscriptions = (filterVM) => { models.userSubscription.pageSize(false); const allSubs = commonPayment.loaderWithToken( models.userSubscription.getPageOptions(filterVM.parameters())).load(); models.userSubscription.pageSize(9); return allSubs; }; vm.withNullParameters = () => { const withNullVm = commonPayment.filtersVM({ status: 'in', reward_external_id: 'is', search_index: 'fts(portuguese)', payment_method: 'eq', project_id: 'eq' }); withNullVm.order(vm.order()); withNullVm.status(vm.status()); withNullVm.reward_external_id(vm.reward_external_id()); withNullVm.payment_method(vm.payment_method()); withNullVm.search_index(vm.search_index()); withNullVm.project_id(vm.project_id()); return withNullVm.parameters(); }; export default vm;
SEED0228/kGenProg
src/main/java/jp/kusumotolab/kgenprog/ga/validation/LimitedNumberCodeValidation.java
package jp.kusumotolab.kgenprog.ga.validation;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import jp.kusumotolab.kgenprog.ga.variant.Base;
import jp.kusumotolab.kgenprog.ga.variant.Gene;
import jp.kusumotolab.kgenprog.project.test.TestResults;

/**
 * A validation strategy that uses the test pass rate directly as the fitness value.
 * However, once a variant's remaining evaluation count is exhausted, its fitness drops to 0.
 */
public class LimitedNumberCodeValidation implements SourceCodeValidation {

  private static final int DEFAULT_CAPACITY = 100;

  // Number of evaluations granted to each newly created fitness value.
  private final int capacity;
  // Cache of fitness values keyed by a gene's base sequence.
  private final Map<List<Base>, LimitedNumberSimpleFitness> basesFitnessMap;

  /**
   * Initializes with the default evaluation count (100).
   */
  public LimitedNumberCodeValidation() {
    this(DEFAULT_CAPACITY);
  }

  /**
   * Initializes with the given evaluation count.
   *
   * @param capacity the number of evaluations allowed
   */
  public LimitedNumberCodeValidation(final int capacity) {
    this.capacity = capacity;
    this.basesFitnessMap = new HashMap<>();
  }

  /**
   * @param input the information used to compute the fitness value
   * @return the fitness value
   */
  @Override
  public Fitness exec(final Input input) {
    final Gene targetGene = input.getGene();
    final List<Base> targetBases = targetGene.getBases();

    // If the fitness of the target gene has already been computed, avoid recomputing it.
    if (basesFitnessMap.containsKey(targetBases)) {
      return basesFitnessMap.get(targetBases);
    }

    final TestResults testResults = input.getTestResults();
    final double successRate = testResults.getSuccessRate();
    final List<Base> parentBases = getParentGene(targetGene);

    // If the parent's fitness is at least as high as this variant's, reduce the
    // parent's remaining evaluation capacity.
    // We check whether parentBases exists in the map because variants produced by
    // crossover are currently out of scope.
    // TODO: support variants generated by crossover
    if (basesFitnessMap.containsKey(parentBases)) {
      final LimitedNumberSimpleFitness parentFitness = basesFitnessMap.get(parentBases);
      if (successRate <= parentFitness.getNormalizedValue()) {
        parentFitness.reduceCapacity();
      }
    }

    final LimitedNumberSimpleFitness fitness =
        new LimitedNumberSimpleFitness(successRate, capacity);
    basesFitnessMap.put(targetBases, fitness);
    return fitness;
  }

  // Parent = the same base sequence minus the most recently appended base.
  // TODO: should be moved to the Gene class once crossover is supported
  private List<Base> getParentGene(final Gene gene) {
    final List<Base> bases = new ArrayList<>(gene.getBases());
    bases.remove(bases.size() - 1);
    return bases;
  }
}
mathewhodson/watchman
watchman/tests/integration/test_find.py
# vim:ts=4:sw=4:et:
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# no unicode literals
from __future__ import absolute_import, division, print_function

import json
import os
import os.path

import WatchmanTestCase


@WatchmanTestCase.expand_matrix
class TestFind(WatchmanTestCase.WatchmanTestCase):
    def test_find(self):
        """Verify that watchman's file list for a watched root tracks file
        and directory creations, deletions and renames, and that deleting
        the watch actually removes it.

        The statement order matters: each assertion waits for watchman to
        observe the filesystem mutations made immediately before it.
        """
        root = self.mkdtemp()
        self.touchRelative(root, "foo.c")
        self.touchRelative(root, "bar.txt")

        self.watchmanCommand("watch", root)

        self.assertFileList(root, ["foo.c", "bar.txt"])

        # Make sure we correctly observe deletions
        os.unlink(os.path.join(root, "bar.txt"))
        self.assertFileList(root, ["foo.c"])

        # touch -> delete -> touch, should show up as exists
        self.touchRelative(root, "bar.txt")
        self.assertFileList(root, ["foo.c", "bar.txt"])

        os.unlink(os.path.join(root, "bar.txt"))

        # A moderately more complex set of changes
        os.mkdir(os.path.join(root, "adir"))
        os.mkdir(os.path.join(root, "adir", "subdir"))
        self.touchRelative(root, "adir", "subdir", "file")
        # Rename a directory: the contained file must follow it.
        os.rename(
            os.path.join(root, "adir", "subdir"), os.path.join(root, "adir", "overhere")
        )

        self.assertFileList(
            root, ["adir", "adir/overhere", "adir/overhere/file", "foo.c"]
        )

        # Rename the top-level directory; the whole subtree must follow.
        os.rename(os.path.join(root, "adir"), os.path.join(root, "bdir"))

        self.assertFileList(
            root, ["bdir", "bdir/overhere", "bdir/overhere/file", "foo.c"]
        )

        # Removing the watch should (eventually) drop the root from the
        # server's watch list.
        self.assertTrue(self.rootIsWatched(root))

        self.watchmanCommand("watch-del", root)

        self.waitFor(lambda: not self.rootIsWatched(root))
        self.assertFalse(self.rootIsWatched(root))
drmatthews/slidecrop_pyqt
slidecrop/gui/segmentation_ui.py
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'slidecrop\resources\segmentation.ui'
#
# Created by: PyQt5 UI code generator 5.12.1
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_MainWindow(object):
    # Auto-generated layout class for the "Segmentation" window: a channel
    # selector combo box stacked above a right-aligned "Run" button.
    # Do not edit by hand — regenerate from segmentation.ui instead.

    def setupUi(self, MainWindow):
        # Build the widget tree and geometry for MainWindow.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(232, 90)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/newPrefix/icon.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        MainWindow.setWindowIcon(icon)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(9, 9, 211, 71))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Channel selector at the top of the vertical stack.
        self.channel_combo = QtWidgets.QComboBox(self.verticalLayoutWidget)
        self.channel_combo.setObjectName("channel_combo")
        self.verticalLayout.addWidget(self.channel_combo)
        # Vertical spacer pushes the button row to the bottom.
        spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Horizontal spacer right-aligns the Run button.
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.segment_button = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.segment_button.setObjectName("segment_button")
        self.horizontalLayout.addWidget(self.segment_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Apply translatable UI strings (window title and button label).
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Segmentation"))
        self.segment_button.setText(_translate("MainWindow", "Run"))


from .. resources import resources_rc
openMF/egalite-web-service
src/main/java/com/bfsi/mfi/dao/CustomerInfoDao.java
package com.bfsi.mfi.dao; import java.util.List; import com.bfsi.mfi.entity.CustomerDocumentInfo; import com.bfsi.mfi.entity.CustomerEnrolmentInfo; import com.bfsi.mfi.entity.CustomerInfo; public interface CustomerInfoDao extends MaintenanceDao<CustomerInfo> { void insertCustomerInfo(CustomerInfo customerInfo); String checkReceivedId(String id); void deleteIdRecord(List<String> pendingIdList); void insertIdStatus(String id, String syncsessionid); void updateEnrollmentIdStatus(List<String> enrollmentIdList); void submitCustPersonalInfo(CustomerEnrolmentInfo p_custInfo); void submitCustomerDocInfo(CustomerDocumentInfo p_custDocInfo); }
Amity-Network/amity
contrib/depends/SDKs/MacOSX10.11.sdk/System/Library/Frameworks/AppKit.framework/Versions/C/Headers/NSController.h
<filename>contrib/depends/SDKs/MacOSX10.11.sdk/System/Library/Frameworks/AppKit.framework/Versions/C/Headers/NSController.h /* NSController.h Application Kit Copyright (c) 2002-2015, Apple Inc. All rights reserved. */ #import <AppKit/NSKeyValueBinding.h> #import <CoreFoundation/CoreFoundation.h> NS_ASSUME_NONNULL_BEGIN @class NSMutableArray, NSMutableDictionary, NSMutableSet; @interface NSController : NSObject <NSCoding> { @private #if !__LP64__ void *_reserved; void *_reserved2; #endif int _specialPurposeType; id _bindingAdaptor; NSMutableArray *_editors; NSMutableArray *_declaredKeys; NSMutableDictionary *_dependentKeyToModelKeyTable; NSMutableDictionary *_modelKeyToDependentKeyTable; NSMutableArray *_modelKeysToRefreshEachTime; struct __bindingsControllerFlags { unsigned int _alwaysPresentsApplicationModalAlerts:1; unsigned int _refreshesAllModelKeys:1; unsigned int _multipleObservedModelObjects:1; unsigned int _isEditing:1; unsigned int _reservedController:28; } _bindingsControllerFlags; #if !__LP64__ NSMutableDictionary *_reservedOther; #endif @protected // all instance variables are private id _modelObservingTracker; id _expectedObservingInfo; id _singleValueAccessor; } - (instancetype)init NS_DESIGNATED_INITIALIZER; - (nullable instancetype)initWithCoder:(NSCoder *)coder NS_DESIGNATED_INITIALIZER; - (void)objectDidBeginEditing:(id)editor; - (void)objectDidEndEditing:(id)editor; - (void)discardEditing; - (BOOL)commitEditing; - (void)commitEditingWithDelegate:(nullable id)delegate didCommitSelector:(nullable SEL)didCommitSelector contextInfo:(nullable void *)contextInfo; @property (getter=isEditing, readonly) BOOL editing; @end NS_ASSUME_NONNULL_END
melan/go-force
sobjects/relationshipinfo.go
// This file was generated for SObject RelationshipInfo, API Version v43.0 at 2018-07-30 03:48:04.421676602 -0400 EDT m=+50.766187920 package sobjects import ( "fmt" "strings" ) type RelationshipInfo struct { BaseSObject ChildSobjectId string `force:",omitempty"` DurableId string `force:",omitempty"` FieldId string `force:",omitempty"` Id string `force:",omitempty"` IsCascadeDelete bool `force:",omitempty"` IsDeprecatedAndHidden bool `force:",omitempty"` IsRestrictedDelete bool `force:",omitempty"` JunctionIdListNames string `force:",omitempty"` } func (t *RelationshipInfo) ApiName() string { return "RelationshipInfo" } func (t *RelationshipInfo) String() string { builder := strings.Builder{} builder.WriteString(fmt.Sprintf("RelationshipInfo #%s - %s\n", t.Id, t.Name)) builder.WriteString(fmt.Sprintf("\tChildSobjectId: %v\n", t.ChildSobjectId)) builder.WriteString(fmt.Sprintf("\tDurableId: %v\n", t.DurableId)) builder.WriteString(fmt.Sprintf("\tFieldId: %v\n", t.FieldId)) builder.WriteString(fmt.Sprintf("\tId: %v\n", t.Id)) builder.WriteString(fmt.Sprintf("\tIsCascadeDelete: %v\n", t.IsCascadeDelete)) builder.WriteString(fmt.Sprintf("\tIsDeprecatedAndHidden: %v\n", t.IsDeprecatedAndHidden)) builder.WriteString(fmt.Sprintf("\tIsRestrictedDelete: %v\n", t.IsRestrictedDelete)) builder.WriteString(fmt.Sprintf("\tJunctionIdListNames: %v\n", t.JunctionIdListNames)) return builder.String() } type RelationshipInfoQueryResponse struct { BaseQuery Records []RelationshipInfo `json:"Records" force:"records"` }
lapd87/SoftUniJavaOOPBasics
01DefiningClasses/src/_06RawData/Main.java
package _06RawData; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Created by IntelliJ IDEA. * User: LAPD * Date: 14.6.2018 г. * Time: 20:29 ч. */ public class Main { public static void main(String[] args) throws IOException { BufferedReader bufferedReader = new BufferedReader( new InputStreamReader(System.in)); List<Car> cars = new ArrayList<>(); int n = Integer.parseInt(bufferedReader.readLine()); for (int i = 0; i < n; i++) { String[] input = bufferedReader.readLine() .split("\\s+"); String model = input[0]; int engineSpeed = Integer.parseInt(input[1]); int enginePower = Integer.parseInt(input[2]); int cargoWeight = Integer.parseInt(input[3]); String cargoType = input[4]; double tyre1pressure = Double.parseDouble(input[5]); int tyre1age = Integer.parseInt(input[6]); double tyre2pressure = Double.parseDouble(input[7]); int tyre2age = Integer.parseInt(input[8]); double tyre3pressure = Double.parseDouble(input[9]); int tyre3age = Integer.parseInt(input[10]); double tyre4pressure = Double.parseDouble(input[11]); int tyre4age = Integer.parseInt(input[12]); Car car = new Car(model, engineSpeed, enginePower, cargoWeight, cargoType, tyre1pressure, tyre1age, tyre2pressure, tyre2age, tyre3pressure, tyre3age, tyre4pressure, tyre4age); cars.add(car); } String command = bufferedReader.readLine(); printFilteredCars(command, cars); } private static void printFilteredCars(String command, List<Car> cars) { cars.stream() .filter(c -> { boolean filterCargo = c.getCargo().getCargoType().equals(command); boolean extraFilter = false; switch (command) { case "fragile": extraFilter = Arrays.stream(c.getTyres()) .anyMatch(t -> t.getPressure() < 1); break; case "flamable": extraFilter = c.getEngine().getEnginePower() > 250; break; } return filterCargo && extraFilter; }) .forEach(System.out::println); } }
Irio/homebrew-cask
Casks/macvim.rb
class Macvim < Cask if MacOS.version == :mavericks url 'https://github.com/b4winckler/macvim/releases/download/snapshot-72/MacVim-snapshot-72-Mavericks.tbz' sha1 'dc983ae1e3ffae1c80f06eea9eacee49019a0c8a' else url 'https://github.com/eee19/macvim/releases/download/snapshot-72/MacVim-snapshot-72-Mountain-Lion.tbz' sha1 'bc3b899634d73908ddba5afd9b9a74778988aec3' end homepage 'http://code.google.com/p/macvim/' version '7.4-72' link 'MacVim-snapshot-72/MacVim.app' end
atbashEE/atbash-octopus
impl/non-web/oauth2-oidc-sdk/src/main/java/be/atbash/ee/oauth2/sdk/device/DeviceCodeGrant.java
/*
 * Copyright 2014-2019 <NAME> (https://www.atbash.be)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package be.atbash.ee.oauth2.sdk.device;

import be.atbash.ee.oauth2.sdk.AuthorizationGrant;
import be.atbash.ee.oauth2.sdk.GrantType;
import be.atbash.ee.oauth2.sdk.OAuth2Error;
import be.atbash.ee.oauth2.sdk.OAuth2JSONParseException;
import be.atbash.ee.oauth2.sdk.util.MultivaluedMapUtils;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Device code grant for the OAuth 2.0 Device Authorization Grant.
 *
 * <p>
 * Related specifications:
 *
 * <ul>
 * <li>OAuth 2.0 Device Authorization Grant (draft-ietf-oauth-device-flow-15)
 * </ul>
 */
public class DeviceCodeGrant extends AuthorizationGrant {

    /**
     * The grant type.
     */
    public static final GrantType GRANT_TYPE = GrantType.DEVICE_CODE;

    /**
     * The device code received from the authorisation server.
     */
    private final DeviceCode deviceCode;

    /**
     * Creates a new device code grant.
     *
     * @param deviceCode The device code. Must not be {@code null}.
     */
    public DeviceCodeGrant(DeviceCode deviceCode) {
        super(GRANT_TYPE);
        if (deviceCode == null) {
            throw new IllegalArgumentException("The device code must not be null");
        }
        this.deviceCode = deviceCode;
    }

    /**
     * Returns the device code received from the authorisation server.
     *
     * @return The device code received from the authorisation server.
     */
    public DeviceCode getDeviceCode() {
        return deviceCode;
    }

    /**
     * Returns the token request parameters for this grant:
     * {@code grant_type} and {@code device_code}, in that order.
     */
    @Override
    public Map<String, List<String>> toParameters() {
        Map<String, List<String>> params = new LinkedHashMap<>();
        params.put("grant_type", Collections.singletonList(GRANT_TYPE.getValue()));
        params.put("device_code", Collections.singletonList(deviceCode.getValue()));
        return params;
    }

    /**
     * Parses a device code grant from the specified request body
     * parameters.
     *
     * <p>Example:
     *
     * <pre>
     * grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code
     * &amp;device_code=GmRhmhcxhwAzkoEqiMEg_DnyEysNkuNhszIySk9eS
     * </pre>
     *
     * @param params The parameters.
     * @return The device code grant.
     * @throws OAuth2JSONParseException If parsing failed.
     */
    public static DeviceCodeGrant parse(Map<String, List<String>> params) throws OAuth2JSONParseException {

        // Parse grant type
        String grantTypeString = MultivaluedMapUtils.getFirstValue(params, "grant_type");

        if (grantTypeString == null) {
            String msg = "Missing \"grant_type\" parameter";
            throw new OAuth2JSONParseException(msg, OAuth2Error.INVALID_REQUEST.appendDescription(": " + msg));
        }

        if (!GrantType.parse(grantTypeString).equals(GRANT_TYPE)) {
            String msg = "The \"grant_type\" must be " + GRANT_TYPE;
            throw new OAuth2JSONParseException(msg, OAuth2Error.UNSUPPORTED_GRANT_TYPE.appendDescription(": " + msg));
        }

        // Parse the device code (missing, empty and blank values are all rejected)
        String deviceCodeString = MultivaluedMapUtils.getFirstValue(params, "device_code");

        if (deviceCodeString == null || deviceCodeString.trim().isEmpty()) {
            String msg = "Missing or empty \"device_code\" parameter";
            throw new OAuth2JSONParseException(msg, OAuth2Error.INVALID_REQUEST.appendDescription(": " + msg));
        }

        DeviceCode deviceCode = new DeviceCode(deviceCodeString);

        return new DeviceCodeGrant(deviceCode);
    }

    // Equality is defined solely by the device code value.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DeviceCodeGrant)) {
            return false;
        }
        DeviceCodeGrant deviceCodeGrant = (DeviceCodeGrant) o;
        return deviceCode.equals(deviceCodeGrant.deviceCode);
    }

    @Override
    public int hashCode() {
        return deviceCode.hashCode();
    }
}
aut0/vcml
include/vcml/debugging/vncserver.h
/******************************************************************************
 *                                                                            *
 * Copyright 2018 <NAME>                                                      *
 *                                                                            *
 * Licensed under the Apache License, Version 2.0 (the "License");            *
 * you may not use this file except in compliance with the License.           *
 * You may obtain a copy of the License at                                    *
 *                                                                            *
 *     http://www.apache.org/licenses/LICENSE-2.0                             *
 *                                                                            *
 * Unless required by applicable law or agreed to in writing, software        *
 * distributed under the License is distributed on an "AS IS" BASIS,          *
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   *
 * See the License for the specific language governing permissions and        *
 * limitations under the License.                                             *
 *                                                                            *
 ******************************************************************************/

#if defined(HAVE_LIBVNC) && !defined(VCML_DEBUGGING_VNCSERVER_H)
#define VCML_DEBUGGING_VNCSERVER_H

#include <rfb/rfb.h>

#include "vcml/common/types.h"
#include "vcml/common/strings.h"
#include "vcml/common/report.h"

#include "vcml/logging/logger.h"

namespace vcml { namespace debugging {

    // Describes one color channel within a packed pixel: bit offset and
    // number of bits.
    typedef struct {
        u8 offset;
        u8 size;
    } vnc_color_format;

    // Framebuffer geometry and pixel layout exposed to VNC clients.
    typedef struct {
        u32 resx;          // horizontal resolution in pixels
        u32 resy;          // vertical resolution in pixels
        u32 size;          // total framebuffer size in bytes
        vnc_color_format a; // alpha channel layout
        vnc_color_format r; // red channel layout
        vnc_color_format g; // green channel layout
        vnc_color_format b; // blue channel layout
        vcml_endian endian; // byte order of the pixel data
    } vnc_fbmode;

    // Factory helpers that build common pixel layouts for a width x height
    // framebuffer.
    vnc_fbmode fbmode_argb32(u32 width, u32 height);
    vnc_fbmode fbmode_bgra32(u32 width, u32 height);
    vnc_fbmode fbmode_rgb24(u32 width, u32 height);
    vnc_fbmode fbmode_bgr24(u32 width, u32 height);
    vnc_fbmode fbmode_rgb16(u32 width, u32 height);
    vnc_fbmode fbmode_gray8(u32 width, u32 height);

    // Wraps one libvncserver instance serving a framebuffer on a TCP port.
    // Instances are created and shared via the static lookup() registry,
    // one per port.
    class vncserver
    {
    private:
        rfbScreenInfo* m_screen;  // libvncserver screen/session state
        pthread_t m_thread;       // worker thread executing run()
        volatile bool m_running;  // loop flag for the worker thread
        vnc_fbmode m_fbmode;      // current framebuffer mode
        // Framebuffer pixel storage; NOTE(review): ownership appears to
        // depend on which setup_framebuffer overload was used — confirm
        // before freeing.
        u8* m_fb;
        vector<function<void(u32, bool)>*> m_key_handler; // key event listeners

        // Registry of active servers keyed by TCP port.
        static std::map<u16, shared_ptr<vncserver>> servers;

        void run();
        void dokey(unsigned int key, bool down);

        static void* thread_func(void* data);
        static void key_func(rfbBool down, rfbKeySym key, rfbClientPtr cl);

        // Construction is restricted to the lookup() registry; copying is
        // disabled.
        vncserver();
        vncserver(u16 port);
        vncserver(const vncserver&);

    public:
        u16 get_port() const { return (u16)m_screen->port; }

        // Allocates (first overload) or adopts (second overload) a
        // framebuffer with the given mode.
        u8* setup_framebuffer(const vnc_fbmode& desc);
        void setup_framebuffer(const vnc_fbmode& desc, u8* ptr);

        // Pushes the current framebuffer contents to connected clients.
        void render();

        virtual ~vncserver();

        // Register/unregister a callback invoked as (keysym, pressed).
        void add_key_listener(function<void(u32, bool)>* handler);
        void remove_key_listener(function<void(u32, bool)>* handler);

        // Returns the server for the given port, creating it on first use.
        static shared_ptr<vncserver> lookup(u16 port);
    };

}}

#endif
re995/fuchsia
src/ui/a11y/lib/view/view_wrapper.cc
<reponame>re995/fuchsia // Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "view_wrapper.h" #include <fuchsia/accessibility/semantics/cpp/fidl.h> #include <lib/async/default.h> #include <lib/syslog/cpp/macros.h> #include <stack> #include "src/ui/a11y/lib/semantics/util/semantic_transform.h" namespace a11y { namespace { // Builds a transform of the form: // | 1 0 0 Tx | // | 0 1 0 Ty | // | 0 0 1 0 | // | 0 0 0 1 | // Where: Tx and Ty come from |offset|. fuchsia::ui::gfx::mat4 MakeTranslationTransform(const fuchsia::ui::gfx::vec2& offset) { fuchsia::ui::gfx::mat4 transform; transform.matrix[0] = 1; transform.matrix[5] = 1; transform.matrix[10] = 1; transform.matrix[15] = 1; transform.matrix[12] = -offset.x; transform.matrix[13] = -offset.y; return transform; } } // namespace ViewWrapper::ViewWrapper(fuchsia::ui::views::ViewRef view_ref, std::unique_ptr<ViewSemantics> view_semantics, std::unique_ptr<AnnotationViewInterface> annotation_view) : view_ref_(std::move(view_ref)), view_semantics_(std::move(view_semantics)), annotation_view_(std::move(annotation_view)) {} void ViewWrapper::EnableSemanticUpdates(bool enabled) { view_semantics_->EnableSemanticUpdates(enabled); } fxl::WeakPtr<::a11y::SemanticTree> ViewWrapper::GetTree() const { return view_semantics_->GetTree(); } fuchsia::ui::views::ViewRef ViewWrapper::ViewRefClone() const { return Clone(view_ref_); } void ViewWrapper::HighlightMagnificationViewport(float magnification_scale, float magnification_translation_x, float magnification_translation_y) { auto tree_weak_ptr = GetTree(); if (!tree_weak_ptr) { FX_LOGS(ERROR) << "ViewWrapper::DrawHighlight: Invalid tree pointer"; return; } // We need to get the bounds of the view's root node, so retrieve the root // node. 
auto root_node = tree_weak_ptr->GetNode(0u); FX_DCHECK(root_node); auto root_node_bounding_box = root_node->location(); // Get the dimensions of the root node's bounding box. We will use these to // compute the dimensions of the magnification viewport later. auto width = root_node_bounding_box.max.x - root_node_bounding_box.min.x; auto height = root_node_bounding_box.max.y - root_node_bounding_box.min.y; // Get the "top left" or "minimum" in NDC for the magnification viewport. // Note that the local coordinate space for this view is rotated 90 degrees // clockwise from NDC, so the "top left" corner of the screen is actually the // "bottom left" corenr in NDC. So, the "top left" corner of the screen is // at point (-1, 1) in NDC. // We want to determine which NDC point in unmagnified space will be located // at (-1, 1) in NDC (this point will be the "min" of the bounding box for // the magnifier viewport in NDC. Here, we are essentially applying the // inverse of the magnification transform to the point (-1, 1). auto x_top_left_ndc = (-1 - magnification_translation_x) / magnification_scale; auto y_top_left_ndc = (1 - magnification_translation_y) / magnification_scale; // Now, convert the NDC location of the upper left corner of the magnification // viewport to local coordinates. NDC point (0, 0) will be in the center of // the view -- (root_node_bounding_box.min.x + (width / 2), // root_node_bounding_box.min.y + (height / 2)). Furthermore, since NDC // coordinates fall between -1 and 1, the conversion factor for NDC to local // is just (width or height) / 2. // NOTE: Since the local space is rotated relative to NDC, we need to switch // the x- and y- coordinates (i.e. use the y NDC coordinate to compute the local // x and vice versa). We also need to use the opposite of the y coordinate to // account for the rotation of the screen. 
auto x_translation = root_node_bounding_box.min.x + (width / 2) + (width / 2) * -y_top_left_ndc; auto y_translation = root_node_bounding_box.min.y + (height / 2) + (height / 2) * x_top_left_ndc; // Finally, compute the bounds of the magnification viewport in local // coordinates. fuchsia::ui::gfx::BoundingBox magnification_viewport_bounding_box; magnification_viewport_bounding_box.min.x = x_translation; magnification_viewport_bounding_box.min.y = y_translation; magnification_viewport_bounding_box.max.x = magnification_viewport_bounding_box.min.x + (width / magnification_scale); magnification_viewport_bounding_box.max.y = magnification_viewport_bounding_box.min.y + (height / magnification_scale); // Compute the local->global coordinate transform, which will just be the // root node's transform since the root node doesn't have a parent. SemanticTransform transform; if (root_node->has_transform()) { transform.ChainLocalTransform(root_node->transform()); } annotation_view_->DrawHighlight(magnification_viewport_bounding_box, transform.scale_vector(), transform.translation_vector(), true /* is_magnification_highlight */); } std::optional<SemanticTransform> ViewWrapper::GetNodeToRootTransform(uint32_t node_id) const { auto tree_weak_ptr = GetTree(); if (!tree_weak_ptr) { FX_LOGS(ERROR) << "Invalid tree pointer"; return std::nullopt; } auto* node = tree_weak_ptr->GetNode(node_id); if (!node) { FX_LOGS(ERROR) << "No node found iwth id: " << node_id; return std::nullopt; } // Compute the translation and scaling vectors for the node's bounding box. // Each node can supply a 4x4 transform matrix of the form: // [ Sx 0 0 Tx ] // [ 0 Sy 0 Ty ] // [ 0 0 Sz Tz ] // [ 0 0 0 1 ] // // Here, Sx, Sy, and Sz are the scale coefficients on the x, y, and z axes, // respectively. Tx, Ty, and Tz are the x, y, and z components of translation, // respectively. 
// // In order to compute the transform matrix from the focused node's coordinate // space to the root node's coordinate space, we can simply compute the // cross product of the focused node's ancestors' transform matrices, // beginning at the focused node and up to the minimum-depth non-root ancestor // (the root does not have a parent, so it does not need a transform). // // [Focused node to scenic view] = [root transform] x [depth 1 ancestor transform] x // [depth 2 ancestor transform] x ... x [parent transform] x [focused node transform] // // The resulting transform will be of the same form as described above. Using // this matrix, we can simply extract the scaling and translation vectors // required by scenic: (Sx, Sy, Sz) and (Tx, Ty, Tz), respectively. // // Note that if a node has scroll offsets, it introduces a transform matrix filling only the // translation values to account for the scrolling. This transform is part of the computation // described above. uint32_t current_node_id = node_id; SemanticTransform node_to_root_transform; while (true) { auto current_node = tree_weak_ptr->GetNode(current_node_id); FX_DCHECK(current_node); // Don't apply scrolling that's on the target node, since scrolling affects // the location of its children rather than it. Apply scrolling before the // node's transform, since the scrolling moves its children within it and // then the transform moves the result to the parent's space. if (current_node_id != node_id && current_node->has_states() && current_node->states().has_viewport_offset()) { auto translation_matrix = MakeTranslationTransform(current_node->states().viewport_offset()); node_to_root_transform.ChainLocalTransform(translation_matrix); } if (current_node->has_transform()) { node_to_root_transform.ChainLocalTransform(current_node->transform()); } // Once we have applied the root node's tranform, we shoud exit the loop. 
if (current_node_id == 0) { break; } // If |current_node| has an offset container specified, then its transform // puts local coordinates into the coordinate space of the offset container // node, NOT the parent of |current_node|. If no offset container is // specified, then we assume the transform is relative to the parent. if (current_node->has_container_id()) { current_node_id = current_node->container_id(); } else { auto parent_node = tree_weak_ptr->GetParentNode(current_node_id); FX_DCHECK(parent_node); current_node_id = parent_node->node_id(); } } return node_to_root_transform; } void ViewWrapper::HighlightNode(uint32_t node_id) { auto tree_weak_ptr = GetTree(); if (!tree_weak_ptr) { FX_LOGS(ERROR) << "Invalid tree pointer"; return; } auto annotated_node = tree_weak_ptr->GetNode(node_id); if (!annotated_node) { FX_LOGS(ERROR) << "No node found with id: " << node_id; return; } auto transform = GetNodeToRootTransform(node_id); if (!transform) { FX_LOGS(ERROR) << "Could not compute node-to-root transform for node: " << node_id; return; } auto bounding_box = annotated_node->location(); annotation_view_->DrawHighlight(bounding_box, transform->scale_vector(), transform->translation_vector(), false /* is_magnification_highlight */); } void ViewWrapper::ClearAllHighlights() { annotation_view_->ClearAllAnnotations(); } void ViewWrapper::ClearFocusHighlights() { annotation_view_->ClearFocusHighlights(); } void ViewWrapper::ClearMagnificationHighlights() { annotation_view_->ClearMagnificationHighlights(); } std::shared_ptr<input::Injector> ViewWrapper::take_view_injector() { auto tmp = view_injector_; view_injector_.reset(); return tmp; } } // namespace a11y
imsatyam/techlearn
content/programming-realm/code_problems/src/main/java/com/satyam/problem/leetcode/medium/LC43MultiplyStrings.java
package com.satyam.problem.leetcode.medium; /** LC #43 Given two non-negative integers num1 and num2 represented as strings, return the product of num1 and num2, also represented as a string. Example 1: Input: num1 = "2", num2 = "3" Output: "6" Example 2: Input: num1 = "123", num2 = "456" Output: "56088" Note: - The length of both num1 and num2 is < 110. - Both num1 and num2 contain only digits 0-9. - Both num1 and num2 do not contain any leading zero, except the number 0 itself. - You must not use any built-in BigInteger library or convert the inputs to integer directly. Idea: Use multiplication logic from school Submission Detail Link: https://leetcode.com/submissions/detail/291379707/ Runtime: 3 ms Memory: 36.6 MB */ public class LC43MultiplyStrings { public String multiply(String num1, String num2) { int len1 = num1.length(); int len2 = num2.length(); if (len1 == 0 || len2 == 0) { return ""; } int[] result = new int[len1 + len2]; int in1 = 0; int in2 = 0; int i = 0, j = 0;; for (i = len1 - 1; i >= 0; i--) { int carry = 0; int n1 = num1.charAt(i) - '0'; in2 = 0; for (j = len2 - 1; j >= 0; j--) { int n2 = num2.charAt(j) - '0'; int sum = n1 * n2 + result[in1 + in2] + carry; carry = sum / 10; result[in1 + in2] = sum % 10; in2++; } if (carry > 0) { result[in1 + in2] += carry; } in1++; } i = len1 + len2 - 1; while (i >= 0 && result[i] == 0) { i--; } if (i == -1) { return "0"; } StringBuilder builder = new StringBuilder(); while(i >= 0) { builder.append(result[i]); i--; } return builder.toString(); } }
clinlfoundation/EMBOX
build/base/gen/include/config/embox/kernel/timer/itimer.h
/* Generated by GNU Make 4.2.1. Do not edit. */ #ifndef __CONFIG__embox__kernel__timer__itimer__H_ #define __CONFIG__embox__kernel__timer__itimer__H_ // This is the most specific implementation type #ifndef OPTION_NUMBER_embox__kernel__timer__itimer__itimer_quantity #define OPTION_NUMBER_embox__kernel__timer__itimer__itimer_quantity 10 #endif #endif /* __CONFIG__embox__kernel__timer__itimer__H_ */
factset/enterprise-sdk
code/java/ProcuretoPayAPISCIM/v1/src/main/java/com/factset/sdk/ProcuretoPayAPISCIM/models/GroupResource.java
/* * FactSet SCIM API * FactSet's SCIM API implementation. * * The version of the OpenAPI document: 1.0.0 * * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package com.factset.sdk.ProcuretoPayAPISCIM.models; import java.util.Objects; import java.util.Arrays; import java.util.Map; import java.util.HashMap; import com.factset.sdk.ProcuretoPayAPISCIM.models.GroupResourceMembers; import com.factset.sdk.ProcuretoPayAPISCIM.models.GroupResourceMeta; import com.factset.sdk.ProcuretoPayAPISCIM.models.GroupResourceUrnScimSchemasExtensionFactSetEnterpriseHosting10Group; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.annotation.JsonValue; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.io.Serializable; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.factset.sdk.ProcuretoPayAPISCIM.JSON; /** * GroupResource */ @JsonPropertyOrder({ GroupResource.JSON_PROPERTY_SCHEMAS, GroupResource.JSON_PROPERTY_ID, GroupResource.JSON_PROPERTY_EXTERNAL_ID, GroupResource.JSON_PROPERTY_DISPLAY_NAME, GroupResource.JSON_PROPERTY_DESCRIPTION, GroupResource.JSON_PROPERTY_MEMBERS, GroupResource.JSON_PROPERTY_URN_COLON_SCIM_COLON_SCHEMAS_COLON_EXTENSION_COLON_FACT_SET_COLON_ENTERPRISE_HOSTING_COLON10_COLON_GROUP, GroupResource.JSON_PROPERTY_META }) @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") public class GroupResource implements Serializable { private static final long serialVersionUID = 1L; public static final String JSON_PROPERTY_SCHEMAS = "schemas"; private java.util.List<String> schemas = null; public static final String JSON_PROPERTY_ID = "id"; private String id; public 
static final String JSON_PROPERTY_EXTERNAL_ID = "externalId"; private String externalId; public static final String JSON_PROPERTY_DISPLAY_NAME = "displayName"; private String displayName; public static final String JSON_PROPERTY_DESCRIPTION = "description"; private String description; public static final String JSON_PROPERTY_MEMBERS = "members"; private java.util.List<GroupResourceMembers> members = null; public static final String JSON_PROPERTY_URN_COLON_SCIM_COLON_SCHEMAS_COLON_EXTENSION_COLON_FACT_SET_COLON_ENTERPRISE_HOSTING_COLON10_COLON_GROUP = "urn:scim:schemas:extension:FactSet:EnterpriseHosting:1.0:Group"; private GroupResourceUrnScimSchemasExtensionFactSetEnterpriseHosting10Group urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup; public static final String JSON_PROPERTY_META = "meta"; private GroupResourceMeta meta; public GroupResource() { } @JsonCreator public GroupResource( @JsonProperty(JSON_PROPERTY_ID) String id ) { this(); this.id = id; } public GroupResource schemas(java.util.List<String> schemas) { this.schemas = schemas; return this; } public GroupResource addSchemasItem(String schemasItem) { if (this.schemas == null) { this.schemas = new java.util.ArrayList<>(); } this.schemas.add(schemasItem); return this; } /** * Get schemas * @return schemas **/ @javax.annotation.Nullable @ApiModelProperty(example = "[\"urn:ietf:params:scim:schemas:core:2.0:Group\",\"urn:scim:schemas:extension:FactSet:EnterpriseHosting:1.0:Group\"]", value = "") @JsonProperty(JSON_PROPERTY_SCHEMAS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public java.util.List<String> getSchemas() { return schemas; } @JsonProperty(JSON_PROPERTY_SCHEMAS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setSchemas(java.util.List<String> schemas) { this.schemas = schemas; } /** * Get id * @return id **/ @javax.annotation.Nullable @ApiModelProperty(example = "Domain btud - Pod 04 - CitrixApps - Cymba OMS", value = "") 
@JsonProperty(JSON_PROPERTY_ID) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public String getId() { return id; } public GroupResource externalId(String externalId) { this.externalId = externalId; return this; } /** * Get externalId * @return externalId **/ @javax.annotation.Nullable @ApiModelProperty(example = "B8FE8BBD-0E04-40B2-9BB3-E5EE17C4C9C9", value = "") @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public String getExternalId() { return externalId; } @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setExternalId(String externalId) { this.externalId = externalId; } public GroupResource displayName(String displayName) { this.displayName = displayName; return this; } /** * A human-readable name for the Group. * @return displayName **/ @javax.annotation.Nullable @ApiModelProperty(example = "Domain btud - Pod 04 - CitrixApps - Cymba OMS", value = "A human-readable name for the Group.") @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public String getDisplayName() { return displayName; } @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setDisplayName(String displayName) { this.displayName = displayName; } public GroupResource description(String description) { this.description = description; return this; } /** * A description for the Group. 
* @return description **/ @javax.annotation.Nullable @ApiModelProperty(value = "A description for the Group.") @JsonProperty(JSON_PROPERTY_DESCRIPTION) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public String getDescription() { return description; } @JsonProperty(JSON_PROPERTY_DESCRIPTION) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setDescription(String description) { this.description = description; } public GroupResource members(java.util.List<GroupResourceMembers> members) { this.members = members; return this; } public GroupResource addMembersItem(GroupResourceMembers membersItem) { if (this.members == null) { this.members = new java.util.ArrayList<>(); } this.members.add(membersItem); return this; } /** * A list of members of the Group. * @return members **/ @javax.annotation.Nullable @ApiModelProperty(value = "A list of members of the Group.") @JsonProperty(JSON_PROPERTY_MEMBERS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public java.util.List<GroupResourceMembers> getMembers() { return members; } @JsonProperty(JSON_PROPERTY_MEMBERS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setMembers(java.util.List<GroupResourceMembers> members) { this.members = members; } public GroupResource urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup(GroupResourceUrnScimSchemasExtensionFactSetEnterpriseHosting10Group urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup) { this.urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup = urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup; return this; } /** * Get urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup * @return urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup **/ @javax.annotation.Nullable @ApiModelProperty(value = "") 
@JsonProperty(JSON_PROPERTY_URN_COLON_SCIM_COLON_SCHEMAS_COLON_EXTENSION_COLON_FACT_SET_COLON_ENTERPRISE_HOSTING_COLON10_COLON_GROUP) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public GroupResourceUrnScimSchemasExtensionFactSetEnterpriseHosting10Group getUrnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup() { return urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup; } @JsonProperty(JSON_PROPERTY_URN_COLON_SCIM_COLON_SCHEMAS_COLON_EXTENSION_COLON_FACT_SET_COLON_ENTERPRISE_HOSTING_COLON10_COLON_GROUP) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setUrnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup(GroupResourceUrnScimSchemasExtensionFactSetEnterpriseHosting10Group urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup) { this.urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup = urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup; } public GroupResource meta(GroupResourceMeta meta) { this.meta = meta; return this; } /** * Get meta * @return meta **/ @javax.annotation.Nullable @ApiModelProperty(value = "") @JsonProperty(JSON_PROPERTY_META) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public GroupResourceMeta getMeta() { return meta; } @JsonProperty(JSON_PROPERTY_META) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public void setMeta(GroupResourceMeta meta) { this.meta = meta; } /** * Return true if this GroupResource object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } GroupResource groupResource = (GroupResource) o; return Objects.equals(this.schemas, groupResource.schemas) && Objects.equals(this.id, groupResource.id) && Objects.equals(this.externalId, groupResource.externalId) && Objects.equals(this.displayName, groupResource.displayName) && Objects.equals(this.description, groupResource.description) && Objects.equals(this.members, groupResource.members) && Objects.equals(this.urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup, groupResource.urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup) && Objects.equals(this.meta, groupResource.meta); } @Override public int hashCode() { return Objects.hash(schemas, id, externalId, displayName, description, members, urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup, meta); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class GroupResource {\n"); sb.append(" schemas: ").append(toIndentedString(schemas)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" externalId: ").append(toIndentedString(externalId)).append("\n"); sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" members: ").append(toIndentedString(members)).append("\n"); sb.append(" urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup: ").append(toIndentedString(urnColonScimColonSchemasColonExtensionColonFactSetColonEnterpriseHostingColon10ColonGroup)).append("\n"); sb.append(" meta: ").append(toIndentedString(meta)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces 
* (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
mfayk/KINC
src/core/ccmatrix.h
#ifndef CCMATRIX_H
#define CCMATRIX_H
#include "pairwise_matrix.h"



/*!
 * This class implements the cluster matrix data object. A cluster matrix is a
 * pairwise matrix where each pair-cluster element is a sample mask denoting
 * whether a sample belongs in the cluster. The matrix data can be accessed
 * using the pairwise iterator for this class.
 */
class CCMatrix : public Pairwise::Matrix
{
   Q_OBJECT
public:
   // Pairwise iterator type for reading/writing pair-cluster sample masks;
   // defined elsewhere.
   class Pair;
public:
   // Returns the Qt table model used to display this matrix (lazily backed by
   // the private Model class below).
   virtual QAbstractTableModel* model() override final;
public:
   // Initializes a new cluster matrix with the given gene names, the maximum
   // number of clusters per pair, and the sample names whose count fixes the
   // sample mask length.
   void initialize(const EMetaArray& geneNames, int maxClusterSize, const EMetaArray& sampleNames);
   // Returns the list of sample names stored in this matrix's metadata.
   EMetaArray sampleNames() const;
   /*!
    * Return the number of samples in the cluster matrix.
    */
   int sampleSize() const { return _sampleSize; }
private:
   // Qt table model implementation for this data object; defined elsewhere.
   class Model;
private:
   /*!
    * Write the sub-header to the data object file.
    */
   virtual void writeHeader() override final { stream() << _sampleSize; }
   /*!
    * Read the sub-header from the data object file.
    */
   virtual void readHeader() override final { stream() >> _sampleSize; }
   /*!
    * The size (in bytes) of the sub-header. The sub-header consists of the
    * sample size.
    */
   // NOTE(review): 4 matches sizeof(qint32) for _sampleSize below; keep the
   // two in sync if the sub-header ever changes.
   constexpr static qint16 SUBHEADER_SIZE {4};
   /*!
    * The number of samples in each sample mask.
    */
   qint32 _sampleSize {0};
   /*!
    * Pointer to a qt table model for this class.
    */
   Model* _model {nullptr};
};



#endif
RobotLocomotion/drake-python3.7
systems/framework/system_output.cc
#include "drake/systems/framework/system_output.h"

// Explicitly instantiate SystemOutput<T> once per Drake default scalar type,
// so the template definitions can live out-of-line and every translation unit
// shares these instantiations.
DRAKE_DEFINE_CLASS_TEMPLATE_INSTANTIATIONS_ON_DEFAULT_SCALARS(
    class ::drake::systems::SystemOutput)
DorsaIO/jaydata
lib/Types/StorageProviders/SqLite/SqlExpressionMonitor.js
'use strict';

// NOTE: This file is Babel-transpiled (ES5) output of an ES6 class definition.

var _core = require('../../../../core.js');

var _core2 = _interopRequireDefault(_core);

// Babel helper: wraps CommonJS exports so `.default` access works uniformly.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Expression monitor for the SqLite provider. Each visitor below:
//   1. visits the source subtree normally,
//   2. sets monitorDefinition.isMapped = true while visiting the selector
//      subtree, then restores it to false,
//   3. rebuilds the expression node only if either child actually changed,
//      otherwise returns the original node unchanged.
(0, _core.$C)('$data.sqLite.SqlExpressionMonitor', _core2.default.Expressions.ExpressionMonitor, null, {
    constructor: function constructor(monitorDefinition) {
        // Visitor for include expressions (e.g. eager-loading clauses).
        this.VisitIncludeExpression = function (expression, context) {
            var newSourceExpression = this.Visit(expression.source, context);
            monitorDefinition.isMapped = true;
            var newSelectorExpresion = this.Visit(expression.selector, context);
            monitorDefinition.isMapped = false;
            if (newSourceExpression !== expression.source || newSelectorExpresion !== expression.selector) {
                return _core.Container.createIncludeExpression(newSourceExpression, newSelectorExpresion);
            }
            return expression;
        };
        // Visitor for projection (select) expressions; preserves params,
        // instance, and the projectionAs alias when rebuilding.
        this.VisitProjectionExpression = function (expression, context) {
            var source = this.Visit(expression.source, context);
            monitorDefinition.isMapped = true;
            var selector = this.Visit(expression.selector, context);
            monitorDefinition.isMapped = false;
            if (source !== expression.source || selector !== expression.selector) {
                var expr = _core.Container.createProjectionExpression(source, selector, expression.params, expression.instance);
                expr.projectionAs = expression.projectionAs;
                return expr;
            }
            return expression;
        };
    }
});
djangojeng-e/djangoproejcts
bookmark_app/bookmark/views.py
<filename>bookmark_app/bookmark/views.py<gh_stars>0
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import ListView, CreateView, DetailView, UpdateView, DeleteView

from .models import Bookmark

# Create your views here.


class BookmarkListView(ListView):
    """Paginated list of all bookmarks, six per page."""
    model = Bookmark
    paginate_by = 6


class BookmarkCreateView(CreateView):
    """Create a bookmark from its site name and URL.

    Renders the ``bookmark_create`` template (via template_name_suffix) and
    redirects to the list view on success.
    """
    model = Bookmark
    fields = ['site_name', 'url']
    success_url = reverse_lazy('list')
    template_name_suffix = '_create'


class BookmarkDetailView(DetailView):
    """Display a single bookmark."""
    model = Bookmark


class BookmarkUpdateView(UpdateView):
    """Edit a bookmark's site name and URL (``bookmark_update`` template)."""
    model = Bookmark
    fields = ['site_name', 'url']
    template_name_suffix = '_update'


class BookmarkDeleteView(DeleteView):
    """Delete a bookmark and redirect to the list view."""
    model = Bookmark
    success_url = reverse_lazy('list')
npocmaka/Windows-Server-2003
ds/adsi/router/oledbutl.hxx
//-----------------------------------------------------------------------------
//
//  Microsoft Windows
//  Copyright (C) Microsoft Corporation, 1992 - 1995.
//
//  File:  oledbutl.hxx
//
//  Contents:  Utility object versions for ADSI row providers
//
//  Functions:
//
//  Notes:
//
//
//  History:    07/10/96  | RenatoB | Created, lifted most from EricJ code
//-----------------------------------------------------------------------------

#ifndef _OLEDBUTL_H_
#define _OLEDBUTL_H_

// Forward declarations of provider objects used by the utilities below.
class CRowProvider;
class CColumnsInfo;
class CSessionObject;
class CCommandObject;

// Element count of a fixed-size array.
#ifndef NUMELEM
# define NUMELEM(x) (sizeof(x)/sizeof(*x))
#endif

// Macros to enable catching exceptions and returning E_UNEXPECTED
// for retail versions. Debug versions don't catch exceptions in
// order to generate better stack traces.
#if DBG == 1

#define TRYBLOCK
#define CATCHBLOCKRETURN
#define CATCHBLOCKBAIL(hr)

#else

#define TRYBLOCK try {
#define CATCHBLOCKRETURN } \
        catch (...) \
        { ADsAssert(false); RRETURN(E_UNEXPECTED); }
#define CATCHBLOCKBAIL(hr) } \
        catch (...) \
        { ADsAssert(false); BAIL_ON_FAILURE(hr = E_UNEXPECTED); }

#endif

//-----------------------------------------------------------------------------
// Functions
//-----------------------------------------------------------------------------

// Copies a command object's accessors onto a rowset's accessor object.
HRESULT CpAccessors2Rowset(
    IAccessor *pAccessorCommand,     //@parm IN |Command's IAccessor
    IAccessor *pAccessorRowset,      //@parm IN |Rowset's IAccessor
    ULONG cAccessors,                //@parm IN |Count, Command's accessors
    HACCESSOR rgAccessors[],         //@parm IN |Array, Command's accessors
    CImpIAccessor *pCAccessor        // accessor object of rowset
    );

// Binds to the directory object at lpszPath with the given credentials and
// returns the requested interface through ppObject.
HRESULT GetDSInterface(
    LPWSTR lpszPath,
    CCredentials& Credentials,
    REFIID iid,
    void FAR * FAR * ppObject
    );

// Extracts user credentials from an IAuthenticate callback into refCredentials.
HRESULT GetCredentialsFromIAuthenticate(IAuthenticate *pAuthenticate,
                                        CCredentials& refCredentials);

// Entry of the ADSI-type -> DB-type mapping tables: a type code paired with
// its size in bytes.
typedef struct _maptype_struct_ {
    WORD wType;
    ULONG ulSize;
}MAPTYPE_STRUCT;

// Mapping tables (defined elsewhere) indexed by ADSI type, each with its
// element count alongside.
extern MAPTYPE_STRUCT g_MapADsTypeToDBType[];
extern DWORD g_cMapADsTypeToDBType;

extern MAPTYPE_STRUCT g_MapADsTypeToDBType2[];
extern DWORD g_cMapADsTypeToDBType2;

extern VARTYPE g_MapADsTypeToVarType[];
extern DWORD g_cMapADsTypeToVarType;

extern ADS_SEARCHPREF g_MapDBPropIdToSearchPref[];
extern DWORD g_cMapDBPropToSearchPref;

// Trims whitespace from pszText and returns it.
extern LPWSTR RemoveWhiteSpaces(LPWSTR pszText);

// Reports whether a conversion between the two DB types is supported under
// the given conversion flags.
extern STDMETHODIMP CanConvertHelper(
    DBTYPE wSrcType,
    DBTYPE wDstType,
    DBCONVERTFLAGS dwConvertFlags
    );

// Returns the precision associated with the given DB type.
extern BYTE SetPrecision(DBTYPE dbType);

#endif  // _OLEDBUTL_H_
Killua010/random-things-API-E-Commerce
src/main/java/br/com/randomthings/domain/Stock.java
<reponame>Killua010/random-things-API-E-Commerce
package br.com.randomthings.domain;

import java.util.HashSet;
import java.util.Set;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * JPA entity for a product's stock record (table {@code _stock}).
 * Getters, setters, and constructors are generated by Lombok.
 */
@NoArgsConstructor
@AllArgsConstructor
@Getter
@Setter
@Entity(name="_stock")
public class Stock extends DomainEntity {

	/** The product this stock record belongs to (eagerly fetched, FK {@code product_id}). */
	@OneToOne(fetch = FetchType.EAGER)
	@JoinColumn(name = "product_id")
	private Product product;

	/** Current total quantity on hand. */
	private Integer totalQuantity;

	/** Stock input (replenishment) entries; owned by {@code StockInput.stock}. */
	@OneToMany(mappedBy="stock")
	private Set<StockInput> stockInputs = new HashSet<>();

}
ontio/libcxx-mirror
test/std/strings/string.view/string.view.ops/compare.sv.pass.cpp
<reponame>ontio/libcxx-mirror
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

// <string_view>

// constexpr int compare(basic_string_view str) const noexcept;

#include <string_view>
#include <cassert>

#include "test_macros.h"
#include "constexpr_char_traits.hpp"

// Normalizes a comparison result to -1, 0, or +1 so tests only check the sign,
// not the implementation-specific magnitude returned by compare().
int sign ( int x ) { return x > 0 ? 1 : ( x < 0 ? -1 : 0 ); }

// Checks that sv1.compare(sv2) agrees in sign with the expected value.
template<typename CharT>
void test1 ( std::basic_string_view<CharT> sv1,
             std::basic_string_view<CharT> sv2, int expected ) {
    assert ( sign( sv1.compare(sv2)) == sign(expected));
}

// Builds string_views from raw pointers and delegates to test1.
template<typename CharT>
void test ( const CharT *s1, const CharT *s2, int expected ) {
    typedef std::basic_string_view<CharT> string_view_t;
    string_view_t sv1 ( s1 );
    string_view_t sv2 ( s2 );
    test1(sv1, sv2, expected);
}

int main(int, char**) {

    // char: all length combinations of a common prefix; expected sign is the
    // length difference since shorter is a prefix of longer.
    test("", "", 0);
    test("", "abcde", -5);
    test("", "abcdefghij", -10);
    test("", "abcdefghijklmnopqrst", -20);
    test("abcde", "", 5);
    test("abcde", "abcde", 0);
    test("abcde", "abcdefghij", -5);
    test("abcde", "abcdefghijklmnopqrst", -15);
    test("abcdefghij", "", 10);
    test("abcdefghij", "abcde", 5);
    test("abcdefghij", "abcdefghij", 0);
    test("abcdefghij", "abcdefghijklmnopqrst", -10);
    test("abcdefghijklmnopqrst", "", 20);
    test("abcdefghijklmnopqrst", "abcde", 15);
    test("abcdefghijklmnopqrst", "abcdefghij", 10);
    test("abcdefghijklmnopqrst", "abcdefghijklmnopqrst", 0);

    // wchar_t
    test(L"", L"", 0);
    test(L"", L"abcde", -5);
    test(L"", L"abcdefghij", -10);
    test(L"", L"abcdefghijklmnopqrst", -20);
    test(L"abcde", L"", 5);
    test(L"abcde", L"abcde", 0);
    test(L"abcde", L"abcdefghij", -5);
    test(L"abcde", L"abcdefghijklmnopqrst", -15);
    test(L"abcdefghij", L"", 10);
    test(L"abcdefghij", L"abcde", 5);
    test(L"abcdefghij", L"abcdefghij", 0);
    test(L"abcdefghij", L"abcdefghijklmnopqrst", -10);
    test(L"abcdefghijklmnopqrst", L"", 20);
    test(L"abcdefghijklmnopqrst", L"abcde", 15);
    test(L"abcdefghijklmnopqrst", L"abcdefghij", 10);
    test(L"abcdefghijklmnopqrst", L"abcdefghijklmnopqrst", 0);

#if TEST_STD_VER >= 11
    // char16_t
    test(u"", u"", 0);
    test(u"", u"abcde", -5);
    test(u"", u"abcdefghij", -10);
    test(u"", u"abcdefghijklmnopqrst", -20);
    test(u"abcde", u"", 5);
    test(u"abcde", u"abcde", 0);
    test(u"abcde", u"abcdefghij", -5);
    test(u"abcde", u"abcdefghijklmnopqrst", -15);
    test(u"abcdefghij", u"", 10);
    test(u"abcdefghij", u"abcde", 5);
    test(u"abcdefghij", u"abcdefghij", 0);
    test(u"abcdefghij", u"abcdefghijklmnopqrst", -10);
    test(u"abcdefghijklmnopqrst", u"", 20);
    test(u"abcdefghijklmnopqrst", u"abcde", 15);
    test(u"abcdefghijklmnopqrst", u"abcdefghij", 10);
    test(u"abcdefghijklmnopqrst", u"abcdefghijklmnopqrst", 0);

    // char32_t
    test(U"", U"", 0);
    test(U"", U"abcde", -5);
    test(U"", U"abcdefghij", -10);
    test(U"", U"abcdefghijklmnopqrst", -20);
    test(U"abcde", U"", 5);
    test(U"abcde", U"abcde", 0);
    test(U"abcde", U"abcdefghij", -5);
    test(U"abcde", U"abcdefghijklmnopqrst", -15);
    test(U"abcdefghij", U"", 10);
    test(U"abcdefghij", U"abcde", 5);
    test(U"abcdefghij", U"abcdefghij", 0);
    test(U"abcdefghij", U"abcdefghijklmnopqrst", -10);
    test(U"abcdefghijklmnopqrst", U"", 20);
    test(U"abcdefghijklmnopqrst", U"abcde", 15);
    test(U"abcdefghijklmnopqrst", U"abcdefghij", 10);
    test(U"abcdefghijklmnopqrst", U"abcdefghijklmnopqrst", 0);
#endif

#if TEST_STD_VER > 11
    // Compile-time evaluation with a constexpr-friendly char_traits.
    {
    typedef std::basic_string_view<char, constexpr_char_traits<char>> SV;
    constexpr SV  sv1 { "abcde", 5 };
    constexpr SV  sv2 { "abcde", 5 };
    constexpr SV  sv3 { "edcba0", 6 };
    static_assert ( sv1.compare(sv2) == 0, "" );
    static_assert ( sv2.compare(sv1) == 0, "" );
    static_assert ( sv3.compare(sv2) > 0, "" );
    static_assert ( sv2.compare(sv3) < 0, "" );
    }
#endif

  return 0;
}
ArcticReal/eCommerce
plugins/eCommerce/src/main/java/com/skytala/eCommerce/domain/workeffort/relations/workEffort/control/note/WorkEffortNoteController.java
package com.skytala.eCommerce.domain.workeffort.relations.workEffort.control.note; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import com.google.common.base.Splitter; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.command.note.AddWorkEffortNote; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.command.note.DeleteWorkEffortNote; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.command.note.UpdateWorkEffortNote; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.event.note.WorkEffortNoteAdded; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.event.note.WorkEffortNoteDeleted; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.event.note.WorkEffortNoteFound; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.event.note.WorkEffortNoteUpdated; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.mapper.note.WorkEffortNoteMapper; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.model.note.WorkEffortNote; import com.skytala.eCommerce.domain.workeffort.relations.workEffort.query.note.FindWorkEffortNotesBy; import com.skytala.eCommerce.framework.exceptions.RecordNotFoundException; import com.skytala.eCommerce.framework.pubsub.Scheduler; import static com.skytala.eCommerce.framework.pubsub.ResponseUtil.*; @RestController @RequestMapping("/workeffort/workEffort/workEffortNotes") public class WorkEffortNoteController { private static Map<String, RequestMethod> validRequests = new HashMap<>(); public WorkEffortNoteController() 
{ validRequests.put("find", RequestMethod.GET); validRequests.put("add", RequestMethod.POST); validRequests.put("update", RequestMethod.PUT); validRequests.put("removeById", RequestMethod.DELETE); } /** * * @param allRequestParams * all params by which you want to find a WorkEffortNote * @return a List with the WorkEffortNotes * @throws Exception */ @GetMapping("/find") public ResponseEntity<List<WorkEffortNote>> findWorkEffortNotesBy(@RequestParam(required = false) Map<String, String> allRequestParams) throws Exception { FindWorkEffortNotesBy query = new FindWorkEffortNotesBy(allRequestParams); if (allRequestParams == null) { query.setFilter(new HashMap<>()); } List<WorkEffortNote> workEffortNotes =((WorkEffortNoteFound) Scheduler.execute(query).data()).getWorkEffortNotes(); return ResponseEntity.ok().body(workEffortNotes); } /** * creates a new WorkEffortNote entry in the ofbiz database * * @param workEffortNoteToBeAdded * the WorkEffortNote thats to be added * @return true on success; false on fail */ @RequestMapping(method = RequestMethod.POST, value = "/add", consumes = MediaType.APPLICATION_JSON_UTF8_VALUE) public ResponseEntity<WorkEffortNote> createWorkEffortNote(@RequestBody WorkEffortNote workEffortNoteToBeAdded) throws Exception { AddWorkEffortNote command = new AddWorkEffortNote(workEffortNoteToBeAdded); WorkEffortNote workEffortNote = ((WorkEffortNoteAdded) Scheduler.execute(command).data()).getAddedWorkEffortNote(); if (workEffortNote != null) return successful(workEffortNote); else return conflict(null); } /** * Updates the WorkEffortNote with the specific Id * * @param workEffortNoteToBeUpdated * the WorkEffortNote thats to be updated * @return true on success, false on fail * @throws Exception */ @RequestMapping(method = RequestMethod.PUT, value = "/{nullVal}", consumes = MediaType.APPLICATION_JSON_UTF8_VALUE) public ResponseEntity<String> updateWorkEffortNote(@RequestBody WorkEffortNote workEffortNoteToBeUpdated, @PathVariable String nullVal) 
throws Exception { // workEffortNoteToBeUpdated.setnull(null); UpdateWorkEffortNote command = new UpdateWorkEffortNote(workEffortNoteToBeUpdated); try { if(((WorkEffortNoteUpdated) Scheduler.execute(command).data()).isSuccess()) return noContent(); } catch (RecordNotFoundException e) { return notFound(); } return conflict(); } @GetMapping("/{workEffortNoteId}") public ResponseEntity<WorkEffortNote> findById(@PathVariable String workEffortNoteId) throws Exception { HashMap<String, String> requestParams = new HashMap<String, String>(); requestParams.put("workEffortNoteId", workEffortNoteId); try { List<WorkEffortNote> foundWorkEffortNote = findWorkEffortNotesBy(requestParams).getBody(); if(foundWorkEffortNote.size()==1){ return successful(foundWorkEffortNote.get(0)); }else{ return notFound(); } } catch (RecordNotFoundException e) { return notFound(); } } @DeleteMapping("/{workEffortNoteId}") public ResponseEntity<String> deleteWorkEffortNoteByIdUpdated(@PathVariable String workEffortNoteId) throws Exception { DeleteWorkEffortNote command = new DeleteWorkEffortNote(workEffortNoteId); try { if (((WorkEffortNoteDeleted) Scheduler.execute(command).data()).isSuccess()) return noContent(); } catch (RecordNotFoundException e) { return notFound(); } return conflict(); } }
zhusongm/AdaptiveCards
source/ios/AdaptiveCards/AdaptiveCards/AdaptiveCards/ACRIBaseActionElementRenderer.h
// // ACRIBaseActionElementRenderer // ACRIBaseActionElementRenderer.h // // Copyright © 2017 Microsoft. All rights reserved. // #import <UIKit/UIKit.h> #import <Foundation/Foundation.h> #import "HostConfig.h" #import "BaseActionElement.h" #import "ACRIContentHoldingView.h" using namespace AdaptiveCards; @protocol ACRIBaseActionElementRenderer - (UIButton* )renderButton:(UIViewController *)vc inputs:(NSMutableArray *)inputs superview:(UIView<ACRIContentHoldingView> *)superview baseActionElement:(std::shared_ptr<BaseActionElement> const &)elem andHostConfig:(std::shared_ptr<HostConfig> const &)config; @end
johanrydstrom/FluentLenium
fluentlenium-core/src/main/java/org/fluentlenium/core/wait/FluentWait.java
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */
package org.fluentlenium.core.wait;

import com.google.common.base.Function;
import org.fluentlenium.core.Fluent;
import org.fluentlenium.core.FluentPage;
import org.fluentlenium.core.search.Search;
import org.openqa.selenium.Beta;
import org.openqa.selenium.StaleElementReferenceException;
import org.openqa.selenium.WebDriver;

import java.util.concurrent.TimeUnit;

/**
 * A fluent wait that delegates to Selenium's
 * {@link org.openqa.selenium.support.ui.FluentWait}, adding FluentLenium-specific
 * entry points ({@link #until(String)}, {@link #untilPage()}) and ignoring
 * {@link StaleElementReferenceException} by default.
 */
public class FluentWait implements org.openqa.selenium.support.ui.Wait<Fluent> {

    // Underlying Selenium wait all timing/ignore configuration is delegated to.
    private final org.openqa.selenium.support.ui.FluentWait<Fluent> wait;
    private final Search search;
    private final WebDriver driver;
    // When true (the default), StaleElementReferenceException is added to the
    // ignore list just before each wait starts.
    private boolean useDefaultException;
    // Tracks whether the caller supplied a custom failure message via withMessage().
    private boolean useCustomMessage;

    /**
     * Expose the wrapped Selenium wait for advanced configuration.
     *
     * @return the underlying Selenium FluentWait instance
     */
    public org.openqa.selenium.support.ui.FluentWait getWait() {
        return wait;
    }

    public FluentWait(Fluent fluent, Search search) {
        wait = new org.openqa.selenium.support.ui.FluentWait<Fluent>(fluent);
        this.search = search;
        this.driver = fluent.getDriver();
        useDefaultException = true;
    }

    /**
     * Set the maximum time to wait.
     *
     * @param duration amount of time
     * @param unit     unit of the duration
     * @return this instance, for chaining
     */
    public FluentWait atMost(long duration, java.util.concurrent.TimeUnit unit) {
        wait.withTimeout(duration, unit);
        return this;
    }

    /**
     * Set the maximum time to wait, in milliseconds.
     *
     * @param timeInMillis time In Millis
     * @return this instance, for chaining
     */
    public FluentWait atMost(long timeInMillis) {
        wait.withTimeout(timeInMillis, TimeUnit.MILLISECONDS);
        return this;
    }

    /**
     * Set how often the condition is evaluated.
     *
     * @param duration polling interval
     * @param unit     unit of the interval
     * @return this instance, for chaining
     */
    public FluentWait pollingEvery(long duration, java.util.concurrent.TimeUnit unit) {
        wait.pollingEvery(duration, unit);
        return this;
    }

    /**
     * Ignore all of the given exception types while waiting.
     *
     * @param types exception types to ignore
     * @return this instance, for chaining
     */
    public FluentWait ignoreAll(java.util.Collection<java.lang.Class<? extends Throwable>> types) {
        wait.ignoreAll(types);
        return this;
    }

    /**
     * Ignore the given exception type while waiting.
     *
     * @param exceptionType exception type to ignore
     * @return this instance, for chaining
     */
    public FluentWait ignoring(java.lang.Class<? extends java.lang.RuntimeException> exceptionType) {
        wait.ignoring(exceptionType);
        return this;
    }

    /**
     * Ignoring the two exceptions passed as params
     *
     * @param firstType  first exception type to ignore
     * @param secondType second exception type to ignore
     * @return this instance, for chaining
     */
    public FluentWait ignoring(java.lang.Class<? extends java.lang.RuntimeException> firstType,
                               java.lang.Class<? extends java.lang.RuntimeException> secondType) {
        wait.ignoring(firstType, secondType);
        return this;
    }

    /**
     * Wait until the given predicate holds.
     *
     * @param isTrue predicate evaluated repeatedly until it returns true
     * @return this instance, for chaining
     */
    public FluentWait until(com.google.common.base.Predicate<Fluent> isTrue) {
        updateWaitWithDefaultExceptions();
        wait.until(isTrue);
        return this;
    }

    /**
     * Set a custom message shown when the wait times out.
     *
     * @param message - the failing message
     * @return this instance, for chaining
     */
    public FluentWait withMessage(String message) {
        wait.withMessage(message);
        useCustomMessage = true;
        return this;
    }

    /**
     * Use this methods only to avoid ignoring StateElementReferenceException
     *
     * @return this instance, for chaining
     */
    @Beta
    public FluentWait withNoDefaultsException() {
        useDefaultException = false;
        return this;
    }

    /**
     * Start a wait on elements matched by the given CSS selector.
     *
     * @param string - CSS selector
     * @return a matcher bound to the selector
     */
    public FluentWaitMatcher until(String string) {
        updateWaitWithDefaultExceptions();
        return new FluentWaitMatcher(search, this, string);
    }

    /**
     * Start a wait on the current page.
     *
     * @return a page matcher bound to the current driver
     */
    public FluentWaitPageMatcher untilPage() {
        updateWaitWithDefaultExceptions();
        return new FluentWaitPageMatcher(this, driver);
    }

    /**
     * Start a wait on the given page.
     *
     * @param page - the page to work with
     * @return a page matcher bound to the given page
     */
    public FluentWaitPageMatcher untilPage(FluentPage page) {
        updateWaitWithDefaultExceptions();
        return new FluentWaitPageMatcher(this, driver, page);
    }

    /**
     * Return the current driver
     *
     * @return the WebDriver this wait operates on
     */
    public WebDriver getDriver() {
        return driver;
    }

    @Override
    public <T> T until(Function<? super Fluent, T> isTrue) {
        updateWaitWithDefaultExceptions();
        return wait.until(isTrue);
    }

    // Apply the default ignore list (StaleElementReferenceException) unless the
    // caller opted out via withNoDefaultsException().
    private void updateWaitWithDefaultExceptions() {
        if (useDefaultException) {
            wait.ignoring(StaleElementReferenceException.class);
        }
    }

    // True once withMessage() has been called with a custom timeout message.
    public boolean useCustomMessage() {
        return useCustomMessage;
    }
}
openconnectio/openmanage
pkg/containersvc/mockinfo.go
package containersvc type MockContainerSvcInfo struct { } func NewMockContainerSvcInfo() *MockContainerSvcInfo { return &MockContainerSvcInfo{} } func (m *MockContainerSvcInfo) GetLocalContainerInstanceID() string { return "local-containerInstanceID" } func (m *MockContainerSvcInfo) GetContainerClusterID() string { return "local-clusterID" }
ShipChain/baseline
examples/radish34/ui/src/components/AddSKUField.js
<filename>examples/radish34/ui/src/components/AddSKUField.js import React, { useState } from 'react'; import PropTypes from 'prop-types'; import { Field, ErrorMessage } from 'formik'; import Typography from '@material-ui/core/Typography'; import Button from '@material-ui/core/Button'; import TextField from '@material-ui/core/TextField'; import Table from '@material-ui/core/Table'; import TableBody from '@material-ui/core/TableBody'; import TableHead from '@material-ui/core/TableHead'; import TableRow from '@material-ui/core/TableRow'; import TableCell from '@material-ui/core/TableCell'; import { makeStyles } from '@material-ui/core/styles'; import Add from '@material-ui/icons/Add'; const useStyles = makeStyles(() => ({ table: { marginBottom: '2rem', marginTop: '2rem', }, borderlessTableCell: { borderBottom: 'none', }, field: { width: '100%', }, tableCellButton: { borderTop: '1px solid rgba(224, 224, 224, 1)', }, errorMessage: { color: 'red', }, icon: { margin: '0.5rem', }, button: { color: '#007BFF', }, })); const AddSKUField = ({ formik, volumeField = false, displayOverride }) => { const classes = useStyles(); const [disabled, setDisabled] = useState(false); const [display, setDisplay] = useState(false); return ( <Table className={classes.table}> <TableHead> <TableRow> <TableCell>SKU</TableCell> <TableCell>{volumeField ? 'Volume' : 'Description'}</TableCell> </TableRow> </TableHead> <TableBody> {display || displayOverride ? <TableRow> <TableCell> <Field id="sku" component={TextField} onChange={formik.handleChange} disabled={disabled} className={classes.field} value={formik.values.sku} /> </TableCell> <TableCell> {volumeField ? 
<Field id="volume" component={TextField} onChange={formik.handleChange} disabled={disabled} className={classes.field} value={formik.values.volume} /> : <Field id="skuDescription" component={TextField} onChange={formik.handleChange} disabled={disabled} className={classes.field} /> } </TableCell> <TableCell className={classes.tableCellButton}> {!disabled ? ( <Button type="button" onClick={() => setDisabled(true)} disabled={formik.values.sku === ''} > ADD </Button> ) : ( <Button type="button" onClick={() => setDisabled(false)}> EDIT </Button> )} <Button type="button" onClick={() => setDisplay(false)} > Remove </Button> </TableCell> </TableRow> : <TableRow> <TableCell className={classes.borderlessTableCell}> <Button className={classes.button} type="button" onClick={() => setDisplay(true)}> <Add className={classes.icon} /> Add Item </Button> </TableCell> </TableRow> } {Object.keys(formik.errors).length > 0 && <TableRow> <TableCell className={classes.borderlessTableCell}> <ErrorMessage name="sku" render={msg => <Typography className={classes.errorMessage}>{msg}</Typography>} /> </TableCell> <TableCell className={classes.borderlessTableCell}> <ErrorMessage name="skuDescription" render={msg => <Typography className={classes.errorMessage}>{msg}</Typography>} /> </TableCell> </TableRow> } </TableBody> </Table> ); }; AddSKUField.propTypes = { formik: PropTypes.shape({}).isRequired, }; export default AddSKUField;
christophersiem/MoodBoos
frontend/src/components/dialogs/FriendDeleteDialog.js
<reponame>christophersiem/MoodBoos import React, { useState } from 'react'; import Button from '@material-ui/core/Button'; import Dialog from '@material-ui/core/Dialog'; import DialogTitle from '@material-ui/core/DialogTitle'; import DialogContent from '@material-ui/core/DialogContent'; import DialogContentText from '@material-ui/core/DialogContentText'; import DialogActions from '@material-ui/core/DialogActions'; import Grid from '@material-ui/core/Grid'; import DeleteIcon from '@material-ui/icons/Delete'; import { deleteFriend } from '../../utils/friends-utils'; import { makeStyles } from '@material-ui/core/styles'; const useStyles = makeStyles(() => ({ delete: { color: '#951010', margin: '15px 0px', }, })); export default function FriendDeleteDialog({ friend, handleDeleteSuccess }) { const classes = useStyles(); const [open, setOpen] = useState(false); const handleClickOpen = () => { setOpen(true); }; const handleClose = () => { setOpen(false); }; function handleDelete(friend) { deleteFriend(friend) .then(() => { handleClose(); handleDeleteSuccess(); }) .catch((e) => console.error(e)); } return ( <> <DeleteIcon style={{ color: 'rgb(185 77 77)' }} onClick={handleClickOpen} /> <Dialog open={open} onClose={handleClose} aria-labelledby="caution" aria-describedby="alert-dialog-description" > <DialogTitle id="caution">{'Caution'}</DialogTitle> <DialogContent> <DialogContentText id="alert-dialog-description"> Are you sure you want to remove {friend} from your friendlist? </DialogContentText> </DialogContent> <DialogActions> <Grid container justify="space-around" alignItems="center"> <Grid item> <Button className={classes.delete} onClick={() => handleDelete(friend)} > Delete </Button> </Grid> <Button onClick={handleClose} color="primary" autoFocus> Cancel </Button> </Grid> </DialogActions> </Dialog> </> ); }
hamusuke0323/TwitterForMinecraftFabric
src/main/java/com/hamusuke/twitter4mc/gui/widget/ScalableImageButton.java
package com.hamusuke.twitter4mc.gui.widget;

import com.mojang.blaze3d.systems.RenderSystem;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.client.gui.widget.ButtonWidget;
import net.minecraft.client.util.NarratorManager;
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.text.Text;
import net.minecraft.util.Identifier;

/**
 * A button widget that draws a scaled region of a texture instead of the
 * vanilla button background. The clickable area (widgetWidth/widgetHeight)
 * is independent of the drawn texture size (renderWidth/renderHeight * scale).
 */
@Environment(EnvType.CLIENT)
public class ScalableImageButton extends ButtonWidget {
	// Texture atlas the button image is read from.
	protected final Identifier texture;
	// Top-left corner of the image inside the atlas.
	protected final int u;
	protected final int v;
	// Added to v while the cursor hovers the button, selecting the hovered sprite.
	protected final int hoveredVOffset;
	// Full dimensions of the texture atlas.
	protected final int textureWidth;
	protected final int textureHeight;
	// Uniform scale factor applied when drawing.
	protected final float scale;
	// Size of the region drawn from the atlas (before scaling).
	protected final int renderWidth;
	protected final int renderHeight;

	/**
	 * Convenience constructor assuming a standard 256x256 texture atlas.
	 */
	public ScalableImageButton(int x, int y, int widgetWidth, int widgetHeight, int renderWidth, int renderHeight, float scale, int u, int v, int hoveredVOffset, Identifier texture, ButtonWidget.PressAction pressAction) {
		this(x, y, widgetWidth, widgetHeight, renderWidth, renderHeight, scale, u, v, hoveredVOffset, texture, 256, 256, pressAction);
	}

	/**
	 * Convenience constructor with no narrated label.
	 */
	public ScalableImageButton(int x, int y, int widgetWidth, int widgetHeight, int renderWidth, int renderHeight, float scale, int u, int v, int hoveredVOffset, Identifier texture, int textureWidth, int textureHeight, ButtonWidget.PressAction pressAction) {
		this(x, y, widgetWidth, widgetHeight, renderWidth, renderHeight, scale, u, v, hoveredVOffset, texture, textureWidth, textureHeight, pressAction, NarratorManager.EMPTY);
	}

	/**
	 * Full constructor.
	 *
	 * @param x              left edge of the clickable area
	 * @param y              top edge of the clickable area
	 * @param widgetWidth    width of the clickable area
	 * @param widgetHeight   height of the clickable area
	 * @param renderWidth    width of the texture region to draw (pre-scale)
	 * @param renderHeight   height of the texture region to draw (pre-scale)
	 * @param scale          uniform scale applied to the drawn region
	 * @param u              x offset of the sprite inside the atlas
	 * @param v              y offset of the sprite inside the atlas
	 * @param hoveredVOffset v offset added while hovered
	 * @param texture        the texture atlas
	 * @param textureWidth   total atlas width
	 * @param textureHeight  total atlas height
	 * @param pressAction    callback invoked on click
	 * @param text           narrated/label text
	 */
	public ScalableImageButton(int x, int y, int widgetWidth, int widgetHeight, int renderWidth, int renderHeight, float scale, int u, int v, int hoveredVOffset, Identifier texture, int textureWidth, int textureHeight, ButtonWidget.PressAction pressAction, Text text) {
		super(x, y, widgetWidth, widgetHeight, text, pressAction);
		this.textureWidth = textureWidth;
		this.textureHeight = textureHeight;
		this.u = u;
		this.v = v;
		this.hoveredVOffset = hoveredVOffset;
		this.texture = texture;
		this.scale = scale;
		this.renderWidth = renderWidth;
		this.renderHeight = renderHeight;
	}

	// Move the button to a new position.
	public void setPos(int x, int y) {
		this.x = x;
		this.y = y;
	}

	/**
	 * Draws the (possibly hovered) sprite at the button position, scaled by
	 * {@code scale}. The translate is applied before the scale so x/y stay in
	 * unscaled screen coordinates.
	 */
	public void renderButton(MatrixStack matrices, int mouseX, int mouseY, float delta) {
		RenderSystem.setShaderTexture(0, this.texture);
		matrices.push();
		RenderSystem.disableDepthTest();
		int i = this.v;
		if (this.isHovered()) {
			// Select the hover variant of the sprite.
			i += this.hoveredVOffset;
		}

		matrices.translate(this.x, this.y, 0.0F);
		matrices.scale(this.scale, this.scale, this.scale);
		drawTexture(matrices, 0, 0, (float) this.u, (float) i, this.renderWidth, this.renderHeight, this.textureWidth, this.textureHeight);
		RenderSystem.enableDepthTest();
		matrices.pop();
	}
}
WalterHu/DemonCat
app/src/main/java/com/demoncat/dcapp/mvp/impl/MvpActivity.java
<reponame>WalterHu/DemonCat /** * Copyright 2018 hubohua * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.demoncat.dcapp.mvp.impl; import android.view.View; import com.demoncat.dcapp.R; import com.demoncat.dcapp.mvp.BasePresenter; import com.demoncat.dcapp.mvp.BaseView; /** * @Class: MvpActivity * @Description: java类作用描述 * @Author: hubohua * @CreateDate: 2018/4/12 */ public class MvpActivity extends BaseView implements MvpPresenter.MvpView { private MvpPresenter mPresenter = new MvpPresenter(this); @Override protected BasePresenter[] getPresenter() { return new BasePresenter[]{mPresenter}; } @Override protected int getLayoutId() { return R.layout.activity_mvp; } @Override protected void initView() { findViewById(R.id.btn_action).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (mPresenter != null) { mPresenter.action1(); } } }); } @Override protected void initData() { // do some data initialize } }
45258E9F/IntPTI
src/org/sosy_lab/cpachecker/cpa/range/summary/RangeSummaryTransferRelation.java
<reponame>45258E9F/IntPTI /* * IntPTI: integer error fixing by proper-type inference * Copyright (c) 2017. * * Open-source component: * * CPAchecker * Copyright (C) 2007-2014 <NAME> * * Guava: Google Core Libraries for Java * Copyright (C) 2010-2006 Google * * */ package org.sosy_lab.cpachecker.cpa.range.summary; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import org.sosy_lab.common.configuration.Configuration; import org.sosy_lab.common.configuration.InvalidConfigurationException; import org.sosy_lab.common.configuration.Option; import org.sosy_lab.common.configuration.Options; import org.sosy_lab.common.log.LogManager; import org.sosy_lab.cpachecker.cfa.CFA; import org.sosy_lab.cpachecker.cfa.ast.c.CArrayDesignator; import org.sosy_lab.cpachecker.cfa.ast.c.CArrayRangeDesignator; import org.sosy_lab.cpachecker.cfa.ast.c.CAssignment; import org.sosy_lab.cpachecker.cfa.ast.c.CBinaryExpression; import org.sosy_lab.cpachecker.cfa.ast.c.CBinaryExpression.BinaryOperator; import org.sosy_lab.cpachecker.cfa.ast.c.CDeclaration; import org.sosy_lab.cpachecker.cfa.ast.c.CDesignatedInitializer; import org.sosy_lab.cpachecker.cfa.ast.c.CDesignator; import org.sosy_lab.cpachecker.cfa.ast.c.CExpression; import org.sosy_lab.cpachecker.cfa.ast.c.CFieldDesignator; import org.sosy_lab.cpachecker.cfa.ast.c.CFunctionCall; import org.sosy_lab.cpachecker.cfa.ast.c.CFunctionCallAssignmentStatement; import org.sosy_lab.cpachecker.cfa.ast.c.CFunctionCallExpression; import 
org.sosy_lab.cpachecker.cfa.ast.c.CFunctionCallStatement; import org.sosy_lab.cpachecker.cfa.ast.c.CFunctionDeclaration; import org.sosy_lab.cpachecker.cfa.ast.c.CIdExpression; import org.sosy_lab.cpachecker.cfa.ast.c.CInitializer; import org.sosy_lab.cpachecker.cfa.ast.c.CInitializerExpression; import org.sosy_lab.cpachecker.cfa.ast.c.CInitializerList; import org.sosy_lab.cpachecker.cfa.ast.c.CLeftHandSide; import org.sosy_lab.cpachecker.cfa.ast.c.CParameterDeclaration; import org.sosy_lab.cpachecker.cfa.ast.c.CRightHandSide; import org.sosy_lab.cpachecker.cfa.ast.c.CSimpleDeclaration; import org.sosy_lab.cpachecker.cfa.ast.c.CStatement; import org.sosy_lab.cpachecker.cfa.ast.c.CStringLiteralExpression; import org.sosy_lab.cpachecker.cfa.ast.c.CVariableDeclaration; import org.sosy_lab.cpachecker.cfa.model.BlankEdge; import org.sosy_lab.cpachecker.cfa.model.CFAEdge; import org.sosy_lab.cpachecker.cfa.model.CFANode; import org.sosy_lab.cpachecker.cfa.model.FunctionEntryNode; import org.sosy_lab.cpachecker.cfa.model.FunctionExitNode; import org.sosy_lab.cpachecker.cfa.model.MultiEdge; import org.sosy_lab.cpachecker.cfa.model.c.CAssumeEdge; import org.sosy_lab.cpachecker.cfa.model.c.CDeclarationEdge; import org.sosy_lab.cpachecker.cfa.model.c.CFunctionCallEdge; import org.sosy_lab.cpachecker.cfa.model.c.CFunctionEntryNode; import org.sosy_lab.cpachecker.cfa.model.c.CFunctionReturnEdge; import org.sosy_lab.cpachecker.cfa.model.c.CFunctionSummaryEdge; import org.sosy_lab.cpachecker.cfa.model.c.CReturnStatementEdge; import org.sosy_lab.cpachecker.cfa.model.c.CStatementEdge; import org.sosy_lab.cpachecker.cfa.types.MachineModel; import org.sosy_lab.cpachecker.cfa.types.c.CArrayType; import org.sosy_lab.cpachecker.cfa.types.c.CComplexType.ComplexTypeKind; import org.sosy_lab.cpachecker.cfa.types.c.CCompositeType; import org.sosy_lab.cpachecker.cfa.types.c.CCompositeType.CCompositeTypeMemberDeclaration; import org.sosy_lab.cpachecker.cfa.types.c.CType; import 
org.sosy_lab.cpachecker.core.algorithm.summary.computer.RangeSummaryComputer; import org.sosy_lab.cpachecker.core.defaults.SingleEdgeTransferRelation; import org.sosy_lab.cpachecker.core.interfaces.AbstractState; import org.sosy_lab.cpachecker.core.interfaces.Precision; import org.sosy_lab.cpachecker.core.interfaces.TransferRelationWithNarrowingSupport; import org.sosy_lab.cpachecker.core.summary.instance.access.AccessFunctionInstance; import org.sosy_lab.cpachecker.core.summary.instance.access.AccessResult; import org.sosy_lab.cpachecker.core.summary.instance.access.AccessSummaryStore; import org.sosy_lab.cpachecker.core.summary.instance.range.RangeExternalLoopInstance; import org.sosy_lab.cpachecker.core.summary.instance.range.RangeFunctionInstance; import org.sosy_lab.cpachecker.core.summary.instance.range.RangeFunctionPrecondition; import org.sosy_lab.cpachecker.core.summary.instance.range.RangeInternalLoopInstance; import org.sosy_lab.cpachecker.core.summary.manage.FunctionSummaryStore; import org.sosy_lab.cpachecker.core.summary.manage.SummaryProvider; import org.sosy_lab.cpachecker.cpa.bind.BindState; import org.sosy_lab.cpachecker.cpa.range.CompInteger; import org.sosy_lab.cpachecker.cpa.range.ExpressionRangeVisitor; import org.sosy_lab.cpachecker.cpa.range.LeftHandAccessPathVisitor; import org.sosy_lab.cpachecker.cpa.range.Range; import org.sosy_lab.cpachecker.cpa.range.RangeState; import org.sosy_lab.cpachecker.cpa.range.TypeRangeVisitor; import org.sosy_lab.cpachecker.cpa.range.checker.RangeRefineVisitor; import org.sosy_lab.cpachecker.cpa.range.util.BindRefinePair; import org.sosy_lab.cpachecker.cpa.range.util.CompIntegers; import org.sosy_lab.cpachecker.cpa.range.util.RangeFunctionAdapter; import org.sosy_lab.cpachecker.cpa.range.util.Ranges; import org.sosy_lab.cpachecker.exceptions.CPATransferException; import org.sosy_lab.cpachecker.exceptions.UnrecognizedCCodeException; import org.sosy_lab.cpachecker.util.AbstractStates; import 
org.sosy_lab.cpachecker.util.CFAUtils; import org.sosy_lab.cpachecker.util.LoopStructure; import org.sosy_lab.cpachecker.util.LoopStructure.Loop; import org.sosy_lab.cpachecker.util.Pair; import org.sosy_lab.cpachecker.util.Types; import org.sosy_lab.cpachecker.util.access.AccessPath; import org.sosy_lab.cpachecker.util.access.ArrayConstIndexSegment; import org.sosy_lab.cpachecker.util.access.FieldAccessSegment; import org.sosy_lab.cpachecker.util.collections.tree.PathCopyingPersistentTree; import org.sosy_lab.cpachecker.util.collections.tree.PathCopyingPersistentTree.PersistentTreeNode; import org.sosy_lab.cpachecker.util.globalinfo.CFAInfo; import org.sosy_lab.cpachecker.util.globalinfo.GlobalInfo; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; @Options(prefix = "cpa.range.summary") public class RangeSummaryTransferRelation extends SingleEdgeTransferRelation implements TransferRelationWithNarrowingSupport { @Option(secure = true, description = "maximum number of array elements tracked for each array " + "object") private int maxTrackedArrayElements = 100; private final MachineModel machineModel; // perform value refinement along def-use chain (strong value update?) 
// pairs of (expression, restricting range) recorded while handling assume edges;
// consumed elsewhere to refine further expressions along the def-use chain
private final List<BindRefinePair> forFurtherRefine;
private RangeSummaryComputer summaryComputer;

/* **************** */
/* loop information */
/* **************** */

// loop-entering edge -> the loop it enters
// static: built once from the CFA and shared by all instances — assumes a single CFA
// per process (TODO confirm)
private static Map<CFAEdge, Loop> entry2Loop = null;
// loop-exiting edge -> the loop it leaves
private static Map<CFAEdge, Loop> exit2Loop = null;

/* ************* */
/* summary store */
/* ************* */

// summary of current function (computed at the function exit)
// to be cleared after finished a function entry
private RangeFunctionInstance functionSummary = null;

// loop entry -> its successor (must be in certain loop, and possibly a loop head)
// to be cleared after finished a function entry
private final Map<CFAEdge, CFANode> entry2Node = new HashMap<>();

// loop node -> range state
// to be cleared after finished a function entry
private final Map<CFANode, RangeStateStore> node2State = new HashMap<>();

// the state on the successor of loop exit (for external loop summary)
// to be cleared after finished a function entry
private final Map<CFAEdge, RangeState> exit2State = new HashMap<>();

// the set of all global names
// this should never be cleared
private static Set<String> globalNames = null;

// we store the state after global declarations
// if the main function is analyzed for multiple times, we skip traversing the global
// declarations to improve the performance
// Note: we do not use multi-map here for performance reason
// this should never be cleared
private static final HashMap<CFAEdge, RangeState> globalDeclarationStateMap = Maps.newHashMap();

/* Function summary is stored in the summary computer. For each function, its summary is
   updated soon after the computer finishes handling it.
 */

/**
 * Builds the transfer relation: injects configuration options, caches the machine model,
 * builds the (static) loop entry/exit edge maps from the CFA's loop structure, and records
 * the set of global variable names.
 *
 * @param pConfig configuration to inject {@code cpa.range.summary.*} options from
 * @param pLogger logger (currently unused by this constructor — kept for the expected
 *                constructor signature; TODO confirm)
 * @throws InvalidConfigurationException if no CFA is available in {@link GlobalInfo}
 */
public RangeSummaryTransferRelation(Configuration pConfig, LogManager pLogger)
    throws InvalidConfigurationException {
  pConfig.inject(this);
  CFAInfo cfaInfo = GlobalInfo.getInstance().getCFAInfo().orNull();
  if (cfaInfo == null) {
    throw new InvalidConfigurationException("CFA required for range summary analysis");
  }
  CFA cfa = cfaInfo.getCFA();
  machineModel = cfa.getMachineModel();
  // build loop information
  LoopStructure loops = cfa.getLoopStructure().orNull();
  ImmutableMap.Builder<CFAEdge, Loop> loopEntry = ImmutableMap.builder();
  ImmutableMap.Builder<CFAEdge, Loop> loopExit = ImmutableMap.builder();
  // the maps are static; build them only on the first instantiation
  if (entry2Loop == null || exit2Loop == null) {
    if (loops != null) {
      for (final Loop loop : loops.getAllLoops()) {
        // an incoming function-return edge only counts as a loop entry when the call
        // originated outside the loop; returns from calls made inside the loop are skipped
        Iterable<CFAEdge> incomingEdges = FluentIterable.from(loop.getIncomingEdges()).filter
            (new Predicate<CFAEdge>() {
              @Override
              public boolean apply(CFAEdge pEdge) {
                if (pEdge instanceof CFunctionReturnEdge) {
                  CFANode caller = ((CFunctionReturnEdge) pEdge).getSummaryEdge()
                      .getPredecessor();
                  return !loop.getLoopNodes().contains(caller);
                }
                return true;
              }
            });
        // calls leaving the loop are not loop exits — control returns to the loop
        Iterable<CFAEdge> outgoingEdges = Iterables.filter(loop.getOutgoingEdges(),
            Predicates.not(Predicates.instanceOf(CFunctionCallEdge.class)));
        for (CFAEdge edge : incomingEdges) {
          loopEntry.put(edge, loop);
        }
        for (CFAEdge edge : outgoingEdges) {
          loopExit.put(edge, loop);
        }
      }
    }
    entry2Loop = loopEntry.build();
    exit2Loop = loopExit.build();
  }
  forFurtherRefine = Lists.newArrayList();
  collectGlobalNames(cfa.getMainFunction());
}

/**
 * Collect global names by traversing all global declarations in the main function block.
 */
private void collectGlobalNames(CFANode node) {
  // populated once and shared across instances
  if (globalNames != null) {
    return;
  }
  globalNames = new HashSet<>();
  CFANode currentNode = node;
  // walk the straight-line prefix of the CFA (blank edges and global declaration edges only);
  // the first branching node or non-declaration edge ends the global-declaration block
  while (currentNode.getNumLeavingEdges() == 1) {
    CFAEdge nextEdge = currentNode.getLeavingEdge(0);
    if (nextEdge instanceof BlankEdge) {
      currentNode = nextEdge.getSuccessor();
    } else if (nextEdge instanceof CDeclarationEdge) {
      CDeclaration declaration = ((CDeclarationEdge) nextEdge).getDeclaration();
      if (!declaration.isGlobal()) {
        break;
      }
      // only variable declarations are recorded (functions/types are skipped)
      if (declaration instanceof CVariableDeclaration) {
        globalNames.add(declaration.getQualifiedName());
      }
      currentNode = nextEdge.getSuccessor();
    } else {
      break;
    }
  }
}

/**
 * Computes successors for one edge (widening/summary-collecting mode). A {@link MultiEdge}
 * is processed edge-by-edge, threading every intermediate state through each inner edge.
 */
@Override
public Collection<? extends AbstractState> getAbstractSuccessorsForEdge(
    AbstractState state, List<AbstractState> otherStates, Precision precision, CFAEdge cfaEdge)
    throws CPATransferException, InterruptedException {
  Collection<RangeState> results;
  if (cfaEdge instanceof MultiEdge) {
    MultiEdge multiEdge = (MultiEdge) cfaEdge;
    Queue<RangeState> processQueue = new ArrayDeque<>();
    Queue<RangeState> resultQueue = new ArrayDeque<>();
    processQueue.add((RangeState) state);
    for (CFAEdge edge : multiEdge) {
      // drain the process queue through this inner edge, then swap queues for the next edge
      while (!processQueue.isEmpty()) {
        RangeState nextState = processQueue.poll();
        Collection<RangeState> nextResults = getAbstractSuccessorsForEdge0(nextState,
            otherStates, edge, false);
        resultQueue.addAll(nextResults);
      }
      while (!resultQueue.isEmpty()) {
        processQueue.add(resultQueue.poll());
      }
    }
    results = ImmutableSet.copyOf(processQueue);
  } else {
    results = getAbstractSuccessorsForEdge0((RangeState) state, otherStates, cfaEdge, false);
  }
  return results;
}

/**
 * Dispatches a single CFA edge to the matching handler.
 *
 * @param forNarrow true when running under narrowing; in that mode no loop summary is
 *                  collected on the way out
 */
private Collection<RangeState> getAbstractSuccessorsForEdge0(
    RangeState pState, List<AbstractState> pOtherStates, CFAEdge pCFAEdge, boolean forNarrow)
    throws CPATransferException {
  Collection<RangeState> successors;
  switch (pCFAEdge.getEdgeType()) {
    case DeclarationEdge:
      CDeclarationEdge declEdge = (CDeclarationEdge) pCFAEdge;
      CDeclaration declaration = declEdge.getDeclaration();
      if (declaration instanceof CVariableDeclaration && declaration.isGlobal()) {
        // global declarations are state-independent: cache the post-state per edge so
        // repeated analyses of main skip re-evaluating the global declaration block
        if (globalDeclarationStateMap.containsKey(pCFAEdge)) {
          RangeState result = globalDeclarationStateMap.get(pCFAEdge);
          return Collections.singleton(result);
        } else {
          successors = handleDeclaration(pState, pOtherStates, (CDeclarationEdge) pCFAEdge);
          if (successors.size() > 0) {
            globalDeclarationStateMap.put(pCFAEdge, successors.iterator().next());
          }
          return successors;
        }
      }
      successors = handleDeclaration(pState, pOtherStates, (CDeclarationEdge) pCFAEdge);
      break;
    case StatementEdge:
      successors = handleStatement(pState, pOtherStates, (CStatementEdge) pCFAEdge);
      break;
    case AssumeEdge:
      successors = handleAssumption(pState, pOtherStates, (CAssumeEdge) pCFAEdge);
      break;
    case FunctionCallEdge:
      successors = handleFunctionCall(pState, pOtherStates, (CFunctionCallEdge) pCFAEdge);
      break;
    case FunctionReturnEdge:
      successors = handleFunctionReturn(pState, (CFunctionReturnEdge) pCFAEdge, forNarrow);
      break;
    case ReturnStatementEdge:
      successors = handleReturnStatement(pState, pOtherStates, (CReturnStatementEdge) pCFAEdge,
          forNarrow);
      break;
    default:
      // blank/other edges leave the state untouched
      successors = ImmutableList.of(pState);
  }
  // handle loop summary here
  return forNarrow ? successors : collectLoopSummary(successors, pCFAEdge);
}

/**
 * Edge-less successor computation is not supported by this transfer relation.
 */
@Override
public Collection<? extends AbstractState> getAbstractSuccessorsUnderNarrowing(
    AbstractState state, List<AbstractState> otherStates, Precision precision)
    throws CPATransferException, InterruptedException {
  throw new UnsupportedOperationException("Computing successors without edge not supported");
}

/**
 * Same dispatch as {@link #getAbstractSuccessorsForEdge} but with the narrowing flag set,
 * which suppresses summary collection.
 */
@Override
public Collection<? extends AbstractState> getAbstractSuccessorsForEdgeUnderNarrowing(
    AbstractState state, List<AbstractState> otherStates, Precision precision, CFAEdge cfaEdge)
    throws CPATransferException, InterruptedException {
  // under narrowing, it is unnecessary to collect summary info
  // the transfer relation should be the same as the ordinary range transfer
  Collection<RangeState> results;
  if (cfaEdge instanceof MultiEdge) {
    MultiEdge multiEdge = (MultiEdge) cfaEdge;
    Queue<RangeState> processQueue = new ArrayDeque<>();
    Queue<RangeState> resultQueue = new ArrayDeque<>();
    processQueue.add((RangeState) state);
    for (CFAEdge edge : multiEdge) {
      while (!processQueue.isEmpty()) {
        RangeState nextState = processQueue.poll();
        Collection<RangeState> nextResults = getAbstractSuccessorsForEdge0(nextState,
            otherStates, edge, true);
        resultQueue.addAll(nextResults);
      }
      while (!resultQueue.isEmpty()) {
        processQueue.add(resultQueue.poll());
      }
    }
    results = ImmutableSet.copyOf(processQueue);
  } else {
    results = getAbstractSuccessorsForEdge0((RangeState) state, otherStates, cfaEdge, true);
  }
  return results;
}

/* ***************** */
/* CFA edge handlers */
/* ***************** */

/**
 * Handles a (local) declaration edge: initializes the declared object's ranges from its
 * type, then refines them from the initializer if one is present.
 */
private Collection<RangeState> handleDeclaration(
    RangeState pState, List<AbstractState> pOtherStates, CDeclarationEdge pEdge)
    throws CPATransferException {
  CDeclaration declaration = pEdge.getDeclaration();
  if (declaration instanceof CVariableDeclaration) {
    // we should create a back-up copy of the original state, otherwise the old state would be
    // overwritten
    RangeState newState = RangeState.copyOf(pState);
    CVariableDeclaration varDecl = (CVariableDeclaration) declaration;
    CType declaredType = varDecl.getType();
    AccessPath declarationPath = getAccessPath(varDecl);
    boolean isGlobal = varDecl.isGlobal();
    TypeRangeVisitor typeRangeVisitor = new TypeRangeVisitor(declarationPath,
        maxTrackedArrayElements, machineModel, isGlobal);
    PathCopyingPersistentTree<String, Range> newRanges = declaredType.accept(typeRangeVisitor);
    if
    (newRanges.isEmpty()) {
      // add necessary declaration to prevent false undeclared error
      newState.addRange(declarationPath, Range.UNBOUND, true);
    } else {
      newState.addAllRanges(newRanges);
    }
    // if the initializer exists, we should refine ranges of some fields according to the
    // expressions in the initializer
    CInitializer initializer = varDecl.getInitializer();
    if (initializer != null) {
      handleInitializer(newState, pOtherStates, Lists.newArrayList(declarationPath),
          declaredType, initializer);
    }
    return Collections.singleton(newState);
  }
  // no need to copy range state here
  return Collections.singleton(pState);
}

/**
 * Update the state (as the first parameter of this method) by computing expressions in the
 * initializer. Recurses structurally over expression initializers, designated initializers
 * and initializer lists, mutating {@code newState} in place.
 *
 * @param newState the state to be updated (mutated in place)
 * @param otherStates other components of the state
 * @param declarationPaths the prefix access path(s); multiple paths arise from array range
 *                         designators such as {@code [2 ... 4]}
 * @param declaredType the type of current prefix access path
 * @param pInitializer the initializer to be handled
 * @throws UnrecognizedCCodeException if an expression cannot be evaluated
 */
private void handleInitializer(
    RangeState newState,
    List<AbstractState> otherStates,
    List<AccessPath> declarationPaths,
    CType declaredType,
    CInitializer pInitializer) throws UnrecognizedCCodeException {
  if (pInitializer instanceof CInitializerExpression) {
    CExpression exp = ((CInitializerExpression) pInitializer).getExpression();
    // If the initializer is a string literal, we should treat it as an array list initializer:
    // one range per character plus the terminating '\0'.
    if (exp instanceof CStringLiteralExpression) {
      String content = ((CStringLiteralExpression) exp).getContentString();
      for (int i = 0; i < content.length(); i++) {
        char ch = content.charAt(i);
        Range chRange = new Range(ch);
        for (AccessPath singlePath : declarationPaths) {
          AccessPath newPath = AccessPath.copyOf(singlePath);
          newPath.appendSegment(new ArrayConstIndexSegment(i));
          newState.addRange(newPath, chRange, true);
        }
      }
      // Don't forget to append a ZERO segment at the tail of string
      for (AccessPath singlePath : declarationPaths) {
        AccessPath newPath = AccessPath.copyOf(singlePath);
        newPath.appendSegment(new ArrayConstIndexSegment(content.length()));
        newState.addRange(newPath, Range.ZERO, true);
      }
    } else {
      for (AccessPath singlePath : declarationPaths) {
        addRange(newState, otherStates, singlePath, exp, null);
      }
    }
  } else if (pInitializer instanceof CDesignatedInitializer) {
    // three kinds of designators:
    // (1) {@link CArrayDesignator}: [2]
    // (2) {@link CFieldDesignator}: .name
    // (3) {@link CArrayRangeDesignator}: [2 ... 4]
    CDesignatedInitializer designatedInitializer = (CDesignatedInitializer) pInitializer;
    List<CDesignator> designators = designatedInitializer.getDesignators();
    CInitializer rightHandSide = designatedInitializer.getRightHandSide();
    // Note: multiple access paths are led by array range designator
    List<AccessPath> accumulatedPaths = new ArrayList<>(declarationPaths);
    for (CDesignator designator : designators) {
      List<AccessPath> resultPaths = new ArrayList<>();
      if (designator instanceof CArrayDesignator) {
        CExpression indexExp = ((CArrayDesignator) designator).getSubscriptExpression();
        Range indexRange = evaluateRange(newState, otherStates, indexExp).compress();
        CompInteger intNum = indexRange.numOfIntegers();
        // only a singleton range yields a usable concrete index
        if (intNum.equals(CompInteger.ONE)) {
          Long concreteIndex = indexRange.getLow().longValue();
          if (concreteIndex == null) {
            // we don't know what to do
            // so we just give up traversal of designators
            return;
          }
          ArrayConstIndexSegment newSegment = new ArrayConstIndexSegment(concreteIndex);
          for (AccessPath singlePath : accumulatedPaths) {
            AccessPath newPath = AccessPath.copyOf(singlePath);
            newPath.appendSegment(newSegment);
            resultPaths.add(newPath);
          }
        } else {
          return;
        }
        // for now, we do not support array subscript with uncertain index value
        CArrayType arrayType = Types.extractArrayType(declaredType);
        if (arrayType == null) {
          throw new UnsupportedOperationException("Unsupported type for array " + declaredType);
        }
        declaredType = arrayType.getType();
      } else if (designator instanceof CFieldDesignator) {
        String fieldName = ((CFieldDesignator) designator).getFieldName();
        FieldAccessSegment newSegment = new FieldAccessSegment(fieldName);
        for (AccessPath singlePath : accumulatedPaths) {
          AccessPath newPath = AccessPath.copyOf(singlePath);
          newPath.appendSegment(newSegment);
          resultPaths.add(newPath);
        }
        CCompositeType compositeType = Types.extractCompositeType(declaredType);
        if (compositeType == null) {
          throw new UnsupportedOperationException("Unsupported type for structure " +
              declaredType);
        }
        CCompositeTypeMemberDeclaration targetMember = Types.retrieveMemberByName
            (compositeType, fieldName);
        if (targetMember == null) {
          throw new UnsupportedOperationException("Specified field " + fieldName + " not found");
        }
        declaredType = targetMember.getType();
      } else {
        // array range designator: [floor ... ceil]; both bounds must be concrete
        CArrayRangeDesignator rangeDesignator = (CArrayRangeDesignator) designator;
        CExpression floor = rangeDesignator.getFloorExpression();
        CExpression ceil = rangeDesignator.getCeilExpression();
        Range floorRange = evaluateRange(newState, otherStates, floor).compress();
        Range ceilRange = evaluateRange(newState, otherStates, ceil).compress();
        if (floorRange.numOfIntegers().equals(CompInteger.ONE) &&
            ceilRange.numOfIntegers().equals(CompInteger.ONE)) {
          Long floorValue = floorRange.getLow().longValue();
          Long ceilValue = ceilRange.getLow().longValue();
          if (floorValue == null || ceilValue == null) {
            return;
          }
          int numOfTrackedValues = 1;
          for (long i = floorValue; i <= ceilValue; i++) {
            ArrayConstIndexSegment newSegment = new ArrayConstIndexSegment(i);
            for (AccessPath singlePath : accumulatedPaths) {
              AccessPath newPath = AccessPath.copyOf(singlePath);
              newPath.appendSegment(newSegment);
              resultPaths.add(newPath);
            }
            numOfTrackedValues++;
            if (numOfTrackedValues > maxTrackedArrayElements) {
              // if the number of tracked elements exceeds the threshold, we stop adding more
              // values
              break;
            }
          }
        } else {
          return;
        }
        CArrayType arrayType = Types.extractArrayType(declaredType);
        if (arrayType == null) {
          throw new UnsupportedOperationException("Unsupported type for array " + declaredType);
        }
        declaredType = arrayType.getType();
      }
      accumulatedPaths.clear();
      accumulatedPaths.addAll(resultPaths);
    }
    handleInitializer(newState, otherStates, accumulatedPaths, declaredType, rightHandSide);
  } else {
    // initializer list: declared type must be exactly one of array / composite
    CCompositeType compositeType = Types.extractCompositeType(declaredType);
    CArrayType arrayType = Types.extractArrayType(declaredType);
    if ((compositeType == null) == (arrayType == null)) {
      // unexpected case
      return;
    }
    CInitializerList initializerList = (CInitializerList) pInitializer;
    List<CInitializer> initializers = initializerList.getInitializers();
    if (arrayType != null) {
      // then, each value is treated as array element
      CType elementType = arrayType.getType();
      /* FIX: index records the position for upcoming initializer without any designators
       * Example: struct pointer { int x; int y };
       * struct pointer array[10] = { [2 ... 4].x = 3, [2 ... 3].y = 4, {8, 9} }
       * Here, the last pointer structure writes on the slot of index 4!
       */
      // NOTE: if index is -1, then we do not know the index of upcoming member without
      // explicit designator(s), and thus we exit this function
      long index = 0;
      for (CInitializer initializer : initializers) {
        if (initializer instanceof CDesignatedInitializer) {
          List<CDesignator> designatorList = ((CDesignatedInitializer) initializer)
              .getDesignators();
          if (designatorList.size() > 0) {
            CDesignator firstDesignator = designatorList.get(0);
            if (firstDesignator instanceof CArrayDesignator) {
              CExpression indexExp = ((CArrayDesignator) firstDesignator)
                  .getSubscriptExpression();
              Range indexRange = evaluateRange(newState, otherStates, indexExp).compress();
              if (indexRange.numOfIntegers().equals(CompInteger.ONE)) {
                Long indexValue = indexRange.getLow().longValue();
                if (indexValue == null) {
                  index = -1;
                  continue;
                }
                // next un-designated element follows the designated one
                index = indexValue + 1;
              } else {
                index = -1;
              }
            } else if (firstDesignator instanceof CArrayRangeDesignator) {
              // the next implicit slot follows the UPPER bound of the range
              CExpression ceilExp = ((CArrayRangeDesignator) firstDesignator).getCeilExpression();
              Range ceilRange = evaluateRange(newState, otherStates, ceilExp).compress();
              if (ceilRange.numOfIntegers().equals(CompInteger.ONE)) {
                Long indexValue = ceilRange.getLow().longValue();
                if (indexValue == null) {
                  index = -1;
                  continue;
                }
                index = indexValue + 1;
              } else {
                index = -1;
              }
            }
          }
          // recurse with the ARRAY type: the designator re-navigates from the array itself
          handleInitializer(newState, otherStates, declarationPaths, arrayType, initializer);
        } else {
          // the case without any designator
          if (index < 0) {
            // that really confuses us
            return;
          }
          List<AccessPath> copiedPaths = new ArrayList<>(declarationPaths.size());
          ArrayConstIndexSegment newSegment = new ArrayConstIndexSegment(index);
          for (AccessPath copiedPath : declarationPaths) {
            AccessPath newPath = AccessPath.copyOf(copiedPath);
            newPath.appendSegment(newSegment);
            copiedPaths.add(newPath);
          }
          index++;
          handleInitializer(newState, otherStates, copiedPaths, elementType, initializer);
        }
      }
    } else {
      Preconditions.checkNotNull(compositeType);
      List<CCompositeTypeMemberDeclaration> members = compositeType.getMembers();
      // handle a special case here: a struct whose only member is an array may be
      // initialized by a brace list meant for that array
      if (members.size() == 1) {
        CCompositeTypeMemberDeclaration onlyMember = Iterables.getOnlyElement(members);
        CArrayType onlyMemberType = Types.extractArrayType(onlyMember.getType());
        if (onlyMemberType != null) {
          List<AccessPath> copiedPaths = new ArrayList<>(declarationPaths.size());
          FieldAccessSegment newSegment = new FieldAccessSegment(onlyMember.getName());
          for (AccessPath copiedPath : declarationPaths) {
            AccessPath newPath = AccessPath.copyOf(copiedPath);
            newPath.appendSegment(newSegment);
            copiedPaths.add(newPath);
          }
          handleInitializer(newState, otherStates, copiedPaths, onlyMemberType, initializerList);
          return;
        }
      }
      int index = 0;
      for (CInitializer initializer : initializers) {
        if (initializer instanceof CDesignatedInitializer) {
          List<CDesignator> designatorList = ((CDesignatedInitializer) initializer)
              .getDesignators();
          if (designatorList.size() > 0) {
            CDesignator firstDesignator = designatorList.get(0);
            if (firstDesignator instanceof CFieldDesignator) {
              // the next implicit member follows the designated field
              String fieldName = ((CFieldDesignator) firstDesignator).getFieldName();
              for (int i = 0; i < members.size(); i++) {
                String memberName = members.get(i).getName();
                if (memberName.equals(fieldName)) {
                  index = i + 1;
                  break;
                }
              }
            }
          }
          handleInitializer(newState, otherStates, declarationPaths, compositeType, initializer);
        } else {
          CCompositeTypeMemberDeclaration targetMember = members.get(index);
          index++;
          String targetName = targetMember.getName();
          FieldAccessSegment newSegment = new FieldAccessSegment(targetName);
          List<AccessPath> copiedPaths = new ArrayList<>(declarationPaths.size());
          for (AccessPath singlePath : declarationPaths) {
            AccessPath newPath = AccessPath.copyOf(singlePath);
            newPath.appendSegment(newSegment);
            copiedPaths.add(newPath);
          }
          handleInitializer(newState, otherStates, copiedPaths, targetMember.getType(),
              initializer);
        }
      }
    }
  }
}

/**
 * Handles a statement edge: function calls (via adapter or function summary) and plain
 * assignments.
 */
private Collection<RangeState> handleStatement(
    RangeState pState, List<AbstractState> pOtherStates, CStatementEdge pEdge)
    throws CPATransferException {
  RangeState newState = RangeState.copyOf(pState);
  CStatement statement = pEdge.getStatement();
  // If we encounter an function call, we never enter this function. Since the boundary analysis
  // skips this function automatically, the resultant state should be the state after the
  // function. We try to find the summary for certain function. If the summary is found, we
  // use summary information to update the state after skipping the function. Otherwise, we
  // assume that the returned value is TOP.
  if (statement instanceof CFunctionCall) {
    // no assignment, but possibly contains side-effect on (global variables)
    CFunctionCallExpression callExp = ((CFunctionCall) statement).getFunctionCallExpression();
    CExpression nameExp = callExp.getFunctionNameExpression();
    // try to get the name of the function
    String funcName = null;
    CFunctionDeclaration funcDecl = callExp.getDeclaration();
    if (funcDecl != null) {
      funcName = funcDecl.getName();
    } else {
      // e.g. a call through a plain identifier without a declaration
      if (nameExp instanceof CIdExpression) {
        funcName = ((CIdExpression) nameExp).getName();
      }
    }
    boolean needRelax = false;
    if (funcName != null) {
      // 1. handle stop function
      if (GlobalInfo.getInstance().queryStopFunction(funcName)) {
        return Collections.emptySet();
      }
      // 2.
      // if current function is registered in the function adapter, we directly evaluate
      // this function expression using function adapter
      if (RangeFunctionAdapter.instance(true).isRegistered(callExp)) {
        if (statement instanceof CFunctionCallStatement) {
          // we simply discard range for return variable
          evaluateRange(newState, pOtherStates, callExp);
        } else {
          CLeftHandSide lhs = ((CFunctionCallAssignmentStatement) statement).getLeftHandSide();
          AccessPath leftPath = getAccessPath(newState, pOtherStates, lhs);
          if (leftPath != null) {
            addRange(newState, pOtherStates, leftPath, callExp, null);
          }
        }
        return Collections.singleton(newState);
      }
      // 3. unknown function: we change the state by applying function summary
      needRelax = applyFunctionSummary(newState, pOtherStates, funcName, (CFunctionCall)
          statement);
    }
    if (funcName == null || needRelax) {
      // no name / no usable summary: relax the LHS to its full type range (return is TOP)
      if (statement instanceof CFunctionCallAssignmentStatement) {
        CLeftHandSide lhs = ((CFunctionCallAssignmentStatement) statement).getLeftHandSide();
        AccessPath leftPath = getAccessPath(newState, pOtherStates, lhs);
        if (leftPath != null) {
          TypeRangeVisitor typeRangeVisitor = new TypeRangeVisitor(leftPath,
              maxTrackedArrayElements, machineModel, false);
          CType leftType = lhs.getExpressionType();
          PathCopyingPersistentTree<String, Range> newRanges = leftType.accept(typeRangeVisitor);
          newState.addAllRanges(newRanges);
        }
      }
    }
  } else if (statement instanceof CAssignment) {
    // the RHS of the assignment should NOT an expression with side-effects (i.e. containing
    // function call)
    CLeftHandSide lhs = ((CAssignment) statement).getLeftHandSide();
    CRightHandSide rhs = ((CAssignment) statement).getRightHandSide();
    AccessPath leftPath = getAccessPath(newState, pOtherStates, lhs);
    if (leftPath == null) {
      return Collections.singleton(newState);
    }
    addRange(newState, pOtherStates, leftPath, rhs, null);
  }
  return Collections.singleton(newState);
}

/**
 * Handles an assume edge: evaluates the assumption, prunes infeasible branches, and refines
 * the ranges of both operands according to the (possibly negated) comparison operator.
 * Refinement pairs are also queued in {@code forFurtherRefine} for def-use propagation.
 *
 * <p>NOTE(review): the code casts the assumption to {@link CBinaryExpression} without an
 * instanceof check — presumably assume-edge expressions are always binary comparisons here;
 * verify against the CFA builder.
 */
private Collection<RangeState> handleAssumption(
    RangeState state, List<AbstractState> otherStates, CAssumeEdge cfaEdge)
    throws CPATransferException {
  boolean truth = cfaEdge.getTruthAssumption();
  // assumption should be a logical expression ranging in [0,1]
  CExpression assumption = cfaEdge.getExpression();
  RangeState newState = RangeState.copyOf(state);
  Range assumeRange = evaluateRange(newState, otherStates, assumption);
  // infeasible branch: empty range, or the assumption definitely evaluates to the wrong truth
  if (assumeRange.isEmpty() || (truth ? Range.ZERO : Range.ONE).equals(assumeRange)) {
    return Collections.emptySet();
  }
  BinaryOperator op = ((CBinaryExpression) assumption).getOperator();
  CExpression op1 = ((CBinaryExpression) assumption).getOperand1();
  CExpression op2 = ((CBinaryExpression) assumption).getOperand2();
  if (!truth) {
    op = op.getOppositeLogicalOperator();
  }
  ExpressionRangeVisitor visitor = new ExpressionRangeVisitor(newState, otherStates,
      machineModel, true);
  Range range1 = op1.accept(visitor);
  Range range2 = op2.accept(visitor);
  Range restrict1, restrict2;
  switch (op) {
    case LESS_THAN:
      // op1 < op2: op1 <= max(op2)-1, op2 >= min(op1)+1
      restrict1 = range1.limitUpperBoundBy(range2.minus(1L));
      restrict2 = range2.limitLowerBoundBy(range1.plus(1L));
      newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
          machineModel, true));
      newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
          machineModel, true));
      // refine more expressions according to def-use chain
      forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      break;
    case LESS_EQUAL:
      restrict1 = range1.limitUpperBoundBy(range2);
      restrict2 = range2.limitLowerBoundBy(range1);
      newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
          machineModel, true));
      newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
          machineModel, true));
      forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      break;
    case GREATER_THAN:
      restrict1 = range1.limitLowerBoundBy(range2.plus(1L));
      restrict2 = range2.limitUpperBoundBy(range1.minus(1L));
      newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
          machineModel, true));
      newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
          machineModel, true));
      forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      break;
    case GREATER_EQUAL:
      restrict1 = range1.limitLowerBoundBy(range2);
      restrict2 = range2.limitUpperBoundBy(range1);
      newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
          machineModel, true));
      newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
          machineModel, true));
      forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      break;
    case EQUALS:
      // both operands collapse to the intersection of the two ranges
      restrict1 = range1.intersect(range2);
      restrict2 = range2.intersect(range1);
      newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
          machineModel, true));
      newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
          machineModel, true));
      forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      break;
    case NOT_EQUALS:
      // a refinement is only possible when one side is a single point (see splitRanges)
      Pair<Range, Range> splitResult = splitRanges(range1, range2);
      restrict1 = splitResult.getFirst();
      restrict2 = splitResult.getSecond();
      if (restrict1 != null) {
        // not null: the first range could be refined with restrict range 1
        newState = op1.accept(new RangeRefineVisitor(newState, otherStates, restrict1,
            machineModel, true));
        forFurtherRefine.add(new BindRefinePair(op1, restrict1));
      }
      if (restrict2 != null) {
        newState = op2.accept(new RangeRefineVisitor(newState, otherStates, restrict2,
            machineModel, true));
        forFurtherRefine.add(new BindRefinePair(op2, restrict2));
      }
      break;
    default:
      throw new UnrecognizedCCodeException("unexpected operator in assumption", cfaEdge,
          assumption);
  }
  return Collections.singleton(newState);
}

/**
 * Handles a function call edge: binds argument ranges to parameters, records the function's
 * precondition for summary computation, then applies an existing summary to skip the callee.
 */
private Collection<RangeState> handleFunctionCall(
    RangeState state, List<AbstractState> otherStates, CFunctionCallEdge cfaEdge)
    throws CPATransferException {
  RangeState newState = RangeState.copyOf(state);
  final List<CExpression> args = cfaEdge.getArguments();
  final CFunctionEntryNode entryNode = cfaEdge.getSuccessor();
  final List<CParameterDeclaration> params = entryNode.getFunctionParameters();
  // the size of arguments should be no fewer than the size of parameter declarations
  final CFunctionSummaryEdge summaryEdge = cfaEdge.getSummaryEdge();
  final CFunctionCall call = summaryEdge.getExpression();
  final String funcName = entryNode.getFunctionName();
  // 1. collect precondition of the function
  for (int i = 0; i < params.size(); i++) {
    CParameterDeclaration param = params.get(i);
    CExpression arg = args.get(i);
    AccessPath paramPath = getAccessPath(param);
    addRange(newState, otherStates, paramPath, arg, param.getType());
  }
  RangeState preState = new RangeState();
  // transfer ranges on parameters
  for (CParameterDeclaration param : params) {
    transferDeclarations(newState, preState, param);
  }
  // collect information of global variables
  // we do not collect global values for performance reason, for now
  // update precondition
  RangeFunctionPrecondition.updatePrecondition(funcName, preState);
  // 2.
  // skip this function using existing range summary or access summary
  if (applyFunctionSummary(newState, otherStates, funcName, call)) {
    // if we reach here, we need to relax the return value
    if (call instanceof CAssignment) {
      CLeftHandSide lhs = ((CAssignment) call).getLeftHandSide();
      AccessPath leftPath = getAccessPath(newState, otherStates, lhs);
      if (leftPath != null) {
        TypeRangeVisitor trVisitor = new TypeRangeVisitor(leftPath, maxTrackedArrayElements,
            machineModel, false);
        CType leftType = lhs.getExpressionType();
        PathCopyingPersistentTree<String, Range> newRanges = leftType.accept(trVisitor);
        newState.addAllRanges(newRanges);
      }
    }
  }
  return Collections.singleton(newState);
}

/**
 * Handles a function return edge: the exit state becomes the function's summary; no
 * successor state is produced (the caller continues via the summary edge).
 */
private Collection<RangeState> handleFunctionReturn(
    RangeState state, CFunctionReturnEdge cfaEdge, boolean forNarrow)
    throws CPATransferException {
  // no successor is computed for function return edge
  // we create a new function summary instance for collecting invariant
  String funcName = cfaEdge.getFunctionEntry().getFunctionName();
  collectFunctionSummary(funcName, state,
      cfaEdge.getFunctionEntry().getReturnVariable().orNull(), forNarrow);
  return Collections.emptySet();
}

/**
 * Handles a return statement: binds the returned expression to the virtual return variable
 * and, for the main function (which has no function return edge), collects the summary here.
 */
private Collection<RangeState> handleReturnStatement(
    RangeState state, List<AbstractState> otherStates, CReturnStatementEdge cfaEdge,
    boolean forNarrow) throws CPATransferException {
  RangeState newState = RangeState.copyOf(state);
  Optional<CAssignment> orAssign = cfaEdge.asAssignment();
  if (orAssign.isPresent()) {
    CAssignment assign = orAssign.get();
    CLeftHandSide leftHand = assign.getLeftHandSide();
    CType returnType = leftHand.getExpressionType();
    AccessPath path = getAccessPath(newState, otherStates, leftHand);
    // in general, the LHS should be a virtual return variable
    assert (path != null);
    addRange(newState, otherStates, path, assign.getRightHandSide(), returnType);
  }
  // if the current function is main (which does not have function return edge), we collect the
  // function summary here
  if (cfaEdge.getSuccessor().getNumLeavingEdges() == 0) {
    CFunctionEntryNode entry = (CFunctionEntryNode) cfaEdge.getSuccessor().getEntryNode();
    collectFunctionSummary(entry.getFunctionName(), newState, entry.getReturnVariable().orNull
        (), forNarrow);
  }
  return Collections.singleton(newState);
}

/* *************** */
/* utility methods */
/* *************** */

/** Wraps a declaration into an access path rooted at that declaration. */
private AccessPath getAccessPath(CSimpleDeclaration declaration) {
  return new AccessPath(declaration);
}

/**
 * Resolves an lvalue expression to an access path, or null if the expression is not an
 * lvalue or cannot be resolved.
 */
@Nullable
private AccessPath getAccessPath(
    RangeState state, List<AbstractState> otherStates, CExpression expression) {
  if (expression instanceof CLeftHandSide) {
    LeftHandAccessPathVisitor visitor = new LeftHandAccessPathVisitor(new
        ExpressionRangeVisitor(state, otherStates, machineModel, true));
    AccessPath path;
    try {
      path = ((CLeftHandSide) expression).accept(visitor).orNull();
    } catch (UnrecognizedCCodeException e) {
      // unresolvable lvalues are treated as "no path" rather than an error
      path = null;
    }
    return path;
  }
  // otherwise, there is no access path
  return null;
}

/**
 * Assigns the value of {@code e} to the object at {@code pPath}, handling string literals
 * (element-wise plus trailing '\0'), lvalue-to-lvalue copies (subtree copy), and plain
 * expressions. Afterwards, if the path runs through a union, sibling members are either
 * copied or relaxed depending on their size relative to the written member.
 *
 * @param restrictType if non-null, the written range is clamped to this type's range
 *                     (used for function parameters and return values)
 */
private void addRange(
    RangeState newState,
    List<AbstractState> pOtherStates,
    @Nullable AccessPath pPath,
    CRightHandSide e,
    @Nullable CType restrictType) throws UnrecognizedCCodeException {
  if (pPath != null) {
    AccessPath path = AccessPath.copyOf(pPath);
    if (e instanceof CStringLiteralExpression) {
      String content = ((CStringLiteralExpression) e).getContentString();
      for (int i = 0; i < content.length(); i++) {
        AccessPath newPath = AccessPath.copyOf(path);
        newPath.appendSegment(new ArrayConstIndexSegment(i));
        addRange(newState, newPath, new Range(content.charAt(i)));
      }
      // Moreover, append '\0' at the end
      AccessPath newPath = AccessPath.copyOf(path);
      newPath.appendSegment(new ArrayConstIndexSegment(content.length()));
      addRange(newState, newPath, Range.ZERO);
    } else if (e instanceof CLeftHandSide) {
      // copy the whole subtree of ranges from the RHS path to the LHS path
      AccessPath rightPath = getAccessPath(newState, pOtherStates, (CLeftHandSide) e);
      if (rightPath != null) {
        AccessPath newPath = AccessPath.copyOf(path);
        newState.replaceAndCopy(rightPath, newPath, true);
        Range updatedRange = newState.getRange(newPath, machineModel);
        if (restrictType != null) {
          Range typeRange = Ranges.getTypeRange(restrictType, machineModel);
          if (!typeRange.contains(updatedRange)) {
            newState.addRange(newPath, typeRange, true);
          }
        }
      }
    } else {
      // other cases are trivial
      Range resultRange = evaluateRange(newState, pOtherStates, e);
      // we should sanitize values such as function parameters and return values
      if (restrictType != null) {
        Range typeRange = Ranges.getTypeRange(restrictType, machineModel);
        if (!typeRange.contains(resultRange)) {
          resultRange = typeRange;
        }
      }
      AccessPath newPath = AccessPath.copyOf(path);
      addRange(newState, newPath, resultRange);
    }
    // In most cases, we migrate the range information of this path to the left one
    // However, if the access path contains 'union' (union data structure), we should relax
    // fields in this union that have larger size than current union member.
    List<CType> typeList = path.parseTypeList();
    int lastPos = typeList.size() - 1;
    AccessPath previousPath = AccessPath.copyOf(path);
    // walk the path from the innermost segment outwards, looking for unions
    for (int i = lastPos; i >= 0; i--) {
      CType currentType = typeList.get(i);
      CCompositeType compositeType = Types.extractCompositeType(currentType);
      if (compositeType != null && compositeType.getKind() == ComplexTypeKind.UNION
          && i < lastPos) {
        // relax union members
        CType nextType = typeList.get(i + 1);
        int currentSize = machineModel.getSizeof(nextType);
        List<CCompositeTypeMemberDeclaration> members = compositeType.getMembers();
        for (CCompositeTypeMemberDeclaration member : members) {
          if (member.getName().equals(previousPath.getLastSegment().getName())) {
            // avoid to re-compute the known access path
            continue;
          }
          CType memberType = member.getType();
          int memberSize = machineModel.getSizeof(memberType);
          if (memberSize <= currentSize) {
            // we update this field if and only if:
            // (1) current type and specified type are equivalent
            // (2) current and specified types are both numerical types AND value override
            // corrupts all bits of the original value
            if (Types.isEquivalent(memberType, nextType) ||
                (Types.isNumericalType(nextType) && Types.isNumericalType(memberType))) {
              AccessPath newPath = AccessPath.copyOf(path);
              newPath.appendSegment(new FieldAccessSegment(member.getName()));
              newState.replaceAndCopy(previousPath, newPath, true);
            }
          } else {
            // The data of this member is corrupted. Although we can restore its value
            // precisely in few cases, for efficiency purpose we fully relax this field
            AccessPath newPath = AccessPath.copyOf(path);
            newPath.appendSegment(new FieldAccessSegment(member.getName()));
            TypeRangeVisitor typeVisitor = new TypeRangeVisitor(newPath,
                maxTrackedArrayElements, machineModel, false);
            PathCopyingPersistentTree<String, Range> newRanges = memberType.accept(typeVisitor);
            newState.addAllRanges(newRanges);
          }
        }
      }
      previousPath = AccessPath.copyOf(path);
      path.removeLastSegment();
    }
  }
}

/** Writes a single range to the state; silently ignores a null path. */
private void addRange(RangeState newState, @Nullable AccessPath path, Range range) {
  if (path != null) {
    newState.addRange(path, range, true);
  }
}

/** Evaluates an expression to a range against the given state. */
private Range evaluateRange(
    RangeState readableState, List<AbstractState> pOtherStates, CRightHandSide expression)
    throws UnrecognizedCCodeException {
  return expression.accept(new ExpressionRangeVisitor(readableState, pOtherStates,
      machineModel, true));
}

/**
 * For a disequality {@code op1 != op2}: when one operand is a single point, the other
 * operand's range can be split around that point. Returns the pair of refined ranges
 * (either element may be null when no refinement is possible).
 */
private Pair<Range, Range> splitRanges(Range r1, Range r2) {
  Range rs1 = null, rs2 = null;
  if (r1.isEmpty() || r2.isEmpty()) {
    // if one operand has empty range, the total range should be empty either and it is
    // unnecessary to split the range now
    return Pair.of(null, null);
  }
  if (r1.getLow().equals(r1.getHigh())) {
    // r1 is a single point: carve that point out of r2
    CompInteger point = r1.getLow();
    Range partOne = r2.intersect(Range.upperBoundedRange(point.subtract(
        CompIntegers.ALMOST_ZERO_DELTA)));
    Range partTwo = r2.intersect(Range.lowerBoundedRange(point.add(CompIntegers
        .ALMOST_ZERO_DELTA)));
    boolean isEmptyOne = partOne.isEmpty();
    boolean isEmptyTwo = partTwo.isEmpty();
    if (isEmptyOne && isEmptyTwo) {
      rs2 = Range.EMPTY;
    } else if (isEmptyOne) {
      rs2 = partTwo;
    } else
if (isEmptyTwo) { rs2 = partOne; } } if (r2.getLow().equals(r2.getHigh())) { CompInteger point = r2.getLow(); Range partOne = r1.intersect(Range.upperBoundedRange(point.subtract(CompIntegers .ALMOST_ZERO_DELTA))); Range partTwo = r1.intersect(Range.lowerBoundedRange(point.add(CompIntegers .ALMOST_ZERO_DELTA))); boolean isEmptyOne = partOne.isEmpty(); boolean isEmptyTwo = partTwo.isEmpty(); if (isEmptyOne && isEmptyTwo) { rs1 = Range.EMPTY; } else if (isEmptyOne) { rs1 = partTwo; } else if (isEmptyTwo) { rs1 = partOne; } } // check if the refined range is strictly smaller than the original one if (rs1 != null) { if (rs1.equals(r1)) { rs1 = null; } } if (rs2 != null) { if (rs2.equals(r2)) { rs2 = null; } } return Pair.of(rs1, rs2); } /** * Transfer range tree of specific declaration. */ private void transferDeclarations(RangeState fromState, RangeState toState, CSimpleDeclaration pDeclaration) { String qualifiedName = pDeclaration.getQualifiedName(); PersistentTreeNode<String, Range> subtree = fromState.removeRangesWithPrefix(qualifiedName); if(subtree != null) { toState.addRangesWithPrefix(qualifiedName, subtree); } } @Override public Collection<? 
extends AbstractState> strengthen( AbstractState state, List<AbstractState> otherStates, @Nullable CFAEdge cfaEdge, Precision precision) throws CPATransferException, InterruptedException { if (cfaEdge == null) { return null; } RangeState rangeState = (RangeState) state; if (!forFurtherRefine.isEmpty()) { BindState bindState = AbstractStates.extractStateByType(otherStates, BindState.class); if (bindState != null) { for (BindRefinePair pair : forFurtherRefine) { CExpression expression = pair.getExpression(); Range restrict = pair.getRestrictRange(); AccessPath path = getAccessPath(rangeState, otherStates, expression); if (path != null) { List<CRightHandSide> associates = bindState.getBindedExpression(path, otherStates, 3); for (CRightHandSide exp : associates) { rangeState = exp.accept(new RangeRefineVisitor(rangeState, otherStates, restrict, machineModel, true)); } } } } forFurtherRefine.clear(); } return Collections.singleton(rangeState); } /* ******************* */ /* summary computation */ /* ******************* */ /** * apply function summary to change `state` (which means `state` is mutable in this method) * * @return whether the return value should be relaxed */ private boolean applyFunctionSummary( RangeState state, List<AbstractState> otherStates, String funcName, CFunctionCall funCall) throws CPATransferException { // 1. 
get function summary RangeFunctionInstance summary = summaryComputer.getFunctionSummary(funcName); if (summary != null) { RangeState stateForApply = summary.apply(); // if this is an assignment, we should also update the range of LHS if (funCall instanceof CAssignment) { CLeftHandSide lhs = ((CAssignment) funCall).getLeftHandSide(); AccessPath leftPath = getAccessPath(state, otherStates, lhs); if (leftPath != null) { PathCopyingPersistentTree<String, Range> returnTree = summary.getReturnSummary(); PersistentTreeNode<String, Range> returnRoot = returnTree.getRoot(); if (returnRoot != null) { Set<String> keys = returnRoot.getKeys(); String returnKey = Iterables.getOnlyElement(keys); PersistentTreeNode<String, Range> subtree = returnRoot.getChild(returnKey); stateForApply.addRangesWithPrefix(leftPath, subtree); } } } state.forcedUpdate(stateForApply); // do not relax return value return false; } else { // 2. no function summary // return value -> type range // global variables -> from access summary List<FunctionSummaryStore<?>> summaryStoreList = SummaryProvider.getFunctionSummary(); AccessSummaryStore accessStore = null; for (FunctionSummaryStore<?> summaryStore : summaryStoreList) { if (summaryStore instanceof AccessSummaryStore) { accessStore = (AccessSummaryStore) summaryStore; break; } } if (accessStore == null) { throw new IllegalArgumentException("Access summary required for computing range summary"); } AccessFunctionInstance instance = accessStore.query(funcName); if (instance != null) { AccessResult accessTree = instance.apply(); // consider write-tree here for (AccessPath accessPath : accessTree.writes) { if (accessPath.isGlobal()) { TypeRangeVisitor trVisitor = new TypeRangeVisitor(accessPath, maxTrackedArrayElements, machineModel, false); List<CType> typeList = accessPath.parseTypeList(); CType lastType = typeList.get(typeList.size() - 1); PathCopyingPersistentTree<String, Range> newRanges = lastType.accept(trVisitor); state.addAllRanges(newRanges); } } } 
// relax return value return true; } } private void collectFunctionSummary( String funcName, RangeState state, @Nullable CVariableDeclaration returnVar, boolean forNarrow) { // traverse all the declarations in the given state, and extract information on global // variables and the return value RangeFunctionInstance newInstance = new RangeFunctionInstance(funcName); // we collect no global values for performance reason if (returnVar != null) { String retName = returnVar.getQualifiedName(); PersistentTreeNode<String, Range> retTree = state.getSubTree(retName); if (retTree != null) { newInstance.addReturnSummary(retName, retTree); } } // merge with the existing function summary // Note: if the transfer works under the narrowing mode, the function summary will be // forcibly overwritten if (functionSummary == null || forNarrow) { functionSummary = newInstance; } else { functionSummary = functionSummary.merge(newInstance); } } private Collection<RangeState> collectLoopSummary( Collection<RangeState> states, CFAEdge cfaEdge) { // if multiple successors are derived, we should merge them into one before proceeding if (states.isEmpty()) { return states; } RangeState newState; if (states.size() == 1) { newState = Iterables.getOnlyElement(states); } else { final Iterator<RangeState> it = states.iterator(); newState = it.next(); while (it.hasNext()) { newState = newState.join(it.next()); } } if (entry2Loop.containsKey(cfaEdge)) { // current edge is loop entry edge // such edge is traversed for only once, and the state here represents the constraint // before running any times of iterations CFANode entryNode = cfaEdge.getSuccessor(); entry2Node.put(cfaEdge, entryNode); RangeStateStore stateStore = node2State.get(entryNode); if (stateStore == null) { // create a new state store here stateStore = new RangeStateStore(newState); node2State.put(entryNode, stateStore); } else { // it occurs when the current loop is enclosed by other loops if (!stateStore.initializeState(newState)) { 
return Collections.emptySet(); } } } else if (exit2Loop.containsKey(cfaEdge)) { // current edge is loop exit edge RangeState oldState = exit2State.get(cfaEdge); if (oldState == null) { exit2State.put(cfaEdge, newState); } else { // the exit constraint grows larger exit2State.put(cfaEdge, oldState.join(newState)); } } else { CFANode successorLoc = cfaEdge.getSuccessor(); // If the successor location is a possible implicit loop head, then we treat it as a loop // head. (This is necessary because CPAchecker may fail to recognize a real loop) if (isPossibleLoopHead(successorLoc)) { if (node2State.containsKey(successorLoc)) { RangeStateStore stateStore = node2State.get(successorLoc); stateStore.updateState(newState); // get the widened state newState = stateStore.getState(); } else { RangeStateStore stateStore = new RangeStateStore(newState); node2State.put(successorLoc, stateStore); } } } return Collections.singleton(newState); } private boolean isPossibleLoopHead(CFANode loc) { // (1) more than 1 edge can lead to certain location, // (2) certain location must have at least one successor, // (3) function entry/exit should not be loop head. (Otherwise, widening occurs in handling // function entry edge.) 
return !(loc instanceof FunctionEntryNode) && !(loc instanceof FunctionExitNode) && loc.getNumEnteringEdges() > 1 && loc.getNumLeavingEdges() > 0 && CFAUtils.allSuccessorsOf(loc).anyMatch(FORWARD_REACHABLE(loc)); } private Predicate<CFANode> FORWARD_REACHABLE(final CFANode to) { return new Predicate<CFANode>() { @Override public boolean apply(CFANode pCFANode) { Set<CFANode> visited = new HashSet<>(); Queue<CFANode> waitlist = new ArrayDeque<>(); waitlist.offer(pCFANode); while (!waitlist.isEmpty()) { CFANode current = waitlist.poll(); if (current.equals(to)) { return true; } if (visited.add(current)) { for (CFANode successor : CFAUtils.allSuccessorsOf(current)) { if (successor.getFunctionName().equals(to.getFunctionName())) { // such successor is not out of the current function scope waitlist.offer(successor); } } } } return false; } }; } /** * Set the instance of range summary computer for storing summary info. */ void setSummaryComputer(@Nonnull RangeSummaryComputer pComputer) { summaryComputer = pComputer; } void getLoopSummary( Multimap<Loop, Pair<CFAEdge, RangeState>> pRawInternalSummary, Multimap<Loop, Pair<CFAEdge, RangeState>> pRawExternalSummary) { for (Entry<CFAEdge, CFANode> entry : entry2Node.entrySet()) { CFAEdge entryEdge = entry.getKey(); CFANode entryNode = entry.getValue(); Loop loop = entry2Loop.get(entryEdge); RangeState state = node2State.get(entryNode).getTotalState(); pRawInternalSummary.put(loop, Pair.of(entryEdge, state)); } for (Entry<CFAEdge, RangeState> entry : exit2State.entrySet()) { CFAEdge exitEdge = entry.getKey(); RangeState state = entry.getValue(); Loop loop = exit2Loop.get(exitEdge); pRawExternalSummary.put(loop, Pair.of(exitEdge, state)); } } @Nullable RangeFunctionInstance getFunctionSummary() { // a function should have at least one exit edge return functionSummary; } void clearPerFunction() { functionSummary = null; entry2Node.clear(); node2State.clear(); exit2State.clear(); } RangeState initializeDeclaration(RangeState 
oldState, CSimpleDeclaration declaration) throws UnrecognizedCCodeException { AccessPath declaredPath = new AccessPath(declaration); TypeRangeVisitor visitor = new TypeRangeVisitor(declaredPath, maxTrackedArrayElements, machineModel, false); CType declaredType = declaration.getType(); PathCopyingPersistentTree<String, Range> rangeTree = declaredType.accept(visitor); RangeState newState = RangeState.copyOf(oldState); newState.addAllRanges(rangeTree); return newState; } void loadLoopSummary(CFAInfo cfaInfo, String funcName) { CFA cfa = cfaInfo.getCFA(); LoopStructure loopStructure = cfa.getLoopStructure().orNull(); if (loopStructure == null) { return; } Collection<Loop> loopInFunc = loopStructure.getLoopsForFunction(funcName); for (Loop loop : loopInFunc) { RangeInternalLoopInstance internalInstance = summaryComputer.getInternalLoopSummary(loop); if (internalInstance != null) { Map<CFAEdge, RangeState> innerSummary = internalInstance.getLoopSummary(); for (Entry<CFAEdge, RangeState> entry : innerSummary.entrySet()) { CFAEdge entryEdge = entry.getKey(); RangeState entryState = entry.getValue(); entry2Node.put(entryEdge, entryEdge.getSuccessor()); node2State.put(entryEdge.getSuccessor(), new RangeStateStore(entryState)); } } RangeExternalLoopInstance externalInstance = summaryComputer.getExternalLoopSummary(loop); if (externalInstance != null) { Map<CFAEdge, RangeState> innerSummary = externalInstance.getLoopSummary(); for (Entry<CFAEdge, RangeState> entry : innerSummary.entrySet()) { CFAEdge exitEdge = entry.getKey(); RangeState exitState = entry.getValue(); exit2State.put(exitEdge, exitState); } } } } }
guywmartin/genetic-constructor-ce
storage-ext/test/http/routes/version.js
"use strict"; var assert = require("assert"); var async = require("async"); var request = require("supertest"); var describeAppTest = require("../../api-app"); var versionRegEx = /^\d+\.\d+\.\d+$/; describeAppTest("http", function (app) { describe("version routes", function () { it("should return a version", function (done) { request(app.proxy) .get('/version') .expect(200) .end(function (err, res) { assert.ifError(err); assert(res); assert(res.text); assert(res.text.match(versionRegEx)); done(); }); }); }); });
MrStronger/TimeNote
src/routes/Hall/modules/hall.js
<filename>src/routes/Hall/modules/hall.js import { combineReducers } from 'redux' import CardReducer from './cardCase' export default combineReducers({ CardData: CardReducer })
junmin-zhu/chromium-rivertrail
ui/views/examples/scroll_view_example.h
<filename>ui/views/examples/scroll_view_example.h
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef UI_VIEWS_EXAMPLES_SCROLL_VIEW_EXAMPLE_H_
#define UI_VIEWS_EXAMPLES_SCROLL_VIEW_EXAMPLE_H_

#include <string>

#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "ui/views/controls/button/text_button.h"
#include "ui/views/controls/scroll_view.h"
#include "ui/views/examples/example_base.h"

namespace views {
namespace examples {

// Example demonstrating ScrollView: hosts a scrollable content view plus a
// row of buttons that resize the content or scroll to a fixed position.
class ScrollViewExample : public ExampleBase, public ButtonListener {
 public:
  ScrollViewExample();
  virtual ~ScrollViewExample();

  // Overridden from ExampleBase:
  virtual void CreateExampleView(View* container) OVERRIDE;

 private:
  // Overridden from ButtonListener:
  // Dispatches on |sender| to resize the scrollable content or scroll it.
  virtual void ButtonPressed(Button* sender, const ui::Event& event) OVERRIDE;

  // Control buttons to change the size of scrollable and jump to
  // predefined position.
  TextButton* wide_;
  TextButton* tall_;
  TextButton* big_square_;
  TextButton* small_square_;
  TextButton* scroll_to_;

  // Forward declaration; presumably defined in the corresponding .cc file.
  class ScrollableView;

  // The content of the scroll view.
  ScrollableView* scrollable_;

  // The scroll view to test.
  ScrollView* scroll_view_;

  DISALLOW_COPY_AND_ASSIGN(ScrollViewExample);
};

}  // namespace examples
}  // namespace views

#endif  // UI_VIEWS_EXAMPLES_SCROLL_VIEW_EXAMPLE_H_
Ayusummer/JuniorLessons_beta
BigDataMicroMajor/Python/DailyHomework/Lesson10_numpy/4_numpyNormalfunction.py
<gh_stars>1-10
# -*- coding: utf-8 -*-
# @Time : 2020/11/10 18:39
# @Author : 咸鱼型233
# @File : 4_numpyNormalFunction.py
# @Software: PyCharm
# @Function: numpy mean, variance and standard deviation of sampled data
import numpy as np

# Heights of 10 students, drawn from a normal distribution (mean 169, std 4).
height_arr = np.random.normal(169, 4, 10)  # 10 heights
# Weights of 10 students, drawn from a normal distribution (mean 105, std 10).
weight_arr = np.random.normal(105, 10, 10)  # 10 weights
# Stack into a 2x10 array: row 0 holds heights, row 1 holds weights.
values_arr = np.vstack((height_arr, weight_arr))
print("10名同学的身高体重为:", values_arr)
# axis=1 reduces across the 10 samples, producing one statistic per row
# (one value for heights, one for weights).
aver_value = np.mean(values_arr, axis=1)
print("10名同学身高体重的平均值为:{0}".format(aver_value))
max_value = np.max(values_arr, axis=1)
print("10名同学身高体重的最大值为:{0}".format(max_value))
min_value = np.min(values_arr, axis=1)
print("10名同学身高体重的最小值为:{0}".format(min_value))
std_value = np.std(values_arr, axis=1)
print("10名同学身高体重的标准差为:{0}".format(std_value))
var_value = np.var(values_arr, axis=1)
print("10名同学身高体重的方差为:{0}".format(var_value))
erezrokah/appsmith
app/server/appsmith-interfaces/src/main/java/com/appsmith/external/models/Condition.java
package com.appsmith.external.models; import com.appsmith.external.constants.ConditionalOperator; import com.appsmith.external.constants.DataType; import com.appsmith.external.exceptions.pluginExceptions.AppsmithPluginError; import com.appsmith.external.exceptions.pluginExceptions.AppsmithPluginException; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import static com.appsmith.external.helpers.DataTypeStringUtils.stringToKnownDataTypeConverter; @Getter @Setter @AllArgsConstructor @NoArgsConstructor public class Condition { String path; ConditionalOperator operator; Object value; @JsonIgnore DataType valueDataType; public Condition(String path, String operator, String value) { this.path = path; this.operator = ConditionalOperator.valueOf(operator); this.value = value; } public static List<Condition> addValueDataType(List<Condition> conditionList) { return conditionList .stream() .map(condition -> { if (condition.getValue() instanceof String) { String value = (String) condition.getValue(); DataType dataType = stringToKnownDataTypeConverter(value); condition.setValueDataType(dataType); } return condition; }) .collect(Collectors.toList()); } public static Condition addValueDataType(Condition condition) { Object objValue = condition.getValue(); if (objValue instanceof String) { String value = (String) condition.getValue(); DataType dataType = stringToKnownDataTypeConverter(value); condition.setValueDataType(dataType); } else if (objValue instanceof List) { List<Condition> conditionList = (List<Condition>) objValue; List<Condition> updatedConditions = conditionList .stream() .map(subCondition -> addValueDataType(subCondition)) .collect(Collectors.toList()); 
condition.setValue(updatedConditions); } return condition; } public static Boolean isValid(Condition condition) { if (StringUtils.isEmpty(condition.getPath()) || (condition.getOperator() == null) || StringUtils.isEmpty((CharSequence) condition.getValue())) { return false; } return true; } public static List<Condition> generateFromConfiguration(List<Object> configurationList) { List<Condition> conditionList = new ArrayList<>(); for(Object config : configurationList) { Map<String, String> condition = (Map<String, String>) config; if (condition.entrySet().isEmpty()) { // Its an empty object set by the client for UX. Ignore the same continue; } else if (!condition.keySet().containsAll(Set.of("path", "operator", "value"))) { throw new AppsmithPluginException(AppsmithPluginError.PLUGIN_EXECUTE_ARGUMENT_ERROR, "Filtering Condition not configured properly"); } conditionList.add(new Condition( condition.get("path"), condition.get("operator"), condition.get("value") )); } return conditionList; } }
prasadrao82/angular-pioc
dist/out-tsc/app/@theme/components/switcher/switcher.component.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var tslib_1 = require("tslib"); var core_1 = require("@angular/core"); var SwitcherComponent = /** @class */ (function () { function SwitcherComponent() { this.valueChange = new core_1.EventEmitter(); } SwitcherComponent.prototype.isFirstValue = function () { return this.value === this.firstValue; }; SwitcherComponent.prototype.isSecondValue = function () { return this.value === this.secondValue; }; SwitcherComponent.prototype.currentValueLabel = function () { return this.isFirstValue() ? this.firstValueLabel : this.secondValueLabel; }; SwitcherComponent.prototype.changeValue = function () { this.value = this.isFirstValue() ? this.secondValue : this.firstValue; this.valueChange.emit(this.value); }; tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", Object) ], SwitcherComponent.prototype, "firstValue", void 0); tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", Object) ], SwitcherComponent.prototype, "secondValue", void 0); tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", String) ], SwitcherComponent.prototype, "firstValueLabel", void 0); tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", String) ], SwitcherComponent.prototype, "secondValueLabel", void 0); tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", Boolean) ], SwitcherComponent.prototype, "vertical", void 0); tslib_1.__decorate([ core_1.Input(), tslib_1.__metadata("design:type", Object) ], SwitcherComponent.prototype, "value", void 0); tslib_1.__decorate([ core_1.Output(), tslib_1.__metadata("design:type", Object) ], SwitcherComponent.prototype, "valueChange", void 0); SwitcherComponent = tslib_1.__decorate([ core_1.Component({ selector: 'ngx-switcher', styleUrls: ['./switcher.component.scss'], template: "\n <label class=\"switch-label\" [class.vertical]=\"vertical\">\n <span class=\"first\" [class.active]=\"vertical || 
isFirstValue()\">\n {{vertical ? currentValueLabel() : firstValueLabel}}\n </span>\n\n <div class=\"switch\">\n <input type=\"checkbox\" [checked]=\"isSecondValue()\" (change)=\"changeValue()\">\n <span class=\"slider\"></span>\n </div>\n\n <span\n *ngIf=\"!vertical\"\n class=\"second\"\n [class.active]=\"isSecondValue()\"\n >\n {{secondValueLabel}}\n </span>\n </label>\n ", }) ], SwitcherComponent); return SwitcherComponent; }()); exports.SwitcherComponent = SwitcherComponent; //# sourceMappingURL=switcher.component.js.map
pasmuss/cmssw
Calibration/HcalAlCaRecoProducers/python/alcastreamHcalGammaJetOutput_cff.py
import FWCore.ParameterSet.Config as cms # output block for alcastream HCAL Dijets # output module # module alcastreamHcalGammaJetOutput = PoolOutputModule alcastreamHcalGammaJetOutput = cms.PSet( outputCommands = cms.untracked.vstring('drop *', 'keep *_GammaJetProd_*_*') )
TheSilverEcho/ZeroPointAPI
src/main/java/me/thesilverecho/zeropoint/api/ui/widgets/NotificationComponent.java
package me.thesilverecho.zeropoint.api.ui.widgets; public class NotificationComponent { private float x, y, w, h; public NotificationComponent(float x, float y, float w, float h) { this.x = x; this.y = y; this.w = w; this.h = h; } }
io7m/r2
com.io7m.r2.transforms/src/main/java/com/io7m/r2/transforms/R2TransformST.java
<filename>com.io7m.r2.transforms/src/main/java/com/io7m/r2/transforms/R2TransformST.java
/*
 * Copyright © 2016 <<EMAIL>> http://io7m.com
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
 * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package com.io7m.r2.transforms;

import com.io7m.jnull.NullCheck;
import com.io7m.jtensors.core.parameterized.matrices.PMatrices4x4D;
import com.io7m.jtensors.core.parameterized.matrices.PMatrix4x4D;
import com.io7m.jtensors.core.parameterized.vectors.PVector3D;
import com.io7m.jtensors.core.parameterized.vectors.PVectors3D;
import com.io7m.r2.core.api.watchable.R2Watchable;
import com.io7m.r2.core.api.watchable.R2WatchableType;
import com.io7m.r2.spaces.R2SpaceObjectType;
import com.io7m.r2.spaces.R2SpaceType;
import com.io7m.r2.spaces.R2SpaceWorldType;

/**
 * <p>A transform represented by a scale, followed by a translation.</p>
 *
 * <p>The transform does not allow independent scaling on each axis and will
 * therefore produce matrices that are guaranteed to be orthogonal.</p>
 */

public final class R2TransformST implements R2TransformSTType
{
  // Observers registered here are notified on every scale/translation change.
  private final R2WatchableType<R2TransformOrthogonalReadableType> watchable;
  private PVector3D<R2SpaceWorldType> translation;
  // Uniform scale applied identically to all three axes.
  private double scale;

  private R2TransformST(
    final double in_scale,
    final PVector3D<R2SpaceWorldType> in_translation)
  {
    this.scale = in_scale;
    this.translation = NullCheck.notNull(in_translation, "Translation");
    this.watchable = R2Watchable.newWatchable(this);
  }

  /**
   * Construct a transform using the given initial values.
   *
   * @param in_scale       The scale
   * @param in_translation The translation
   *
   * @return A new transform
   */

  public static R2TransformST createWith(
    final double in_scale,
    final PVector3D<R2SpaceWorldType> in_translation)
  {
    return new R2TransformST(in_scale, in_translation);
  }

  /**
   * Construct a transform using the default values: The scale {@code 1.0}, and
   * the translation {@code (0, 0, 0)}.
   *
   * @return A new transform
   */

  public static R2TransformST create()
  {
    return new R2TransformST(1.0, PVectors3D.zero());
  }

  /**
   * @return A value representing scale
   */

  @Override
  public double scale()
  {
    return this.scale;
  }

  /**
   * Set the uniform scale.
   *
   * @param x The scale value
   */

  @Override
  public void setScale(final double x)
  {
    this.scale = x;
    // Mutation must notify observers so dependent matrices can be recomputed.
    this.watchable.watchableChanged();
  }

  /**
   * @return A translation in world-space
   */

  // NOTE(review): duplicate accessor of translation() below — presumably
  // retained for interface compatibility; confirm before removing either.
  public PVector3D<R2SpaceWorldType> getTranslation()
  {
    return this.translation;
  }

  @Override
  public void setTranslation(
    final PVector3D<R2SpaceWorldType> t)
  {
    this.translation = NullCheck.notNull(t, "Translation");
    this.watchable.watchableChanged();
  }

  @SuppressWarnings("unchecked")
  @Override
  public PMatrix4x4D<R2SpaceObjectType, R2SpaceWorldType> transformMakeMatrix4x4F()
  {
    final PMatrix4x4D<Object, Object> m_trans =
      PMatrices4x4D.ofTranslation(
        this.translation.x(), this.translation.y(), this.translation.z());
    final PMatrix4x4D<Object, Object> m_scale =
      PMatrices4x4D.ofScale(this.scale, this.scale, this.scale);
    // Product T * S: the scale is applied to object coordinates first, then
    // the translation — matching the class contract "scale, then translation".
    // The double cast launders the raw-space matrices into the phantom-typed
    // object-to-world matrix type.
    return (PMatrix4x4D<R2SpaceObjectType, R2SpaceWorldType>)
      (Object) PMatrices4x4D.multiply(m_trans, m_scale);
  }

  @Override
  public <T extends R2SpaceType, U extends R2SpaceType> PMatrix4x4D<T, U>
  transformMakeViewMatrix4x4F()
  {
    // Only the negated translation appears here; the scale is not part of
    // the view matrix produced by this method.
    final PVector3D<R2SpaceWorldType> inv = PVectors3D.negate(this.translation);
    return PMatrices4x4D.ofTranslation(inv.x(), inv.y(), inv.z());
  }

  @Override
  @SuppressWarnings("unchecked")
  public R2WatchableType<R2TransformReadableType> transformGetWatchable()
  {
    // Same watchable exposed under two interface types via an erased cast.
    return (R2WatchableType<R2TransformReadableType>) (Object) this.watchable;
  }

  @Override
  public R2WatchableType<R2TransformOrthogonalReadableType>
  transformOrthogonalGetWatchable()
  {
    return this.watchable;
  }

  @Override
  public PVector3D<R2SpaceWorldType> translation()
  {
    return this.translation;
  }
}
zhangjun0x01/streamx
streamx-flink/streamx-flink-core/src/main/scala/com/streamxhub/streamx/flink/core/scala/failover/FailoverChecker.scala
/*
 * Copyright (c) 2019 The StreamX Project
 * <p>
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.streamxhub.streamx.flink.core.scala.failover

import com.streamxhub.streamx.common.util.{Logger, ThreadUtils}

import java.util.concurrent.{Executors, ScheduledExecutorService, ThreadFactory, TimeUnit}
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer

/**
 * Periodically scans every registered [[SinkBuffer]] and asks it to flush
 * (`tryAddToQueue`). The scan runs on a single-threaded scheduler with a
 * fixed delay of `delayTime` milliseconds between runs.
 *
 * @param delayTime delay between consecutive buffer scans, in milliseconds
 */
case class FailoverChecker(delayTime: Long) extends AutoCloseable with Logger {

  // Registered buffers; guarded by this FailoverChecker's monitor.
  val sinkBuffers: ListBuffer[SinkBuffer] = ListBuffer[SinkBuffer]()
  val factory: ThreadFactory = ThreadUtils.threadFactory("FailoverChecker")
  val scheduledExecutorService: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor(factory)
  scheduledExecutorService.scheduleWithFixedDelay(getTask, delayTime, delayTime, TimeUnit.MILLISECONDS)
  logInfo(s"Build Sink scheduled checker, timeout (microSeconds) = $delayTime")

  /**
   * Register a buffer with the checker. Synchronized on this instance so
   * registration cannot race with the periodic scan in [[getTask]].
   *
   * @param buffer the sink buffer to be checked periodically
   */
  def addSinkBuffer(buffer: SinkBuffer): Unit = {
    this.synchronized(sinkBuffers.add(buffer))
    logDebug(s"Add SinkBuffer, target table = ${buffer.table}")
  }

  /** The periodic task: flush every registered buffer. */
  def getTask: Runnable = new Runnable {
    override def run(): Unit = {
      // BUG FIX: the original `this synchronized` locked the anonymous
      // Runnable, while addSinkBuffer locks the FailoverChecker — two
      // different monitors, so registration could race with this iteration
      // over sinkBuffers. Lock the enclosing instance instead.
      FailoverChecker.this.synchronized {
        logDebug(s"Start checking buffers. Current count of buffers = ${sinkBuffers.size}")
        sinkBuffers.foreach(_.tryAddToQueue())
      }
    }
  }

  /** Stop the periodic scan and release the scheduler's thread. */
  override def close(): Unit = ThreadUtils.shutdownExecutorService(scheduledExecutorService)
}
CyberQueenMara/baseband-research
okl4_kernel/okl4_2.1.1-patch.9/tools/pyelf/weaver/machine.py
############################################################################## # Copyright (c) 2007 Open Kernel Labs, Inc. (Copyright Holder). # All rights reserved. # # 1. Redistribution and use of OKL4 (Software) in source and binary # forms, with or without modification, are permitted provided that the # following conditions are met: # # (a) Redistributions of source code must retain this clause 1 # (including paragraphs (a), (b) and (c)), clause 2 and clause 3 # (Licence Terms) and the above copyright notice. # # (b) Redistributions in binary form must reproduce the above # copyright notice and the Licence Terms in the documentation and/or # other materials provided with the distribution. # # (c) Redistributions in any form must be accompanied by information on # how to obtain complete source code for: # (i) the Software; and # (ii) all accompanying software that uses (or is intended to # use) the Software whether directly or indirectly. Such source # code must: # (iii) either be included in the distribution or be available # for no more than the cost of distribution plus a nominal fee; # and # (iv) be licensed by each relevant holder of copyright under # either the Licence Terms (with an appropriate copyright notice) # or the terms of a licence which is approved by the Open Source # Initative. For an executable file, "complete source code" # means the source code for all modules it contains and includes # associated build and other files reasonably required to produce # the executable. # # 2. THIS SOFTWARE IS PROVIDED ``AS IS'' AND, TO THE EXTENT PERMITTED BY # LAW, ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR # PURPOSE, OR NON-INFRINGEMENT, ARE DISCLAIMED. 
WHERE ANY WARRANTY IS # IMPLIED AND IS PREVENTED BY LAW FROM BEING DISCLAIMED THEN TO THE # EXTENT PERMISSIBLE BY LAW: (A) THE WARRANTY IS READ DOWN IN FAVOUR OF # THE COPYRIGHT HOLDER (AND, IN THE CASE OF A PARTICIPANT, THAT # PARTICIPANT) AND (B) ANY LIMITATIONS PERMITTED BY LAW (INCLUDING AS TO # THE EXTENT OF THE WARRANTY AND THE REMEDIES AVAILABLE IN THE EVENT OF # BREACH) ARE DEEMED PART OF THIS LICENCE IN A FORM MOST FAVOURABLE TO # THE COPYRIGHT HOLDER (AND, IN THE CASE OF A PARTICIPANT, THAT # PARTICIPANT). IN THE LICENCE TERMS, "PARTICIPANT" INCLUDES EVERY # PERSON WHO HAS CONTRIBUTED TO THE SOFTWARE OR WHO HAS BEEN INVOLVED IN # THE DISTRIBUTION OR DISSEMINATION OF THE SOFTWARE. # # 3. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ANY OTHER PARTICIPANT BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN # IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Properties of the target machine. """ import math from weaver import MergeError from weaver.device import PhysicalDevice from weaver.bootinfo_elf import BI_DEFAULT_MEMORY, BI_CACHED_MEMORY, \ BI_UNCACHED_MEMORY, BI_WRITE_BACK_MEMORY, BI_WRITE_THROUGH_MEMORY, \ BI_COHERENT_MEMORY, BI_IO_MEMORY, BI_IO_COMBINED_MEMORY def _assert_no_overlaps(mem_map): """Raise an exception if any of the memory regions in the map overlap.""" # Extract all of the memory regions and record where they came from. # Sort by base address mem_array = sorted([(base, size, name) for (name, mem) in mem_map.items() for (base, size, mem_type) in mem]) highest_memory = -1L highest = () # Check for overlaps and give a meaningful error message if there # is one. 
for (base, size, name) in mem_array: if base <= highest_memory: raise MergeError, \ 'The machine memory region 0x%x--0x%x (size 0x%x) in ' \ '"%s" overlaps with region 0x%x--0x%x (size 0x%x) in ' \ '"%s".' % (base, base + size - 1, size, name, highest[0], highest[0] + highest[1] -1, highest[1], highest[2]) else: highest_memory = base + size - 1 highest = (base, size, name) class Machine: """Description of the image's target machine.""" def __init__(self): # Default if no machine is given in XML self.page_sizes = [0x1000] self.word_size = 32 self.virtual_mem = {} self.physical_mem = {} self.physical_device = {} self.physical_memory = [] self._heap_proximity = 64 * 1024 * 1024 # 64M is the ARM requirement. self.kernel_heap_align = None # Map between cache policy strings and L4 attribute types. self.cache_policies = { 'default' : BI_DEFAULT_MEMORY, 'cached' : BI_CACHED_MEMORY, 'uncached' : BI_UNCACHED_MEMORY, 'writeback' : BI_WRITE_BACK_MEMORY, 'writethrough' : BI_WRITE_THROUGH_MEMORY, 'coherent' : BI_COHERENT_MEMORY, 'device' : BI_IO_MEMORY, 'writecombining' : BI_IO_COMBINED_MEMORY } def set_kernel_heap_proximity(self, distance): """ The maximum distance, in bytes, that the kernel heap can be from the kernel. If the distance is None, then the current value is not changed. """ if distance is not None: self._heap_proximity = distance def get_kernel_heap_proximity(self): """ Return the maximum distance, in bytes, that the kernel heap can be from the kernel. """ return self._heap_proximity kernel_heap_proximity = property(get_kernel_heap_proximity, set_kernel_heap_proximity) def set_page_sizes(self, sizes): """Set the allowed page sizes for the machine.""" self.page_sizes = sizes[:] # Sort in descending order to make min_page_size() and # superpage_alignment's lives easier. 
self.page_sizes.sort(key=lambda x: -x) def add_cache_policies(self, policies): """Add extra cache policy values to the default map.""" for (name, value) in policies: self.cache_policies[name] = value def get_cache_policy(self, attr): """ Return the numeric value of a named cache policy or raise an exception. """ try: val = self.cache_policies[attr] except: raise MergeError, ("Unknown cache policy: '%s'." % attr) return val def add_virtual_mem(self, name, mem): """Add a named list of virtual memory ranges.""" self.virtual_mem[name] = mem _assert_no_overlaps(self.virtual_mem) def add_physical_mem(self, name, mem): """Add a named list of physical memory ranges.""" self.physical_mem[name] = mem _assert_no_overlaps(self.physical_mem) self.physical_memory.extend(mem) self.physical_memory.sort() def add_phys_device(self, name): """Add a named physical device.""" self.physical_device[name] = PhysicalDevice(name) return self.physical_device[name] def get_virtual_memory(self, name): """Get the names list of virtual memory ranges.""" if not self.virtual_mem.has_key(name): raise MergeError, "Virtual memory called %s not found." % name return self.virtual_mem[name] def get_physical_memory(self, name): """Get the names list of physical memory ranges.""" # First look for the physical memory in devices... for dev in self.physical_device.itervalues(): pm = dev.get_physical_mem(name) if pm is not None: return pm # ... then try to look for it in the machine if not self.physical_mem.has_key(name): raise MergeError, "Physical memory called %s not found." 
% name return self.physical_mem[name] def min_page_size(self): """Return the smallest allowable page size.""" return self.page_sizes[-1] def max_page_size(self): """Return the largest allowable page size.""" return self.page_sizes[0] def superpage_alignment(self, size): """Calculate the biggest alignment supported on the current machine for the given size.""" alignment = None for i in self.page_sizes: if i <= size: alignment = i break if alignment is None: alignment = self.page_sizes[-1] return alignment def natural_alignment(self, size): """ Return the natural alignment for the size. Eg 64K regions are are aligned to 64K boundaries. Items smaller than the min page size are min page size aligned and items larger than the max page size are max page sise aligned. """ # Align small objects to the page size. if size <= self.min_page_size(): return self.min_page_size() # Align large items to the max page size. if size > self.max_page_size(): return self.max_page_size() return 2 << (int(math.log(size, 2)) - 1)
sRNAworkbenchuea/UEA_sRNA_Workbench
src/main/java/uk/ac/uea/cmp/srnaworkbench/io/SRNAFastaReader.java
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package uk.ac.uea.cmp.srnaworkbench.io;

import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.logging.Level;
import uk.ac.uea.cmp.srnaworkbench.utils.StringUtils;
import static uk.ac.uea.cmp.srnaworkbench.utils.LOGGERS.WorkbenchLogger.LOGGER;
import static org.apache.commons.io.IOUtils.LINE_SEPARATOR;

/**
 * Reads a FastA file containing sRNA reads.
 *
 * Records with a header of the form {@code >id(count)} contribute
 * {@code count} to the abundance of their sequence; headers without a
 * parenthesised count contribute 1.  Multi-line sequences are
 * concatenated.  A reader instance is single-use: the underlying
 * streams are closed once processing completes (or fails).
 *
 * @author <NAME>
 */
public class SRNAFastaReader
{
    // Total (redundant) read count, summed over all abundances.
    private int r_count;
    // Number of distinct (non-redundant) sequences.
    private int nr_count;
    private BufferedReader br;
    private InputStreamReader isr;
    private FileInputStream fi = null;

    /**
     * Initialises a new reader object using the specified file.
     *
     * @param infile The file containing sRNA reads in FastA format to read from.
     * @throws IOException Thrown if there were any problems initialising readers
     * to the file.
     */
    public SRNAFastaReader( File infile ) throws IOException
    {
        this.fi = new FileInputStream( infile );
        this.isr = new InputStreamReader( fi );
        this.br = new BufferedReader( isr );

        r_count = 0;
        nr_count = 0;
    }

    /**
     * Retrieves the total number of reads found in the file.
     * Only meaningful after {@link #process()} / {@link #processDoubles()}.
     *
     * @return The total number of reads in the file
     */
    public int getTotalReadCount()
    {
        return this.r_count;
    }

    /**
     * Retrieves the number of distinct reads found in the file.
     * Only meaningful after {@link #process()} / {@link #processDoubles()}.
     *
     * @return The number of distinct reads in the file
     */
    public int getDistinctReadCount()
    {
        return this.nr_count;
    }

    /**
     * Legacy entry point for callers that expect integer abundances.
     * Delegates to {@link #processDoubles()} and rounds each abundance
     * to the nearest integer.
     *
     * @return Map of distinct sequence to rounded abundance count
     * @throws IOException Thrown if there were any problems reading the file.
     */
    public HashMap<String, Integer> process() throws IOException
    {
        HashMap<String, Double> doubleData = processDoubles();
        HashMap<String, Integer> intData = new HashMap<>();

        for ( Entry<String, Double> e : doubleData.entrySet() )
        {
            intData.put( e.getKey(), (int) Math.round( e.getValue() ) );
        }

        return intData;
    }

    /**
     * Starts reading from the file, creating a map keyed by distinct reads,
     * mapping to the (possibly fractional) abundance found for that distinct
     * sequence.
     *
     * @return Map of distinct sequence to abundance count
     * @throws IOException Thrown if there were any problems reading the file,
     * including badly formatted abundance annotations.
     */
    public HashMap<String, Double> processDoubles() throws IOException
    {
        HashMap<String, Double> data = new HashMap<>();

        // FIX: close the streams in a finally block so the file handles are
        // released even when parsing throws part-way through; previously an
        // IOException here leaked all three streams.
        try
        {
            String line = "";
            boolean done = false;

            while ( !done )
            {
                if ( line.startsWith( ">" ) )
                {
                    int idx;
                    double abundance = 1;

                    // Header of the form ">id(abundance)"; default to 1 when
                    // no parenthesised abundance is present.
                    if ( ( idx = line.indexOf( "(" ) ) > -1 )
                    {
                        if ( line.indexOf( ")" ) > -1 )
                        {
                            try
                            {
                                abundance = Double.parseDouble(
                                    line.substring( idx + 1, line.indexOf( ")" ) ) );
                            }
                            catch ( NumberFormatException e )
                            {
                                LOGGER.log( Level.SEVERE,
                                    "Bad formatting in input file. {0}Line with problem:{1}",
                                    new Object[]{ LINE_SEPARATOR, line } );
                                throw new IOException(
                                    "Bad formatting in input file. Line with problem: "
                                    + LINE_SEPARATOR + line );
                            }
                        }
                        else
                        {
                            LOGGER.log( Level.SEVERE,
                                "Abundance information not available during decode of FASTA file. {0}Line with problem:{1}",
                                new Object[]{ LINE_SEPARATOR, line } );
                        }
                    }

                    // Accumulate the (possibly multi-line) sequence until the
                    // next header or EOF.
                    StringBuilder sb = new StringBuilder();

                    while ( ( line = br.readLine() ) != null )
                    {
                        if ( line.startsWith( ">" ) )
                        {
                            break;
                        }
                        sb.append( line );
                    }

                    String seq = sb.toString();
                    Double freq = data.get( seq );
                    double c = ( freq == null ) ? abundance : freq + abundance;
                    data.put( seq, c );
                }
                else
                {
                    line = br.readLine();
                }

                if ( line == null )
                {
                    done = true;
                }
            }

            // Probably not the most efficient way of getting the redundant
            // count; it would probably be faster to include something in the
            // reading loop above, but it's simple and clean.
            for ( Double i : data.values() )
            {
                r_count += i;
            }

            nr_count = data.size();
        }
        finally
        {
            br.close();
            isr.close();
            fi.close();
        }

        return data;
    }

    //****************************************************************************

    /** Ad-hoc manual test; note the hard-coded local path. */
    public static void main( String[] args )
    {
        String fileName = "D:/LocalData/hugh/seq_data/irina_data/Organs/GSM118372.fa";

        try
        {
            SRNAFastaReader sfr = new SRNAFastaReader( new File( fileName ) );
            java.util.Map<String, Integer> map = sfr.process();
        }
        catch ( IOException ex )
        {
            System.err.println( ex );
        }
    }
}
sunadm/ClickHouse
dbms/src/Storages/System/StorageSystemQuotaUsage.cpp
#include <Storages/System/StorageSystemQuotaUsage.h>
#include <DataTypes/DataTypeString.h>
#include <DataTypes/DataTypesNumber.h>
#include <DataTypes/DataTypeUUID.h>
#include <DataTypes/DataTypeDateTime.h>
#include <DataTypes/DataTypeNullable.h>
#include <Interpreters/Context.h>
#include <Access/AccessControlManager.h>
#include <Access/QuotaContext.h>
#include <ext/range.h>


namespace DB
{

/// Describes the columns of the system table: five fixed quota columns
/// followed by one "<resource>" / "max_<resource>" pair per resource type.
/// NOTE: fillData() below inserts values positionally, so its column order
/// must stay in lock-step with this list.
NamesAndTypesList StorageSystemQuotaUsage::getNamesAndTypes()
{
    NamesAndTypesList names_and_types{
        {"name", std::make_shared<DataTypeString>()},
        {"id", std::make_shared<DataTypeUUID>()},
        {"key", std::make_shared<DataTypeString>()},
        {"duration", std::make_shared<DataTypeNullable>(std::make_shared<DataTypeUInt64>())},
        {"end_of_interval", std::make_shared<DataTypeNullable>(std::make_shared<DataTypeDateTime>())}};

    for (auto resource_type : ext::range_with_static_cast<Quota::ResourceType>(Quota::MAX_RESOURCE_TYPE))
    {
        DataTypePtr data_type;
        /// Execution time is exposed as a float (seconds); all other
        /// resource counters are integral.
        if (resource_type == Quota::EXECUTION_TIME)
            data_type = std::make_shared<DataTypeFloat64>();
        else
            data_type = std::make_shared<DataTypeUInt64>();

        String column_name = Quota::resourceTypeToColumnName(resource_type);
        names_and_types.push_back({column_name, std::make_shared<DataTypeNullable>(data_type)});
        names_and_types.push_back({String("max_") + column_name, std::make_shared<DataTypeNullable>(data_type)});
    }
    return names_and_types;
}


/// Emits one row per (quota, interval).  Quotas with no intervals still get
/// a single row whose interval/resource columns are defaults (NULLs).
void StorageSystemQuotaUsage::fillData(MutableColumns & res_columns, const Context & context, const SelectQueryInfo &) const
{
    const auto & access_control = context.getAccessControlManager();

    for (const auto & info : access_control.getQuotaUsageInfo())
    {
        for (const auto & interval : info.intervals)
        {
            /// Positional insertion: order must mirror getNamesAndTypes().
            size_t i = 0;
            res_columns[i++]->insert(info.quota_name);
            res_columns[i++]->insert(info.quota_id);
            res_columns[i++]->insert(info.quota_key);
            res_columns[i++]->insert(std::chrono::seconds{interval.duration}.count());
            res_columns[i++]->insert(std::chrono::system_clock::to_time_t(interval.end_of_interval));

            for (auto resource_type : ext::range(Quota::MAX_RESOURCE_TYPE))
            {
                if (resource_type == Quota::EXECUTION_TIME)
                {
                    /// Converted for display; presumably stored in a finer
                    /// internal unit — see Quota::executionTimeToSeconds.
                    res_columns[i++]->insert(Quota::executionTimeToSeconds(interval.used[resource_type]));
                    res_columns[i++]->insert(Quota::executionTimeToSeconds(interval.max[resource_type]));
                }
                else
                {
                    res_columns[i++]->insert(interval.used[resource_type]);
                    res_columns[i++]->insert(interval.max[resource_type]);
                }
            }
        }

        if (info.intervals.empty())
        {
            /// No intervals: emit identification columns plus defaults for
            /// duration, end_of_interval and every used/max pair.
            size_t i = 0;
            res_columns[i++]->insert(info.quota_name);
            res_columns[i++]->insert(info.quota_id);
            res_columns[i++]->insert(info.quota_key);
            for (size_t j = 0; j != Quota::MAX_RESOURCE_TYPE * 2 + 2; ++j)
                res_columns[i++]->insertDefault();
        }
    }
}
}
tongji4m3/boying
documents/课程文档/10.软工/第11组——博影--娱乐票务平台/项目源码/后端/boying/boying-user/src/main/java/com/tongji/boying/service/UserService.java
<gh_stars>10-100 package com.tongji.boying.service; import com.tongji.boying.dto.userParam.*; import com.tongji.boying.model.BoyingUser; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.transaction.annotation.Transactional; /** * 用户管理Service * * @author tongji4m3 */ public interface UserService { void register(UserRegisterParam param); String login(UsernameLoginParam param); String telephoneLogin(TelephoneLoginParam param); String authCodeLogin(AuthCodeLoginParam param); void updateInfo(UpdateInfoParam param); void updatePassword(UpdatePasswordParam param); /** * 根据用户名获取用户 */ BoyingUser getByUsername(String username); /** * 生成验证码 */ void generateAuthCode(String telephone); /** * 获取当前登录用户 */ BoyingUser getCurrentUser(); /** * 获取用户信息 */ UserDetails loadUserByUsername(String username); /** * 刷新token */ String refreshToken(String token); }
dalisoft/dev-env
backend/fastify/src/fastify-plugins/fastify-apollo-server.js
<reponame>dalisoft/dev-env import { ApolloServer } from 'apollo-server-fastify'; import schema from '../graphql/schema/index.js'; import { dev, graphiql, corsWhitelist } from '../config.js'; export default async (fastify) => { // Create instance const apollo = new ApolloServer({ schema, playground: graphiql && { version: '1.7.25' } }); // Add subscription support if (fastify.server) { apollo.installSubscriptionHandlers(fastify.server); } let path = ''; if (process.env.NETLIFY_ENV) { if (dev) { path = '/graphql'; } else { path = '.netlify/functions/graphql'; } } else { path = '/graphql'; } return fastify.register( apollo.createHandler({ path, cors: { origin: [...corsWhitelist], // Avoid side-effects as this variable can be used elsewhere methods: ['GET', 'POST'], allowedHeaders: ['Content-Type', 'Origin', 'Accept'], credentials: true } }) ); };
dshorthouse/occurrence
occurrence-search/src/main/java/org/gbif/occurrence/search/clb/NameUsageMatchingServiceClient.java
<filename>occurrence-search/src/main/java/org/gbif/occurrence/search/clb/NameUsageMatchingServiceClient.java package org.gbif.occurrence.search.clb; import org.gbif.api.model.checklistbank.NameUsageMatch; import org.gbif.api.model.common.LinneanClassification; import org.gbif.api.service.checklistbank.NameUsageMatchingService; import org.gbif.api.vocabulary.Rank; import java.util.HashMap; import java.util.Map; import javax.annotation.Nullable; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; @RequestMapping("/species/match/") public interface NameUsageMatchingServiceClient extends NameUsageMatchingService { @Override default NameUsageMatch match( String scientificName, @Nullable Rank rank, @Nullable LinneanClassification classification, boolean strict, boolean verbose ) { Map<String,String> parameters = new HashMap(); parameters.put("name", scientificName); parameters.put("strict", Boolean.toString(strict)); parameters.put("verbose", Boolean.toString(verbose)); if (classification != null) { parameters.put("kingdom", classification.getKingdom()); parameters.put("phylum", classification.getPhylum()); parameters.put("class", classification.getClazz()); parameters.put("order", classification.getOrder()); parameters.put("family", classification.getFamily()); parameters.put("genus", classification.getGenus()); parameters.put("subgenus", classification.getSubgenus()); } if (rank != null) { parameters.put("rank", rank.name()); } return match(parameters); } @GetMapping NameUsageMatch match(@RequestParam Map<String,String> params); }
kmelodi/EasyBimehLanding_JAVA
src/main/java/ir/notifaano/server/models/BaseModelUploadBuilder.java
/* * EasyBimehLandingLib * * This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ). */ package ir.notifaano.server.models; import java.util.*; public class BaseModelUploadBuilder { //the instance to build private BaseModelUpload baseModelUpload; /** * Default constructor to initialize the instance */ public BaseModelUploadBuilder() { baseModelUpload = new BaseModelUpload(); } /** * وضعیت موفقیت درخواست */ public BaseModelUploadBuilder isSuccess(boolean isSuccess) { baseModelUpload.setIsSuccess(isSuccess); return this; } /** * کد وضعیت درخواست */ public BaseModelUploadBuilder status(int status) { baseModelUpload.setStatus(status); return this; } /** * بدنه ی اصلی درخواست که با توجه به نوع درخواست، تغییر می کند */ public BaseModelUploadBuilder message(Object message) { baseModelUpload.setMessage(message); return this; } /** * اطلاعات اضافه ی درخواست */ public BaseModelUploadBuilder extraData(String extraData) { baseModelUpload.setExtraData(extraData); return this; } /** * اطلاعات خطاهای رخ داده */ public BaseModelUploadBuilder exception(String exception) { baseModelUpload.setException(exception); return this; } /** * Build the instance with the given values */ public BaseModelUpload build() { return baseModelUpload; } }
BryanNoller/OrionUO
OrionUO/GLEngine/GLTexture.cpp
<reponame>BryanNoller/OrionUO
// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com
/***********************************************************************************
**
** GLTexture.cpp
**
** Copyright (C) August 2016 Hotride
**
************************************************************************************
*/
//----------------------------------------------------------------------------------
#include "stdafx.h"
//----------------------------------------------------------------------------------
CGLTexture::CGLTexture()
{
}
//----------------------------------------------------------------------------------
// Releases all GL resources owned by the texture on destruction.
CGLTexture::~CGLTexture()
{
    WISPFUN_DEBUG("c36_f1");
    Clear();
}
//----------------------------------------------------------------------------------
// Draws the texture at (x, y).  When 'checktrans' is set, the texture is
// rendered twice: once alpha-blended and once with the stencil test enabled
// (presumably to record opaque pixels in the stencil buffer — confirm against
// the stencil setup done by the caller).
void CGLTexture::Draw(int x, int y, bool checktrans)
{
    WISPFUN_DEBUG("c36_f2");

    if (Texture != 0)
    {
        if (checktrans)
        {
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            g_GL_Draw(*this, x, y);
            glDisable(GL_BLEND);

            glEnable(GL_STENCIL_TEST);
            g_GL_Draw(*this, x, y);
            glDisable(GL_STENCIL_TEST);
        }
        else
            g_GL_Draw(*this, x, y);
    }
}
//----------------------------------------------------------------------------------
// Draws the texture stretched to width x height; zero width/height fall back
// to the texture's natural dimensions.  Same two-pass blending/stencil scheme
// as Draw(x, y, checktrans) above.
void CGLTexture::Draw(int x, int y, int width, int height, bool checktrans)
{
    WISPFUN_DEBUG("c36_f3");

    if (Texture != 0)
    {
        if (!width)
            width = Width;

        if (!height)
            height = Height;

        if (checktrans)
        {
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            g_GL_DrawStretched(*this, x, y, width, height);
            glDisable(GL_BLEND);

            glEnable(GL_STENCIL_TEST);
            g_GL_DrawStretched(*this, x, y, width, height);
            glDisable(GL_STENCIL_TEST);
        }
        else
            g_GL_DrawStretched(*this, x, y, width, height);
    }
}
//----------------------------------------------------------------------------------
// Draws the texture rotated by 'angle' around the point the helper chooses.
void CGLTexture::DrawRotated(int x, int y, float angle)
{
    WISPFUN_DEBUG("c36_f4");

    if (Texture != 0)
        g_GL_DrawRotated(*this, x, y, angle);
}
//----------------------------------------------------------------------------------
// Draws the texture at 25% opacity; optionally also performs the extra
// stencil-test pass.
void CGLTexture::DrawTransparent(int x, int y, bool stencil)
{
    WISPFUN_DEBUG("c36_f5");

    if (Texture != 0)
    {
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        glColor4f(1.0f, 1.0f, 1.0f, 0.25f);
        g_GL_Draw(*this, x, y);
        glDisable(GL_BLEND);

        if (stencil)
        {
            glEnable(GL_STENCIL_TEST);
            g_GL_Draw(*this, x, y);
            glDisable(GL_STENCIL_TEST);
        }
    }
}
//----------------------------------------------------------------------------------
// Resets the object: clears dimensions and the hit map, and deletes the GL
// texture and both vertex buffers if they were created.
void CGLTexture::Clear()
{
    WISPFUN_DEBUG("c36_f6");
    Width = 0;
    Height = 0;
    m_HitMap.clear();

    if (Texture != 0)
    {
        glDeleteTextures(1, &Texture);
        Texture = 0;
    }

    if (VertexBuffer != 0)
    {
        glDeleteBuffers(1, &VertexBuffer);
        VertexBuffer = 0;
    }

    if (MirroredVertexBuffer != 0)
    {
        glDeleteBuffers(1, &MirroredVertexBuffer);
        MirroredVertexBuffer = 0;
    }
}
//----------------------------------------------------------------------------------
// Hit-test: returns true when the mouse cursor is over the texture drawn at
// (x, y).  With 'pixelCheck', only pixels flagged in m_HitMap count.
// NOTE(review): unlike the other methods this one has no WISPFUN_DEBUG marker.
bool CGLTexture::Select(int x, int y, bool pixelCheck)
{
    // Convert the texture origin into cursor-relative coordinates.
    x = g_MouseManager.Position.X - x;
    y = g_MouseManager.Position.Y - y;

    if (x >= 0 && y >= 0 && x < Width && y < Height)
    {
        if (!pixelCheck)
            return true;

        int pos = (y * Width) + x;

        if (pos < (int)m_HitMap.size())
            return (m_HitMap[pos] != 0);
    }

    return false;
}
//----------------------------------------------------------------------------------
johannes-darms/OLS
ols-apps/ols-config-importer/src/main/java/uk/ac/ebi/spot/ols/YamlConfigParsingException.java
<reponame>johannes-darms/OLS<gh_stars>10-100 package uk.ac.ebi.spot.ols; /** * @author <NAME> * @date 20/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ public class YamlConfigParsingException extends RuntimeException { public YamlConfigParsingException(String message) { super(message); } }
xmf-xmodeler/MosaicFX
src/tool/clients/undo/UndoPlugin.java
<reponame>xmf-xmodeler/MosaicFX package tool.clients.undo; // TODO: Auto-generated Javadoc /** * The Class UndoPlugin. */ public class UndoPlugin { /** The plugin. */ private static UndoPlugin plugin; /** * Instantiates a new undo plugin. */ public UndoPlugin() { plugin = this; } /** * Gets the default. * * @return the default */ public static UndoPlugin getDefault() { return plugin; } /** * Early startup. */ public void earlyStartup() { // UndoClient undoClient = new UndoClient(); // XmfPlugin.xos.newMessageClient("com.ceteva.undo",undoClient); } }
AmrDeveloper/Astro
src/astro/executor/StreamConsumer.java
<gh_stars>10-100 package astro.executor; import java.io.IOException; import java.io.InputStream; public class StreamConsumer extends Thread { private InputStream mInputStream; private IOException mExceptionIO; private StringBuilder mOutputBuilder; public StreamConsumer(InputStream stream) { mInputStream = stream; } public String getCodeResult() { return mOutputBuilder.toString(); } public IOException getCodeException() { return mExceptionIO; } public boolean hasException() { return !(mExceptionIO == null); } @Override public void run() { mOutputBuilder = new StringBuilder(); try { int input; while ((input = mInputStream.read()) != -1) { mOutputBuilder.append((char) input); } } catch (IOException ex) { mExceptionIO = ex; } } }
dogma-io/skeema
src/validators/utils.js
<reponame>dogma-io/skeema /** @flow */ import type {State} from '../types' export function initState(): State { return { errors: [], warnings: [], } } export function mergeState(...states: Array<State>): State { return states.reduce( (accumulator: State, currentValue: State): State => ({ errors: accumulator.errors.concat(currentValue.errors), warnings: accumulator.warnings.concat(currentValue.warnings), }), initState(), ) } export function validateSchema( type?: string, schema: Object, // eslint-disable-line flowtype/no-weak-types path: string, requiredKeys: Array<string>, allowedKeys?: Array<string> = [], ): State { const newState = initState() const allAllowedKeys = [ '$comment', 'description', 'examples', 'title', ].concat(allowedKeys) requiredKeys.forEach((key: string) => { if (!(key in schema)) { newState.errors.push({ message: `required key "${key}" is missing`, path, }) } }) Object.keys(schema).forEach((key: string) => { if (!requiredKeys.includes(key) && !allAllowedKeys.includes(key)) { newState.warnings.push({ message: `unknown key "${key}"`, path, }) } }) if (type !== undefined && schema.type !== type) { newState.errors.push({ message: `type must be string literal "${type}"`, path: `${path}.type`, }) } const {$comment, description, examples, title} = schema if ($comment !== undefined && typeof $comment !== 'string') { newState.errors.push({ message: '$comment must be a string', path: `${path}.$comment`, }) } if (description !== undefined && typeof description !== 'string') { newState.errors.push({ message: 'description must be a string', path: `${path}.description`, }) } if (examples !== undefined && !Array.isArray(examples)) { newState.errors.push({ message: 'examples must be an array', path: `${path}.examples`, }) } if (title !== undefined && typeof title !== 'string') { newState.errors.push({ message: 'title must be a string', path: `${path}.title`, }) } return newState }
ShaiRoitman/clu
clu/Utils/BlockingTaskQueue.cpp
<gh_stars>0 #include "BlockingTaskQueue.h" #include <string> #include <chrono> #include <thread> #include <iostream> #include <deque> #include <mutex> #include <atomic> using namespace std; static int Created = 0; static int Running = 1; static int Stopping = 2; static int Stopped = 3; BlockingTaskQueue::BlockingTaskQueue(int queueSize) { this->queueSize = queueSize; this->state.exchange(Created); } void BlockingTaskQueue::Start(int numberOfThreads) { this->state.exchange(Running); for (int i = 0; i < numberOfThreads ; ++i) { auto newThread = new thread([=]() { this->WorkerThread(); }); this->threads.push_back(newThread); } } void BlockingTaskQueue::WorkerThread() { while (this->state.load() == Running || this->state.load() == Stopping) { Task* task; { std::unique_lock<std::mutex> flk(this->tasksMutex); this->cv.wait(flk, [=]() { return this->tasks.size() > 0 || this->state.load() == Stopping; }); if (this->tasks.size() > 0) { task = *this->tasks.begin(); this->tasks.pop_front(); this->queueFull.notify_all(); } else { task = nullptr; } } if (task != nullptr) { (*task)(); delete task; } else { if (this->state.load() == Stopping) { this->queueFull.notify_all(); break; } } } } bool BlockingTaskQueue::Add(Task* task) { if (this->state.load() == Stopping || this->state.load() == Stopped) return false; std::unique_lock<std::mutex> flk(this->fullQueueMutex); this->queueFull.wait(flk, [=]() { return this->tasks.size() < this->queueSize; }); { std::lock_guard<std::mutex> lk(this->tasksMutex); this->tasks.push_back(task); } this->cv.notify_all(); return true; } void BlockingTaskQueue::Drain() { this->state.compare_exchange_strong(Running, Stopping); { std::unique_lock<std::mutex> flk(this->tasksMutex); this->queueFull.wait(flk, [=]() { return this->tasks.size() == 0; }); } this->cv.notify_all(); for (vector<thread*>::iterator iter = this->threads.begin(); iter != this->threads.end(); ++iter) { auto t = (*iter); t->join(); } this->state.exchange(Stopped); }