repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
Nageek15/jcodec
src/main/proj/src/org/jcodec/codecs/vpx/vp8/enums/MBLvlFeatures.java
<reponame>Nageek15/jcodec package org.jcodec.codecs.vpx.vp8.enums; /** * This class is part of JCodec ( www.jcodec.org ) This software is distributed * under FreeBSD License. * * The class is a direct java port of libvpx's * (https://github.com/webmproject/libvpx) relevant VP8 code with significant * java oriented refactoring. * * @author The JCodec project * */ public enum MBLvlFeatures { ALT_Q, /* Use alternate Quantizer .... */ ALT_LF; /* Use alternate loop filter value... */ public static final int featureCount = MBLvlFeatures.values().length; }
siy/reactive-toolbox
async-io/src/test/java/org/reactivetoolbox/io/examples/ddg/RequestHandler_Test.java
/*
 * Copyright (c) 2020 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.reactivetoolbox.io.examples.ddg;

/**
 * Placeholder for request-handler examples. The class body is intentionally
 * empty; the commented-out methods below sketch three dependency-retrieval
 * patterns (parallel fetch, fallback on null, and a mix of both) that the
 * surrounding examples package is presumably meant to illustrate.
 * NOTE(review): none of the referenced types (Parameters, ResponseObject,
 * Dependency1/2, dependencyService1/2/3) exist in this file — these snippets
 * are design notes, not compilable code.
 */
public class RequestHandler_Test {
    // Pattern 1: fetch two independent dependencies, then combine them.
    //    public ResponseObject handler1(final Parameters requestParameters) {
    //        final Dependency1 dependency1 = dependencyService1.retrieveDependency(requestParameters);
    //        final Dependency2 dependency2 = dependencyService2.retrieveDependency(requestParameters);
    //
    //        return buildResponse(dependency1, dependency2);
    //    }
    //
    // Pattern 2: fetch with a fallback service when the first returns null.
    //    public ResponseObject handler2(final Parameters requestParameters) {
    //        Dependency1 dependency = dependencyService1.retrieveDependency(requestParameters);
    //
    //        if (dependency == null) {
    //            dependency = dependencyService2.retrieveDependency(requestParameters);
    //        }
    //
    //        return buildResponse(dependency);
    //    }
    //
    // Pattern 3: one mandatory dependency plus one with a fallback.
    //    public ResponseObject handler3(final Parameters requestParameters) {
    //        final Dependency1 dependency1 = dependencyService1.retrieveDependency(requestParameters);
    //        Dependency2 dependency2 = dependencyService2.retrieveDependency(requestParameters);
    //
    //        if (dependency2 == null) {
    //            dependency2 = dependencyService3.retrieveDependency(requestParameters);
    //        }
    //
    //        return buildResponse(dependency1, dependency2);
    //    }
}
xmuriqui/muriqui
codopt/iquad.cpp
#include "OPT_solvers.hpp"
#include "OPT_tools.hpp"

#if OPT_HAVE_IQUAD
#include "iquad.hpp"
using namespace iquad;
#endif

using namespace optsolvers;

// Thin optsolvers wrapper around the iquad (IQD_BranchAndBound) solver.
// Every solver-touching method has two preprocessor variants: the real body
// when OPT_HAVE_IQUAD is set, and a stub that reports the library as
// unavailable otherwise.

OPT_Iquad::OPT_Iquad()
{
    initialize();
}

OPT_Iquad::~OPT_Iquad()
{
    deallocateSolverEnv();
}

// Releases the branch-and-bound object (if iquad is built in), then lets the
// base class release its own resources.
void OPT_Iquad::deallocateSolverEnv()
{
#if OPT_HAVE_IQUAD
    OPT_secDelete(bb);
#endif
    OPT_MyNLPSolver::deallocateSolverEnv();
}

bool OPT_Iquad::getMinusLambdaOnLagran()
{
    return false;
}

OPT_LISTSOLVERS OPT_Iquad::getSolverCode()
{
    return optsolvers::OPT_IQUAD;
}

// Base-class initialization plus clearing the solver handle so that
// deallocateSolverEnv()/initSolverEnv() can test it safely.
void OPT_Iquad::initialize()
{
    OPT_MyNLPSolver::initialize();
#if OPT_HAVE_IQUAD
    bb = NULL;
#endif
}

// Reports the iteration count from the last branch-and-bound run.
int OPT_Iquad::getNumberOfIterations(long unsigned int& niter)
#if OPT_HAVE_IQUAD
{
    niter = bb->out_number_of_iterations;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Allocates a fresh IQD_BranchAndBound object. The max* parameters are
// currently unused here (the base-class call that would consume them is
// commented out below).
int OPT_Iquad::initSolverEnv(const int maxConstrs, const int maxVars, const int maxQuadNz)
#if OPT_HAVE_IQUAD
{
    /*const int r = OPT_MyNLPSolver::initSolverEnv(maxConstrs, maxVars, maxQuadNz);
    if( r != 0 )
    {
        #if OPT_DEBUG_MODE
            OPT_PRINTERRORNUMBER(r);
        #endif
        return r;
    } */

    // NOTE(review): this calls __desallocateSolverEnv(), but the method
    // defined in this file is deallocateSolverEnv(). Confirm the intended
    // target (possibly a base-class helper with the underscored spelling).
    __desallocateSolverEnv();

    bb = new (std::nothrow) IQD_BranchAndBound;
    if( !bb )
    {
        #if OPT_DEBUG_MODE
            OPT_PRINTMEMERROR;
        #endif
        return OPT_MEMORY_ERROR;
    }

    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Objective-cut bounds forwarded to the branch-and-bound run.
int OPT_Iquad::setObjCutLowerBound(const double objLBound)
#if OPT_HAVE_IQUAD
{
    bb->in_lower_bound = objLBound;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setObjCutUpperBound(const double objUBound)
#if OPT_HAVE_IQUAD
{
    bb->in_upper_bound = objUBound;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Time limits: CPU time and wall-clock time are separate iquad settings.
int OPT_Iquad::setMaxCPUTime(const double time)
#if OPT_HAVE_IQUAD
{
    bb->in_max_cpu_time = time;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setMaxTime(const double time)
#if OPT_HAVE_IQUAD
{
    bb->in_max_time = time;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setNumberOfThreads(const int nthreads)
#if OPT_HAVE_IQUAD
{
    bb->in_number_of_threads = nthreads;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setOutputLevel( const int level )
#if OPT_HAVE_IQUAD
{
    bb->in_print_level = level;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Dual/primal feasibility tolerances are not exposed by iquad; only the
// relative optimality (convergence) tolerance is supported.
int OPT_Iquad::setRelativeDualTol( const double tol )
#if OPT_HAVE_IQUAD
{
    return OPT_OPERATION_NOT_SUPPORTED;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setRelativeOptimalityTol( const double tol )
#if OPT_HAVE_IQUAD
{
    bb->in_relative_convergence_tol = tol;
    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setRelativePrimalTol( const double tol )
#if OPT_HAVE_IQUAD
{
    return OPT_OPERATION_NOT_SUPPORTED;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Generic parameter setters: forward to iquad and translate a nonzero return
// into OPT_BAD_INPUT after logging via the print*ParamErrorMsg helpers.
int OPT_Iquad::setDoubleParameter(const char *param, const double value)
#if OPT_HAVE_IQUAD
{
    const int r = bb->setDoubleParameter(param, value);

    if( r != 0 )
    {
        printDblParamErrorMsg(r, param, value );
        return OPT_BAD_INPUT;
    }

    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setIntegerParameter(const char *param, const int value )
#if OPT_HAVE_IQUAD
{
    const int r = bb->setIntegerParameter(param, value);

    if( r != 0 )
    {
        printIntParamErrorMsg(r, param, value );
        return OPT_BAD_INPUT;
    }

    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

int OPT_Iquad::setStringParameter(const char *param, const char *value)
#if OPT_HAVE_IQUAD
{
    const int r = bb->setStringParameter(param, value);

    if( r != 0 )
    {
        printStrParamErrorMsg(r, param, value );
        return OPT_BAD_INPUT;
    }

    return 0;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Convenience overload: solve with no sub-solver or SDP parameter objects.
int OPT_Iquad::solve(const bool resetSol, const bool storeSol, const bool storeConstrs, const bool storeDualSol)
#if OPT_HAVE_IQUAD
{
    return solveWParams(resetSol, storeSol, storeConstrs, storeDualSol, NULL, NULL);
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif

// Runs the branch-and-bound, maps iquad return codes to optsolvers codes,
// and copies solution/constraint data back into this object's fields.
// NOTE(review): storeDualSol is accepted but not used in this body.
int OPT_Iquad::solveWParams(const bool resetSol, const bool storeSol, const bool storeConstrs, const bool storeDualSol, OPT_GeneralSolverParams *subSolverParams, OPT_GeneralSolverParams *sdpParams)
#if OPT_HAVE_IQUAD
{
    const int n = prob.n;

    if( resetSol )
        this->resetSol();
    else
    {
        // Keep previous solution arrays but invalidate status fields.
        retCode = OPT_UNDEFINED;
        origSolverRetCode = INT_MAX;
        feasSol = false;
    }

    origSolverRetCode = bb->run( prob, subSolverParams, sdpParams );

    // Translate iquad status codes into the optsolvers return-code space.
    switch( origSolverRetCode )
    {
    case IQD_OPTIMAL_SOLUTION:
        retCode = OPT_OPTIMAL_SOLUTION;
        break;
    case IQD_INFEASIBLE_PROBLEM:
        retCode = OPT_INFEASIBLE_PROBLEM;
        break;
    case IQD_MAX_TIME_STOP:
        retCode = OPT_MAX_TIME;
        break;
    case IQD_MAX_ITERATIONS_STOP:
        retCode = OPT_MAX_ITERATIONS;
        #if OPT_PRINT_MAX_ITER_WARNING
        // Rate-limited warning: stop nagging after
        // maxNumberOfWarningsByIterLimit occurrences.
        if( numberOfWarningsByIterLimit < maxNumberOfWarningsByIterLimit )
        {
            std::cerr << OPT_PREPRINT "Warning: Maximum iteration achieved on Iquad solving!\n";
            numberOfWarningsByIterLimit++;

            if( numberOfWarningsByIterLimit == maxNumberOfWarningsByIterLimit )
                std::cerr << OPT_PREPRINT "Warning: Maximum number of warnings by maximum iteration achieved! Stopping these warnings.\n";
        }
        #endif
        break;
    case IQD_UNBOUNDED_PROBLEM:
        retCode = OPT_UNBOUNDED_PROBLEM;
        break;
    case IQD_MEMORY_ERROR:
        retCode = OPT_MEMORY_ERROR;
        break;
    case IQD_BAD_DEFINITIONS:
        retCode = OPT_BAD_INPUT;
        break;
    case IQD_CALLBACK_FUNCTION_ERROR:
        retCode = OPT_CALLBACK_FUNCTION_ERROR;
        break;
    case IQD_LIBRARY_NOT_AVAILABLE:
        retCode = OPT_LIBRARY_NOT_AVAILABLE;
        break;
    case IQD_SDP_SOLVING_ERROR:
    case IQD_QCP_SOLVER_ERROR:
    case IQD_NLP_SOLVER_ERROR:
        retCode = OPT_SUBSOLVER_ERROR;
        break;
    default:
        retCode = OPT_UNDEFINED_ERROR;
    }

    feasSol = bb->out_feasible_sol;
    objValue = bb->out_best_obj;
    dualObjValue = bb->out_lower_bound;

    if(storeSol)
        OPT_copyArray(n, bb->out_best_sol, sol);

    if( storeConstrs && feasSol )
    {
        if( nmChg )
        {
            // Model changed since last evaluation: re-enable all constraints.
            OPT_setAllArray(prob.m, auxCEval, true);
            nmChg = false;
        }

        prob.constraintsEval( threadNumber, true, auxCEval, bb->out_best_sol, constr );
    }

    // iquad works on the internally-scaled objective; undo the sign flip for
    // maximization problems (negative objFactor).
    if( prob.objFactor < 0 )
    {
        objValue = -objValue;
        dualObjValue = -dualObjValue;
    }

    return retCode;
}
#else
{
    OPT_LIBNOTAVAILABLERET(getSolverCode());
}
#endif
dmgerman/gerrit
java/com/google/gerrit/extensions/api/GerritApi.java
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1 begin_comment comment|// Copyright (C) 2013 The Android Open Source Project end_comment begin_comment comment|// end_comment begin_comment comment|// Licensed under the Apache License, Version 2.0 (the "License"); end_comment begin_comment comment|// you may not use this file except in compliance with the License. end_comment begin_comment comment|// You may obtain a copy of the License at end_comment begin_comment comment|// end_comment begin_comment comment|// http://www.apache.org/licenses/LICENSE-2.0 end_comment begin_comment comment|// end_comment begin_comment comment|// Unless required by applicable law or agreed to in writing, software end_comment begin_comment comment|// distributed under the License is distributed on an "AS IS" BASIS, end_comment begin_comment comment|// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. end_comment begin_comment comment|// See the License for the specific language governing permissions and end_comment begin_comment comment|// limitations under the License. end_comment begin_package DECL|package|com.google.gerrit.extensions.api package|package name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api package|; end_package begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api operator|. name|accounts operator|. name|Accounts import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api operator|. name|changes operator|. name|Changes import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api operator|. name|config operator|. name|Config import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. 
name|extensions operator|. name|api operator|. name|groups operator|. name|Groups import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api operator|. name|plugins operator|. name|Plugins import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|api operator|. name|projects operator|. name|Projects import|; end_import begin_import import|import name|com operator|. name|google operator|. name|gerrit operator|. name|extensions operator|. name|restapi operator|. name|NotImplementedException import|; end_import begin_interface DECL|interface|GerritApi specifier|public interface|interface name|GerritApi block|{ DECL|method|accounts () name|Accounts name|accounts parameter_list|() function_decl|; DECL|method|changes () name|Changes name|changes parameter_list|() function_decl|; DECL|method|config () name|Config name|config parameter_list|() function_decl|; DECL|method|groups () name|Groups name|groups parameter_list|() function_decl|; DECL|method|projects () name|Projects name|projects parameter_list|() function_decl|; DECL|method|plugins () name|Plugins name|plugins parameter_list|() function_decl|; comment|/** * A default implementation which allows source compatibility when adding new methods to the * interface. 
*/ DECL|class|NotImplemented class|class name|NotImplemented implements|implements name|GerritApi block|{ annotation|@ name|Override DECL|method|accounts () specifier|public name|Accounts name|accounts parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} annotation|@ name|Override DECL|method|changes () specifier|public name|Changes name|changes parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} annotation|@ name|Override DECL|method|config () specifier|public name|Config name|config parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} annotation|@ name|Override DECL|method|groups () specifier|public name|Groups name|groups parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} annotation|@ name|Override DECL|method|projects () specifier|public name|Projects name|projects parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} annotation|@ name|Override DECL|method|plugins () specifier|public name|Plugins name|plugins parameter_list|() block|{ throw|throw operator|new name|NotImplementedException argument_list|() throw|; block|} block|} block|} end_interface end_unit
xiao-akatsuki/xiaoTools
src/main/java/com/xiaoTools/core/io/lineSeparator/LineSeparator.java
<gh_stars>1-10 package com.xiaoTools.core.io.lineSeparator; /** * [换行符枚举](Linefeed enumeration) * @description zh - 换行符枚举 * @description en - Linefeed enumeration * @version V1.0 * @author XiaoXunYao * @since 2021-10-29 22:04:50 */ public enum LineSeparator { /** Mac系统换行符:"\r" */ MAC("\r"), /** Linux系统换行符:"\n" */ LINUX("\n"), /** Windows系统换行符:"\r\n" */ WINDOWS("\r\n"); private final String value; LineSeparator(String lineSeparator) { this.value = lineSeparator; } public String getValue() { return this.value; } }
finnjk/YADA
yada-api/src/main/java/com/novartis/opensource/yada/security/Gatekeeper.java
/** * Copyright 2016 Novartis Institutes for BioMedical Research Inc. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Copyright 2016 Novartis Institutes for BioMedical Research Inc. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.novartis.opensource.yada.security; import java.io.StringReader; import java.lang.reflect.InvocationTargetException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import com.auth0.jwt.JWT; import com.auth0.jwt.algorithms.Algorithm; import com.auth0.jwt.exceptions.JWTVerificationException; import com.novartis.opensource.yada.ConnectionFactory; import com.novartis.opensource.yada.Finder; import com.novartis.opensource.yada.JSONParams; import com.novartis.opensource.yada.JSONParamsEntry; import com.novartis.opensource.yada.QueryManager; import com.novartis.opensource.yada.Service; import com.novartis.opensource.yada.YADAConnectionException; import com.novartis.opensource.yada.YADAException; import com.novartis.opensource.yada.YADAFinderException; import com.novartis.opensource.yada.YADAQuery; import com.novartis.opensource.yada.YADAQueryConfigurationException; import com.novartis.opensource.yada.YADARequest; import com.novartis.opensource.yada.YADARequestException; import com.novartis.opensource.yada.YADASQLException; import com.novartis.opensource.yada.plugin.AbstractPreprocessor; import com.novartis.opensource.yada.plugin.YADAPluginException; import com.novartis.opensource.yada.util.YADAUtils; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.operators.conditional.AndExpression; import net.sf.jsqlparser.parser.CCJSqlParserManager; import net.sf.jsqlparser.parser.CCJSqlParserUtil; import net.sf.jsqlparser.statement.select.PlainSelect; import net.sf.jsqlparser.statement.select.Select; /** * A Preprocess plugin to evaluate user authorization for query 
execution. * * @author <NAME> * @since 7.0.0 * */ @SecurityPreprocessor public class Gatekeeper extends AbstractPreprocessor { /** * Local logger handle */ private static final Logger LOG = LoggerFactory.getLogger(Gatekeeper.class); /** * Constant equal to {@value} */ protected static final String DEFAULT_AUTH_TOKEN_PROPERTY = "security.token"; /** * Constant equal to {@value} */ protected static final String EXECUTION_POLICY_COLUMNS = "execution.policy.columns"; /** * Constant equal to {@value} */ protected static final String EXECUTION_POLICY_INDICES = "execution.policy.indices"; /** * Constant equal to {@value} */ protected static final String EXECUTION_POLICY_INDEXES = "execution.policy.indexes"; /** * Constant equal to {@value} */ protected static final String CONTENT_POLICY_PREDICATE = "content.policy.predicate"; /** * Constant equal to {@value} * @since 9.0.0 */ protected static final String COLUMNS = "columns"; /** * Constant equal to {@value} * @since 9.0.0 */ protected static final String INDICES = "indices"; /** * Constant equal to {@value} * @since 9.0.0 */ protected static final String INDEXES = "indexes"; /** * Constant equal to {@value} * @since 9.0.0 */ protected static final String PREDICATE = "predicate"; /** * Constant equal to {@value} * * @since 8.1.0 * @deprecated since 9.0.0 moved */ @Deprecated protected static final String RX_COL_INJECTION = "(([a-zA-Z0-9_]+):)?(get[A-Z][a-zA-Z0-9_]+\\([A-Za-z0-9_]*\\))"; /** * Constant equal to {@value} * * @since 8.1.0 * @deprecated since 9.0.0 moved */ @Deprecated protected static final String RX_IDX_INJECTION = "(([0-9]+):)?(get[A-Z][a-zA-Z0-9_]+\\([A-Za-z0-9_]*\\))"; // -------------------------------------------------------------------------------- // TODO: Change these to system properties // -------------------------------------------------------------------------------- /** * Constant with value: {@value} SourceExchanger plugin reference * * @since 1.0 */ private final static String 
SOURCE_EXCHANGER = "SourceExchanger"; // -------------------------------------------------------------------------------- /** * Contains the list of allow qualifiers from A11N */ private ArrayList<String> allowList = new ArrayList<String>(); /** * Contains the list of deny qualifiers from A11N */ private ArrayList<String> denyList = new ArrayList<String>(); /** * Contains the user identity data from authority */ private Object identity = new Object(); /** * Contains the synchronization token */ private String syncToken = new String(); /** * Contains the conditions specified in A11N. */ private JSONObject locks = new JSONObject(); /** * Contains the user grant from authority */ private Object grant = new Object(); /** * Contains the list of groups for Content Policy */ private ArrayList<String> ships = new ArrayList<String>(); /** * Validates the request host, user, security params, and security query * execution results * * @throws YADAPluginException when plugin processing fails * @throws YADASecurityException when the user is unauthorized or there is an * error in policy processing * @see com.novartis.opensource.yada.plugin.AbstractPreprocessor#engage(com.novartis.opensource.yada.YADARequest, * com.novartis.opensource.yada.YADAQuery) */ @Override public void engage(YADARequest yadaReq, YADAQuery yq) throws YADAPluginException, YADASecurityException { super.engage(yadaReq, yq); // Make header available try { this.setHTTPHeaders(YADA_HDR_AUTH_NAMES); setSyncToken(obtainSyncToken(yadaReq)); } catch (YADARequestException e) { // Gatekeeper prevents access on header setting or syncToken // obtaining/setting errors throw new YADASecurityException("Unauthorized."); } validateYADARequest(); } /** * Checking header then cookie for token to set * * @throws YADASecurityException when token cannot be successfully obtained */ @Override public void obtainToken(YADARequest yadaReq) throws YADASecurityException { // Check header for token Pattern rxAuthTkn = 
Pattern.compile(RX_HDR_AUTH_TKN_PREFIX); if (this.hasHttpHeaders()) { for (int i = 0; i < this.getHttpHeaders().names().length(); i++) { Matcher m1 = rxAuthTkn .matcher((CharSequence) this.getHttpHeaders().get(this.getHttpHeaders().names().getString(i))); if (m1.matches() && m1.groupCount() == 3) {// valid header this.setToken(m1.group(3)); } } } else { // Check cookie for token this.setToken(getCookie(YADA_CK_TKN)); } if (!hasToken()) { // Always require a token for Gatekeeper access throw new YADASecurityException("Unauthorized."); } } /** * Checking header then cookie for token to set * * @param yadaReq the {@link YADARequest} containing the token header to process * @return the value of the {@link Authorization#YADA_HDR_SYNC_TKN} header */ public String obtainSyncToken(YADARequest yadaReq) { // Check header for sync token String result = new String(); if (this.hasHttpHeaders()) { JSONObject object = this.getHttpHeaders(); JSONArray keys = object.names(); for (int i = 0; i < keys.length(); ++i) { if (keys.getString(i).equalsIgnoreCase(YADA_HDR_SYNC_TKN)) { result = object.getString(keys.getString(i)); } } } return result; } /** * Overrides {@link TokenValidator#validateToken()}. 
* * @throws YADASecurityException when the {@link #DEFAULT_AUTH_TOKEN_PROPERTY} * is not set */ @Override public void validateToken() throws YADASecurityException { // validate token as well-formed try { JWT.require(Algorithm.HMAC512(System.getProperty(JWSKEY))).withIssuer(System.getProperty(JWTISS)).build() .verify((String) this.getToken()); } catch (JWTVerificationException | IllegalArgumentException exception) { // UTF-8 encoding not supported String msg = "Validation Error "; throw new YADASecurityException(msg, exception); } } /** * * @return the identity object from the identity cache * @since 8.7.6 */ public Object obtainIdentity() { Object result = getCacheEntry((String) this.getToken()); return result; } /** * Obtain specified GRANT(KEYS) from current identity * * @param app the YADA app for which to obtain grants * @return a {@link JSONArray} as an {@link Object} containing the {@link Authorization#YADA_IDENTITY_KEYS} * @throws YADASecurityException when the user's identity is malformed, i.e., invalid json * @since 8.7.6 */ public Object obtainGrant(String app) throws YADASecurityException { JSONObject jo = null; try { jo = new JSONObject((String) getIdentity()); } catch (JSONException e) { String msg = "Identity is malformed."; throw new YADASecurityException(msg, e); } JSONArray ja = jo.getJSONArray(YADA_IDENTITY_GRANTS); // find the app JSONArray keys = new JSONArray(); for (int i = 0; i < ja.length(); i++) { if (app.equals(ja.getJSONObject(i).getString(YADA_IDENTITY_APP).toString())) { for (int ii = 0; ii < ja.getJSONObject(i).getJSONArray(YADA_IDENTITY_KEYS).length(); ii++) { keys.put(ja.getJSONObject(i).getJSONArray(YADA_IDENTITY_KEYS).getJSONObject(ii).getString(YADA_IDENTITY_KEY)); } } } return keys; } /** * Authorization of query use for given context * {@link Authorization#authorize()} * * @since 8.7.6 */ @Override public void authorize() throws YADASecurityException { boolean authorized = false; boolean blacklist = false; // Check authority for 
identity Object ident = obtainIdentity(); setIdentity(ident); // Check a11n table for locks try { setLocks(obtainLocks()); } catch (YADASecurityException e) { String msg = "Unauthorized. Unable to set query locks."; throw new YADASecurityException(msg, e); } //TODO there may need to be an array of "locks" i.e., qualifier:type pairs in the // authorization policy spec. This would mimic multiple rows in the a11n table. if (hasLocks()) { JSONArray key = getLocks().names(); for (int i = 0; i < key.length(); ++i) { String grant = key.getString(i); String listtype = getLocks().getString(grant); if (listtype.equals(AUTH_TYPE_WHITELIST)) { // Add whitelist locks to allowList addAllowListEntry(grant); } else { // Remove blacklist locks and require key by setting blacklist to true removeAllowListEntry(grant); blacklist = true; } } } if (hasToken() && hasSyncToken()) { // Check header.syncToken == identity.syncToken // https://en.wikipedia.org/wiki/Cross-site_request_forgery#Synchronizer_token_pattern JSONObject jo = new JSONObject((String) getIdentity()); if (jo.getString(YADA_HDR_SYNC_TKN).equals(getSyncToken())) { // What app (context) are we interested in? String app = ""; YADARequest ryq = this.getYADARequest(); if (ryq.getPluginConfig().containsKey(SOURCE_EXCHANGER)) { // connection source app - from SourceExchanger argument app = ryq.getPluginConfig().get(SOURCE_EXCHANGER).get(0); } else { // connection source app - from query YADAQuery ayq = this.getYADAQuery(); app = ayq.getApp(); } try { // Obtain a relevant GRANT if it exists within IDENTITY setGrant(obtainGrant(app)); } catch (YADASecurityException e) { String msg = "User is not authorized"; throw new YADASecurityException(msg); } // Is there a GRANT for this APP or is there a blacklist entry requiring // a valid lock? if (hasGrants() || blacklist == true) { if (getAllowList().size() > 0) { // Do we have a GRANT containing a KEY fitting the LOCK (specified // in a11n table 'A' row) protecting this query? 
for (int i = 0; i < ((JSONArray) getGrant()).length(); i++) { if (getAllowList().contains(((JSONArray) getGrant()).get(i).toString())) { authorized = true; } } } else { // GRANT exists for this APP and no LOCK specified authorized = true; } } } } // Check for failed authorization and throw error if (!authorized) { String msg = "Authorization Error."; throw new YADASecurityException(msg); } } /** * * @return yadaauth {Role: [whitelist/blacklist]} * @throws YADASecurityException when a database-backed yada index request cannot be processed * @since 8.7.6 */ public JSONObject obtainLocks() throws YADASecurityException { JSONObject result = new JSONObject(); String type; String qualifier; List<?> qualifierList; YADASecuritySpec spec = this.getSecuritySpec(); if(Finder.hasYADALib()) { if(null != spec && spec.hasAuthorizationPolicy()) { Map<String, Object> policy = spec != null ? this.getSecuritySpec().getAuthorizationPolicy() : null; type = (String) policy.get(YADASecuritySpec.KEY_TYPE); qualifierList = (List<?>) policy.get(YADASecuritySpec.KEY_QUALIFIER); for(Object q : qualifierList) { result.put((String) q, type); } } } else { // get the security params associated to the query // TODO: Replace prepared statement with YADA query String qname = getYADAQuery().getQname(); try (ResultSet rs = YADAUtils.executePreparedStatement(YADA_A11N_QUERY, new Object[] { qname });) { while (rs.next()) { // YADA_A11N.POLICY == "A" if ("A".equals(rs.getString(2))) { type = rs.getString(3); // YADA_A11N.TYPE qualifier = rs.getString(4); // YADA_A11N.QNAME (a role name) result.put(qualifier, type); } } ConnectionFactory.releaseResources(rs); } catch (SQLException | YADAConnectionException | YADASQLException e) { String msg = "There was a problem executing the authorization query."; throw new YADASecurityException(msg, e); } } return result; } /** * Returns {@code true} if {@link #WHITELIST} or {@link #BLACKLIST} is stored in * the {@code YSEC_PARAMS} table corresponding to the security 
target * * @param policy the value of the {@code YSEC_PARAM_NAME} field in the * {@code YSEC_PARAMS} table * @return {@code true} if {@link #WHITELIST} or {@link #BLACKLIST} is set */ protected boolean hasValidPolicy(String policy) { return isWhitelist(policy) || isBlacklist(policy); } /** * Retrieves and processes the security query, and validates the results per the * security specification s * * * @throws YADASecurityException when there is an issue retrieving or processing * the security query */ @SuppressWarnings("unchecked") @Override public void applyExecutionPolicy() throws YADASecurityException { // TODO the security query executes for every iteration of the qname // in the current request. a flag needs to be set somewhere to indicate // clearance has already been granted. This can't be in YADAQuery because of // caching. // TODO needs to support app targets as well as qname targets // TODO tests for auth failure, i.e., unauthorized // TODO tests for ignoring attempted plugin overrides // TODO make it impossible to execute a protector query as a primary query // without a server-side flag set, or // perhaps some authorization (i.e., for testing, maybe with a content // policy) // This will close an attack vector. 
// TODO support dependency injection for other methods in addition to token // for execution policy List<SecurityPolicyRecord> spec = null; List<SecurityPolicyRecord> prunedSpec = new ArrayList<>(); // process security spec // query can be standard or json // if json, need name of column to map to token // if standard, need list of relevant indices String policyColumns = null; String policyIndices = null; if(this.getSecuritySpec() != null) { // this is a crap way to do it--store as list, convert to space-del string, then split later, but // it's the quickest route to valhalla if(this.getSecuritySpec().hasExecutionPolicy()) { List<String> polcols = (List<String>) this.getSecuritySpec().getExecutionPolicy().get(COLUMNS); List<String> polind = (List<String>) this.getSecuritySpec().getExecutionPolicy().get(INDICES); if(null != polcols && polcols.size() > 0) policyColumns = String.join(" ", polcols).replaceAll("[\\[\\]\"]",""); if(null != polind && polind.size() > 0) policyIndices = String.join(" ", polind).replaceAll("[\\[\\]\"]",""); if(null == policyIndices) { polind = (List<String>) this.getSecuritySpec().getExecutionPolicy().get(INDEXES); if(null != polind && polind.size() > 0) policyIndices = String.join(" ", polind).replaceAll("[\\[\\]\"]",""); } Map<String,Object> execPol = this.getSecuritySpec().getExecutionPolicy(); spec = new ArrayList<SecurityPolicyRecord>(); String qname = getYADAQuery().getQname(); String type = (String) execPol.get(YADASecuritySpec.KEY_TYPE); String protector = (String) execPol.get(YADASecuritySpec.KEY_PROTECTOR); spec.add(new SecurityPolicyRecord(qname,EXECUTION_POLICY_CODE,type,protector)); } } else { policyColumns = getArgumentValue(EXECUTION_POLICY_COLUMNS); policyIndices = getArgumentValue(EXECUTION_POLICY_INDICES); if(policyIndices == null) policyIndices = getArgumentValue(EXECUTION_POLICY_INDEXES); spec = getSecurityPolicyRecords(EXECUTION_POLICY_CODE); } String polColParams_rx = YADASecuritySpec.RX_IDX; String polColJSONParams_rx 
= YADASecuritySpec.RX_COL; String result = ""; int index = -1; String injectedIndex = ""; boolean policyHasParams = false; boolean policyHasJSONParams = false; boolean reqHasParams = getYADARequest().getParams() == null || getYADARequest().getParams().length == 0 ? false : true; boolean reqHasJSONParams = YADAUtils.hasJSONParams(getYADARequest()); for (SecurityPolicyRecord secRec: spec) { // Are params required for security query? if (policyIndices != null && policyIndices.split(" ")[0].matches(polColParams_rx)) { policyHasParams = true; } if (policyColumns != null && policyColumns.split(" ")[0].matches(polColJSONParams_rx)) { policyHasJSONParams = true; } // request and policy must have syntax compatibility, i.e., matching param // syntax, or no params if ((policyHasParams && !reqHasJSONParams) || (policyHasJSONParams && !reqHasParams) || (!policyHasParams && reqHasJSONParams) || (!policyHasJSONParams && reqHasParams) || !(policyHasParams || reqHasParams || policyHasJSONParams || reqHasJSONParams)) { // confirm sec spec is config properly if (hasValidPolicy(secRec.getType())) // whitelist or blacklist { if(!Finder.hasYADALib()) { // confirm sec spec is mapped to requested query try { new Finder().getQuery(secRec.getA11nQname()); } catch (YADAFinderException e) { String msg = "Unauthorized. Authorization qname not found."; throw new YADASecurityException(msg); } catch (YADAConnectionException | YADAQueryConfigurationException e) { String msg = "Unauthorized. Unable to check for security query. This could be a temporary issue."; throw new YADASecurityException(msg, e); } } // security query exists } else { String msg = "Unauthorized, due to policy misconfiguration. Must be \"blacklist\" or \"whitelist.\""; throw new YADASecurityException(msg); } prunedSpec.add(secRec); } } // kill the query if there aren't any compatible specs if (prunedSpec.size() == 0) { String msg = "Unauthorized. 
Request parameter syntax is incompatible with policy."; throw new YADASecurityException(msg); } // process the relevant specs for (SecurityPolicyRecord secRec: prunedSpec) // policy code (E,C), policy // type (white,black), target // (qname), // A11nqname { String a11nQname = secRec.getA11nQname(); String policyType = secRec.getType(); // policy has params and req has compatible params if (policyHasParams && !reqHasJSONParams) { // split on space to process each polCol separately String[] polCols = policyIndices.split("\\s"); StringBuilder polVals = new StringBuilder(); if (reqHasParams) { for (int i = 0; i < polCols.length; i++) { // handle as params // 1. get params from query List<String> vals = getYADAQuery().getVals(0); try { index = Integer.parseInt(polCols[i]); } catch (NumberFormatException e) { injectedIndex = polCols[i]; } // 2. pass user column if (polVals.length() > 0) polVals.append(","); if (injectedIndex.equals("") && index > -1) { if (index >= vals.size()) // insert token by default if current polCol index exceeds // length of request's param list for this query polVals.append((String) getToken()); else polVals.append(vals.get(index)); } else { Pattern rxInjection = Pattern.compile(YADASecuritySpec.RX_IDX_INJECTION); Matcher m1 = rxInjection.matcher(injectedIndex); if (m1.matches() && m1.groupCount() == 3) // injection { // parse regex: this is where the method value is injected // String colIdx = m1.group(2); String colval = m1.group(2); String arg = m1.group(3); // find and execute injected method String method = colval.substring(0, colval.indexOf('(')); // String arg = colval.substring(colval.indexOf('(') + 1, colval.indexOf(')')); Object val = null; try { if (arg.equals("")) val = getClass().getMethod(method).invoke(this, new Object[] {}); else val = getClass().getMethod(method, new Class[] { java.lang.String.class }).invoke(this, new Object[] { arg }); } catch (NoSuchMethodException | SecurityException | IllegalAccessException | 
IllegalArgumentException | InvocationTargetException e) { String msg = "Unathorized. Injected method invocation failed."; throw new YADASecurityException(msg, e); } // add/replace item in dataRow polVals.append(val); } } index = -1; injectedIndex = ""; } // 3. execute the security query try { result = YADAUtils.executeYADAGet(new String[] { a11nQname }, new String[] { polVals.toString() }); } catch (YADAException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { for (int i = 0; i < polCols.length; i++) { injectedIndex = polCols[i]; if(polVals.length() > 0) { polVals.append(","); } // insert token by default since here, polCol index exceeds // zero-length of request's param list for this query // polVals.append((String) getToken()); Pattern rxInjection = Pattern.compile(YADASecuritySpec.RX_IDX_INJECTION); Matcher m1 = rxInjection.matcher(injectedIndex); if (m1.matches() && m1.groupCount() == 3) // injection { // parse regex: this is where the method value is injected // String colIdx = m1.group(2); String colval = m1.group(2); String arg = m1.group(3); // find and execute injected method String method = colval.substring(0, colval.indexOf('(')); // String arg = colval.substring(colval.indexOf('(') + 1, colval.indexOf(')')); Object val = null; try { if (arg.equals("")) val = getClass().getMethod(method).invoke(this, new Object[] {}); else val = getClass().getMethod(method, new Class[] { java.lang.String.class }).invoke(this, new Object[] { arg }); } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { String msg = "Unathorized. 
Injected method invocation failed."; throw new YADASecurityException(msg, e); } // add/replace item in dataRow polVals.append(val); } else { // default to token, although this is probably never going to be called // and will fail if a real auth token is in use polVals.append((String) getToken()); } } try { result = YADAUtils.executeYADAGet(new String[] { a11nQname }, new String[] { polVals.toString() }); } catch (YADAException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } // policy has JSONParams and req has compatible JSONParams else if (policyHasJSONParams && reqHasJSONParams) { LOG.debug("Could not parse protector column value into integer, assuming it's a String"); // handle as JSONParams // 1. get JSONParams from query (params) LinkedHashMap<String, String[]> dataRow = getYADAQuery().getDataRow(0); // 2. add user column if necessary String[] polCols = policyColumns.split("\\s"); for (String colspec: polCols) { // dataRow can look like, e.g.: {COL1:val1,COL2:val2} // polCols can look like, e.g.: COL2 APP:getValue(TARGET) Pattern rxInjection = Pattern.compile(YADASecuritySpec.RX_COL_INJECTION); Matcher m1 = rxInjection.matcher(colspec); if (m1.matches() && m1.groupCount() == 3) // injection { // parse regex: this is where the method value is injected String colname = m1.group(1); String colval = m1.group(2); String arg = m1.group(3); // find and execute injected method String method = colval.substring(0, colval.indexOf('(')); Object val = null; try { if (arg.equals("")) val = getClass().getMethod(method).invoke(this, new Object[] {}); else val = getClass().getMethod(method, new Class[] { java.lang.String.class }).invoke(this, new Object[] { arg }); } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { String msg = "Unathorized. 
Injected method invocation failed."; throw new YADASecurityException(msg, e); } // add/replace item in dataRow dataRow.put(colname, new String[] { (String) val }); } else { if (!dataRow.containsKey(colspec)) // no injection AND no parameter { String msg = "Unathorized. Injected method invocation failed."; throw new YADASecurityException(msg); } } } // 3. execute the security query JSONParamsEntry jpe = new JSONParamsEntry(); // dataRow now contains injected values () or passed values // if values were injected, they've overwritten the passed in version jpe.addData(dataRow); JSONParams jp = new JSONParams(a11nQname, jpe); try { result = YADAUtils.executeYADAGetWithJSONParamsNoStats(jp); } catch (YADAException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { // no parameters to pass to execution.policy query try { result = YADAUtils.executeYADAGet(new String[] { a11nQname }, new String[0]); } catch (YADAException e) { // TODO Auto-generated catch block e.printStackTrace(); } } // parse result int count = new JSONObject(result).getJSONObject("RESULTSET").getInt("records"); // Reject if necessary if ((isWhitelist(policyType) && count == 0) || (isBlacklist(policyType) && count > 0)) throw new YADASecurityException("Unauthorized."); } this.clearSecurityPolicy(); } /** * Modifies the original query by appending a dynamic predicate * <p> * Recall the {@link Service}{@code .engagePreprocess} method will recall * {@link QueryManager}{@code .endowQuery()} to reconform the code after this * {@link Preprocess} disengages. 
* * * @throws YADASecurityException when token retrieval fails */ @Override public void applyContentPolicy() throws YADASecurityException { String SPACE = " "; StringBuilder contentPolicy = new StringBuilder(); Pattern rxInjection = Pattern.compile(YADASecuritySpec.RX_COL_INJECTION); String rawPolicy; Matcher m1; int start = 0; if(this.getSecuritySpec() != null) { rawPolicy = this.getSecuritySpec().getContentPolicy().get(PREDICATE); } else { rawPolicy = getArgumentValue(CONTENT_POLICY_PREDICATE); } // TODO make it impossible to reset args and preargs dynamically if pl class // implements SecurityPolicy // this will close an attack vector // field = getToken // field = getCookie(string) // field = getHeader(string) // field = getUser() // field = getRandom(string) m1 = rxInjection.matcher(rawPolicy); if (!m1.find()) { String msg = "Unathorized. Injected method invocation failed."; throw new YADASecurityException(msg); } m1.reset(); while (m1.find()) { int rxStart = m1.start(); int rxEnd = m1.end(); contentPolicy.append(rawPolicy.substring(start, rxStart)); String frag = rawPolicy.substring(rxStart, rxEnd); String method = frag.substring(0, frag.indexOf('(')); String arg = frag.substring(frag.indexOf('(') + 1, frag.indexOf(')')); Object val = null; try { if (arg.equals("")) val = getClass().getMethod(method).invoke(this, new Object[] {}); else val = getClass().getMethod(method, new Class[] { java.lang.String.class }).invoke(this, new Object[] { arg }); } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { String msg = "Unathorized. Injected method invocation failed."; throw new YADASecurityException(msg, e); } contentPolicy.append((String) val + SPACE); start = rxEnd; } Expression parsedContentPolicy; try { parsedContentPolicy = CCJSqlParserUtil.parseCondExpression(contentPolicy.toString()); } catch (JSQLParserException e) { String msg = "Unauthorized. 
Content policy is not valid."; throw new YADASecurityException(msg, e); } PlainSelect sql = (PlainSelect) ((Select) getYADAQuery().getStatement()).getSelectBody(); Expression where = sql.getWhere(); if (where != null) { AndExpression and = new AndExpression(where, parsedContentPolicy); sql.setWhere(and); } else { sql.setWhere(parsedContentPolicy); } try { CCJSqlParserManager parserManager = new CCJSqlParserManager(); sql = (PlainSelect) ((Select) parserManager.parse(new StringReader(sql.toString()))).getSelectBody(); } catch (JSQLParserException e) { String msg = "Unauthorized. Content policy is not valid."; throw new YADASecurityException(msg, e); } getYADAQuery().setYADACode(sql.toString()); this.clearSecurityPolicy(); } /** * Utility function for content policy * * @return the auth token wrapped in single quotes * @throws YADASecurityException when the token can't retrieved */ public String getQToken() throws YADASecurityException { String quote = "'"; return quote + getToken() + quote; } /* * @since 8.7.6 */ @Override public String getLoggedUser() throws YADASecurityException { String user = ""; try { user = new JSONObject(obtainIdentity().toString()).getString(Authorization.YADA_IDENTITY_SUB); } catch (JSONException e) { String msg = "There was a problem obtaining the user identity."; throw new YADASecurityException(msg, e); } return user; } /** * Utility function for content policy * * @return the auth token wrapped in single quotes * @throws YADASecurityException if the logged user value cannot be obtained * @since 8.1.0 */ public String getQLoggedUser() throws YADASecurityException { String user = ""; try { user = getLoggedUser(); } catch (YADASecurityException e) { String msg = "There was a problem obtaining the user identity."; throw new YADASecurityException(msg, e); } String quote = "'"; return quote + user + quote; } /** * Utility function for content policy * * @param cookie the desired HTTP request cookie * @return the value of {@code cookie} wrapped 
in single quotes */ public String getQCookie(String cookie) { String quote = "'"; String val = super.getCookie(cookie); return quote + val + quote; } /** * Utility function for content policy * * @param header the desired HTTP request header * @return the value of {@code header} wrapped in single quotes */ public String getQHeader(String header) { String quote = "'"; String val = super.getHeader(header); return quote + val + quote; } /** * Sets the local {@link TokenValidator} to {@code this} */ @Override public void setTokenValidator() throws YADASecurityException { setTokenValidator(this); } /** * @return the {@link #allowList} */ public ArrayList<String> getAllowList() { return this.allowList; } /** * @param grant the privilege to allow */ public void addAllowListEntry(String grant) { this.allowList.add(grant); } /** * @param grant the privilege to deny */ public void removeAllowListEntry(String grant) { this.allowList.remove(grant); } /** * @return {@link #denyList} */ public ArrayList<String> getDenyList() { return this.denyList; } /** * @param grant the privilege to deny */ public void addDenyListEntry(String grant) { this.denyList.add(grant); } /** * @return the identity TODO: To Authorization */ public Object getIdentity() { return this.identity; } /** * @param identity the identity to set */ public void setIdentity(Object identity) { this.identity = identity; } /** * @return the locks */ public JSONObject getLocks() { return this.locks; } /** * @param locks the locks to set */ public void setLocks(JSONObject locks) { this.locks = locks; } /** * @return the grant */ public Object getGrant() { return this.grant; } /** * @param grant the grant to set */ public void setGrant(Object grant) { this.grant = grant; } /** * @return the ships */ public ArrayList<String> getShips() { return this.ships; } /** * @param ships the ships to set */ public void setShips(ArrayList<String> ships) { this.ships = ships; } /** * @return {@code true} if {@link 
AbstractPreprocessor#getToken} returns a non-null, non-empty {@link String} * @since 8.7.6 */ public boolean hasToken() { if (null != this.getToken() && !"".equals(this.getToken())) { return true; } return false; } /** * @return {@code true} if the {@link #syncToken} variable is a non-null, non-empty {@link String} * @since 8.7.6 */ public boolean hasSyncToken() { if (null != this.getSyncToken() && !"".equals(this.getSyncToken())) { return true; } return false; } /** * @return {@code true} if {@link #locks} has at least 1 entry, otherwise * {@code false} * @since 8.7.6 */ public boolean hasLocks() { if (getLocks().length() > 0) { return true; } return false; } /** * @return {@code true} if {@link #grant} has at least 1 entry, otherwise * {@code false} * @since 8.7.6 */ public boolean hasGrants() { if (((JSONArray) getGrant()).length() > 0) { return true; } return false; } /** * @return {@code true} if {@link #identity} is set, otherwise {@code false} * @since 8.7.6 */ public boolean hasIdentity() { if (null != getIdentity() && !"".equals(getIdentity())) { return true; } return false; } /** * @return {@code true} if {@link #allowList} has at least 1 entry, otherwise * {@code false} * @since 8.7.6 */ public boolean hasAllowList() { if (getAllowList().size() > 0) { return true; } return false; } /** * @return the syncToken */ public String getSyncToken() { return syncToken; } /** * @param syncToken the syncToken to set */ public void setSyncToken(String syncToken) { this.syncToken = syncToken; } }
ziqew/kcc
pegasus/src/curriculum_course.rb
Unit = Struct.new(:number, :title, :description) Lesson = Struct.new(:number, :url, :title, :description, :path) class CurriculumCourse PRODUCTION_COURSES = %w(course1 course2 course3 course4 msm algebra misc unplugged science) COURSES_WITHOUT_UNIT_NUMBERS = %w(course1 course2 course3 course4 msm algebra misc) COURSES_WITH_PDF_GENERATION = %w(course1 course2 course3 course4 msm algebra misc csp) def initialize(kind) @kind = kind @dir = sites_dir("virtual/curriculum-#{@kind}") end def url "/curriculum/#{@kind}" end def path "curriculum-#{@kind}" end def has_units? !COURSES_WITHOUT_UNIT_NUMBERS.include? @kind end def get_course_info YAML.load_file(File.join(@dir, 'info.yml')) end def get_units return [] unless has_units? units = [] course_info = get_course_info unit_dirs = Dir.entries(@dir).select {|entry| valid_lesson_directory?(entry)} unit_dirs.map! {|unit| unit.match(/^[\d]*/).to_s.to_i} unit_dirs.sort! unit_dirs.uniq! unit_dirs.each do |unit_number| unit = Unit.new unit.number = unit_number unit.title = course_info[unit_number]['title'] unless course_info[unit_number].nil? unit.description = course_info[unit_number]['description'] unless course_info[unit_number].nil? units.push unit end units end # Retrieves all lessons of a given course, even if divided up into units def get_lessons return get_lessons_for_unit(nil) unless has_units? [].tap do |lessons| get_units.each do |unit| get_lessons_for_unit(unit.number).each do |lesson| lessons << lesson end end end end def get_lessons_for_unit(unit_number_filter = nil) lessons = [] lesson_dirs = Dir.entries(@dir) lesson_dirs.select! {|lesson_id| valid_lesson_directory?(lesson_id)} lesson_dirs.select! {|lesson_id| lesson_in_unit?(lesson_id, unit_number_filter)} unless unit_number_filter.nil? 
lesson_dirs.each do |lesson_id| yaml_path = File.join(@dir, lesson_id, 'info.yml') next unless File.file?(yaml_path) lesson_info = YAML.load_file(yaml_path) lesson = Lesson.new lesson.number = lesson_number(lesson_id) lesson.url = "#{url}/#{lesson_id}" lesson.title = lesson_info['title'] lesson.description = lesson_info['description'] lesson.path = File.join(path, lesson_id) lessons.push lesson end # Just push all the lessons without a number to the end. lessons.sort_by! {|k| k[:number] != "" ? k[:number].to_i : 10000} lessons end def lesson_number(lesson_id) lesson_id.scan(/\d+/).last.to_s end def lesson_in_unit?(lesson_id, unit_number) lesson_id.match(/^[\d]*/).to_s == unit_number.to_s end def valid_lesson_directory?(lesson_id) File.directory?(File.join(@dir, lesson_id)) && valid_lesson_directory_name(lesson_id) end def valid_lesson_directory_name(lesson_dirname) lesson_dirname != '.' && lesson_dirname != '..' && lesson_dirname.chars.first != '_' end def self.virtual_to_v3_path(local_virtual_path) local_virtual_path.sub(pegasus_dir('sites/virtual/curriculum-'), sites_v3_dir('code.org/public/curriculum/')) end end
qma/pants
src/python/pants/backend/android/tasks/android_task.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.backend.android.distribution.android_distribution import AndroidDistribution from pants.backend.core.tasks.task import Task class AndroidTask(Task): """Base class for Android tasks that may require the Android SDK.""" @classmethod def register_options(cls, register): super(AndroidTask, cls).register_options(register) register('--sdk-path', help='Use the Android SDK at this path.') def __init__(self, *args, **kwargs): super(AndroidTask, self).__init__(*args, **kwargs) self._sdk_path = self.get_options().sdk_path or None @property def android_sdk(self): """Instantiate an Android SDK distribution that provides tools to android tasks.""" return AndroidDistribution.cached(self._sdk_path)
VishalKandala/Cantera-1.7
Cantera/python/src/cttransport_methods.cpp
<gh_stars>0 /** * Create a new Transport object. */ static PyObject * py_transport_new(PyObject *self, PyObject *args) { char* model; int ph; int loglevel; if (!PyArg_ParseTuple(args, "sii:transport_new", &model, &ph, &loglevel)) return NULL; int n = newTransport(model, ph, loglevel); if (n < 0) return reportError(n); return Py_BuildValue("i",n); } /** * Delete the Phase object. */ static PyObject* py_transport_delete(PyObject *self, PyObject *args) { int tr; if (!PyArg_ParseTuple(args, "i:transport_delete", &tr)) return NULL; delTransport(tr); return Py_BuildValue("i",0); } static PyObject* py_setParameters(PyObject *self, PyObject *args) { int n, k, typ; PyObject* parray; if (!PyArg_ParseTuple(args, "iiiO:py_setParameters", &n, &typ, &k, &parray)) return NULL; PyArrayObject* a = (PyArrayObject*) PyArray_ContiguousFromObject(parray, PyArray_DOUBLE, 1, 1); double* xd = (double*)a->data; int ok = trans_setParameters(n, typ, k, xd); Py_DECREF(a); if (ok < 0) return reportError(ok); return Py_BuildValue("i",ok); } static PyObject* py_viscosity(PyObject *self, PyObject *args) { int n; if (!PyArg_ParseTuple(args, "i:py_viscosity", &n)) return NULL; double mu = trans_viscosity(n); if (mu < 0.0) return reportError(int(mu)); return Py_BuildValue("d",mu); } static PyObject* py_thermalConductivity(PyObject *self, PyObject *args) { int n; if (!PyArg_ParseTuple(args, "i:py_thermalConductivity", &n)) return NULL; double lambda = trans_thermalConductivity(n); if (lambda < 0.0) return reportError(int(lambda)); return Py_BuildValue("d",lambda); } static PyObject* py_thermalDiffCoeffs(PyObject *self, PyObject *args) { int n, idt; if (!PyArg_ParseTuple(args, "ii:py_thermalDiffCoeffs", &n, &idt)) return NULL; PyArrayObject* dt = (PyArrayObject*)PyArray_FromDims(1, &idt, PyArray_DOUBLE); int iok = trans_getThermalDiffCoeffs(n, idt, (double*)dt->data); if (iok < 0) return reportError(iok); return PyArray_Return(dt); } static PyObject* py_binaryDiffCoeffs(PyObject *self, PyObject *args) 
{ int n, id; if (!PyArg_ParseTuple(args, "ii:py_binaryDiffCoeffs", &n, &id)) return NULL; int idim[2]; idim[0] = id; idim[1] = id; PyArrayObject* d = (PyArrayObject*)PyArray_FromDims(2, idim, PyArray_DOUBLE); int iok = trans_getBinDiffCoeffs(n, id, (double*)d->data); if (iok < 0) return reportError(iok); return PyArray_Return(d); } static PyObject* py_mixDiffCoeffs(PyObject *self, PyObject *args) { int n, id; if (!PyArg_ParseTuple(args, "ii:py_mixDiffCoeffs", &n, &id)) return NULL; PyArrayObject* d = (PyArrayObject*)PyArray_FromDims(1, &id, PyArray_DOUBLE); int iok = trans_getMixDiffCoeffs(n, id, (double*)d->data); if (iok < 0) return reportError(iok); return PyArray_Return(d); } static PyObject* py_multiDiffCoeffs(PyObject *self, PyObject *args) { int n, id; if (!PyArg_ParseTuple(args, "ii:py_multiDiffCoeffs", &n, &id)) return NULL; //vector_int idim(2,id); int idim[2]; idim[0] = id; idim[1] = id; PyArrayObject* d = (PyArrayObject*)PyArray_FromDims(2, idim, PyArray_DOUBLE); int iok = trans_getMultiDiffCoeffs(n, id, (double*)d->data); if (iok < 0) return reportError(iok); return PyArray_Return(d); } static PyObject* py_getMolarFluxes(PyObject *self, PyObject *args) { int n, id; PyObject *state1, *state2; double delta; if (!PyArg_ParseTuple(args, "iiOOd:py_getMolarFluxes", &n, &id, &state1, &state2, &delta)) return NULL; PyArrayObject* state1array = (PyArrayObject*)state1; PyArrayObject* state2array = (PyArrayObject*)state2; double* d1 = (double*)state1array->data; double* d2 = (double*)state2array->data; PyArrayObject* f = (PyArrayObject*)PyArray_FromDims(1, &id, PyArray_DOUBLE); double* fd = (double*)f->data; int iok = trans_getMolarFluxes(n, d1, d2, delta, fd); if (iok < 0) return reportError(iok); return PyArray_Return(f); }
c0rrigan/poo
Transpmex/src/main/java/me/sebas/transpomex/facturas/Factura.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package me.sebas.transpomex.facturas;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

/**
 * Shipping invoice (factura) for a transport order.
 * <p>
 * Holds the delivery address, shipment type, load volume/weight, departure and
 * delivery dates, delay, transport plate number and net price. Each invoice is
 * identified by a short {@code key} derived from a SHA-256 hash of the address
 * and shipment data when no explicit key is supplied.
 *
 * @author deckard
 */
public class Factura {

    private int id;                  // numeric invoice id
    private String key;              // 6-char upper-case Base64 fragment of a SHA-256 hash
    private Direccion dir;           // delivery address
    private String tipoEnvio;        // shipment type
    private float volCarga;          // load volume
    private float pesoCarga;         // load weight
    private Fecha fechaSalida;       // departure date
    private Fecha fechaEntrega;      // delivery date
    private short retraso;           // delay
    private String matrTransporte;   // transport vehicle plate number
    private float precioNeto;        // net price

    /**
     * Creates an empty invoice with {@code id} and {@code retraso} set to 0.
     */
    public Factura() {
        // Fix: the original declared shadowing locals ("int id = 0;") that had
        // no effect; assign the fields explicitly instead.
        this.id = 0;
        this.retraso = 0;
    }

    /**
     * Creates a fully populated invoice.
     * <p>
     * If {@code key} is {@code null} or empty, a key is derived from the
     * address and shipment data via {@link #obtenerHash()} and upper-cased;
     * otherwise the supplied key is used as-is.
     * <p>
     * NOTE(review): {@code matrTransporte} is not a constructor parameter and
     * is left unset here — presumably assigned later via
     * {@link #setMatrTransporte(String)}; confirm with callers.
     *
     * @param id           numeric invoice id
     * @param key          explicit invoice key, or null/empty to auto-generate
     * @param dir          delivery address (must be non-null when key is auto-generated)
     * @param tipoEnvio    shipment type
     * @param volCarga     load volume
     * @param pesoCarga    load weight
     * @param fechaSalida  departure date
     * @param fechaEntrega delivery date
     * @param retraso      delay
     * @param precioNeto   net price
     */
    public Factura(int id, String key, Direccion dir, String tipoEnvio, float volCarga,
            float pesoCarga, Fecha fechaSalida, Fecha fechaEntrega, short retraso,
            float precioNeto) {
        this.id = id;
        this.dir = dir;
        this.tipoEnvio = tipoEnvio;
        this.volCarga = volCarga;
        this.pesoCarga = pesoCarga;
        this.fechaSalida = fechaSalida;
        this.fechaEntrega = fechaEntrega;
        this.retraso = retraso;
        this.precioNeto = precioNeto;
        // Fix: guard against a null key as well — the original called
        // key.isEmpty() directly and would throw NullPointerException.
        if (key == null || key.isEmpty()) {
            this.key = obtenerHash().toUpperCase();
        } else {
            this.key = key;
        }
    }

    /**
     * Derives a 6-character key from the address, departure date and load data.
     * <p>
     * Computes SHA-256 over the UTF-8 bytes of
     * {@code calle + colonia + fechaSalida + volCarga + pesoCarga}, Base64
     * encodes the digest, and returns its first 6 characters. If SHA-256 is
     * unavailable (it never is on a conformant JVM) the error is logged and a
     * single zero byte is hashed-equivalent output ("AA==" prefix) is returned.
     *
     * @return 6-character Base64 fragment of the digest
     */
    private String obtenerHash() {
        byte[] res = {0};
        try {
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            md.update((dir.getCalle() + dir.getColonia() + fechaSalida + volCarga + pesoCarga)
                    .getBytes(StandardCharsets.UTF_8));
            res = md.digest();
        } catch (NoSuchAlgorithmException e) {
            System.out.println("Algoritmo no encontrado\n" + e);
        }
        return Base64.getEncoder().encodeToString(res).substring(0, 6);
    }

    /**
     * Recomputes and replaces this invoice's key from the current field values.
     * Note: unlike the constructor path, the result is not upper-cased (kept
     * for backward compatibility).
     */
    public void hash() {
        key = obtenerHash();
    }

    /** @return the matrTransporte */
    public String getMatrTransporte() {
        return matrTransporte;
    }

    /** @param matrTransporte the matrTransporte to set */
    public void setMatrTransporte(String matrTransporte) {
        this.matrTransporte = matrTransporte;
    }

    /** @return the id */
    public int getId() {
        return id;
    }

    /** @param id the id to set */
    public void setId(int id) {
        this.id = id;
    }

    /** @return the key */
    public String getKey() {
        return key;
    }

    /** @param key the key to set */
    public void setKey(String key) {
        this.key = key;
    }

    /** @return the dir */
    public Direccion getDir() {
        return dir;
    }

    /** @param dir the dir to set */
    public void setDir(Direccion dir) {
        this.dir = dir;
    }

    /** @return the tipoEnvio */
    public String getTipoEnvio() {
        return tipoEnvio;
    }

    /** @param tipoEnvio the tipoEnvio to set */
    public void setTipoEnvio(String tipoEnvio) {
        this.tipoEnvio = tipoEnvio;
    }

    /** @return the volCarga */
    public float getVolCarga() {
        return volCarga;
    }

    /** @param volCarga the volCarga to set */
    public void setVolCarga(float volCarga) {
        this.volCarga = volCarga;
    }

    /** @return the pesoCarga */
    public float getPesoCarga() {
        return pesoCarga;
    }

    /** @param pesoCarga the pesoCarga to set */
    public void setPesoCarga(float pesoCarga) {
        this.pesoCarga = pesoCarga;
    }

    /** @return the fechaSalida */
    public Fecha getFechaSalida() {
        return fechaSalida;
    }

    /** @param fechaSalida the fechaSalida to set */
    public void setFechaSalida(Fecha fechaSalida) {
        this.fechaSalida = fechaSalida;
    }

    /** @return the fechaEntrega */
    public Fecha getFechaEntrega() {
        return fechaEntrega;
    }

    /** @param fechaEntrega the fechaEntrega to set */
    public void setFechaEntrega(Fecha fechaEntrega) {
        this.fechaEntrega = fechaEntrega;
    }

    /** @return the retraso */
    public short getRetraso() {
        return retraso;
    }

    /** @param retraso the retraso to set */
    public void setRetraso(short retraso) {
        this.retraso = retraso;
    }

    /** @return the precioNeto */
    public float getPrecioNeto() {
        return precioNeto;
    }

    /** @param precioNeto the precioNeto to set */
    public void setPrecioNeto(float precioNeto) {
        this.precioNeto = precioNeto;
    }
}
viewdy/phantomjs
src/qt/qtwebkit/Source/WebCore/platform/network/soup/AuthenticationChallengeSoup.cpp
<gh_stars>1-10 /* * Copyright (C) 2012 <NAME>.L. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY IGALIA S.L. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "AuthenticationChallenge.h" #include "ResourceError.h" #include <libsoup/soup.h> namespace WebCore { static ProtectionSpaceServerType protectionSpaceServerTypeFromURI(SoupURI* uri, bool isForProxy) { if (uri->scheme == SOUP_URI_SCHEME_HTTPS) return isForProxy ? ProtectionSpaceProxyHTTPS : ProtectionSpaceServerHTTPS; if (uri->scheme == SOUP_URI_SCHEME_HTTP) return isForProxy ? ProtectionSpaceProxyHTTP : ProtectionSpaceServerHTTP; if (uri->scheme == SOUP_URI_SCHEME_FTP) return isForProxy ? ProtectionSpaceProxyFTP : ProtectionSpaceServerFTP; return isForProxy ? 
ProtectionSpaceProxyHTTP : ProtectionSpaceServerHTTP; } static ProtectionSpace protectionSpaceFromSoupAuthAndMessage(SoupAuth* soupAuth, SoupMessage* message) { const char* schemeName = soup_auth_get_scheme_name(soupAuth); ProtectionSpaceAuthenticationScheme scheme; if (!g_ascii_strcasecmp(schemeName, "basic")) scheme = ProtectionSpaceAuthenticationSchemeHTTPBasic; else if (!g_ascii_strcasecmp(schemeName, "digest")) scheme = ProtectionSpaceAuthenticationSchemeHTTPDigest; else if (!g_ascii_strcasecmp(schemeName, "ntlm")) scheme = ProtectionSpaceAuthenticationSchemeNTLM; else if (!g_ascii_strcasecmp(schemeName, "negotiate")) scheme = ProtectionSpaceAuthenticationSchemeNegotiate; else scheme = ProtectionSpaceAuthenticationSchemeUnknown; SoupURI* soupURI = soup_message_get_uri(message); return ProtectionSpace(String::fromUTF8(soup_uri_get_host(soupURI)), soup_uri_get_port(soupURI), protectionSpaceServerTypeFromURI(soupURI, soup_auth_is_for_proxy(soupAuth)), String::fromUTF8(soup_auth_get_realm(soupAuth)), scheme); } AuthenticationChallenge::AuthenticationChallenge(SoupSession* soupSession, SoupMessage* soupMessage, SoupAuth* soupAuth, bool retrying, AuthenticationClient* client) : AuthenticationChallengeBase(protectionSpaceFromSoupAuthAndMessage(soupAuth, soupMessage), Credential(), // proposedCredentials retrying ? 1 : 0, // previousFailureCount soupMessage, // failureResponse ResourceError::authenticationError(soupMessage)) , m_soupSession(soupSession) , m_soupMessage(soupMessage) , m_soupAuth(soupAuth) , m_authenticationClient(client) { } bool AuthenticationChallenge::platformCompare(const AuthenticationChallenge& a, const AuthenticationChallenge& b) { return a.soupSession() == b.soupSession() && a.soupMessage() == b.soupMessage() && a.soupAuth() == b.soupAuth(); } } // namespace WebCore
KuveytTurk/api-java-sdk
src/test/java/tr/com/kuveytturk/api/test/domains/identification/IdentityServerAPITest.java
/*
 * Copyright (c) 2020
 * KUVEYT TÜRK PARTICIPATION BANK INC.
 *
 * Author: <NAME>
 *
 * Project: Java SDK for Accessing API Endpoints
 */
package tr.com.kuveytturk.api.test.domains.identification;

import com.consol.citrus.annotations.CitrusTest;
import org.junit.Test;
import tr.com.kuveytturk.api.client.sdk.model.callresult.AccessTokenRetrievalResult;
import tr.com.kuveytturk.api.test.domains.base.AbstractAPITest;

import java.util.List;

import static org.junit.Assert.assertTrue;

/**
 * Class implementing routines for testing authorization code and access token
 * retrieval related operations (authorization-code flow, client-credentials
 * flow, and refresh-token exchange).
 *
 * @author <NAME>
 * @version 1.0
 * @since 2020-01-12
 */
public class IdentityServerAPITest extends AbstractAPITest {

    /**
     * Picks a random test customer and verifies an access token can be
     * obtained through the OAuth2 authorization-code flow.
     */
    @Test
    @CitrusTest
    public void testAccessTokenRetrievalInAuthorizationCodeFlow() {
        int testCustomerId = pickRandomTestCustomerId();

        AccessTokenRetrievalResult accessTokenResult =
                super.fetchAccessTokenWithAuthorizationCodeFlow(testCustomerId);
        assertTrue(testCustomerId
                        + " nolu müşteri için authorization code kullanarak access token alınırken hata oluştu!\nHATA DETAYI:\n"
                        + accessTokenResult.getErrorDetailsIfAny(),
                accessTokenResult.isSuccessful());
    }

    /**
     * Verifies an access token can be obtained through the OAuth2
     * client-credentials flow. The flow is customer-independent, so no test
     * customer id is needed (the original built and then ignored one); the
     * test-customer list is still fetched to validate the test environment.
     */
    @Test
    @CitrusTest
    public void testAccessTokenRetrievalInClientCredentialsFlow() {
        List<Integer> testCustomersList = super.getTestCustomerList();
        assertTrue("Test Müşteri numaraları çekilemedi.",
                testCustomersList != null && !testCustomersList.isEmpty());
        assertTrue("Yeterli sayıda test müşterisi çekilemedi.", testCustomersList.size() >= 2);

        AccessTokenRetrievalResult accessTokenResult = super.fetchAccessTokenWithClientCredentialsFlow();
        assertTrue("Client Credential Flow'u ile access token alınırken hata oluştu!\nHATA DETAYI:\n"
                        + accessTokenResult.getErrorDetailsIfAny(),
                accessTokenResult.isSuccessful());
    }

    /**
     * Obtains an access token via the authorization-code flow, then verifies
     * the accompanying refresh token can be exchanged for a new access token.
     */
    @Test
    @CitrusTest
    public void testAccessTokenRetrievalWithRefreshToken() {
        int testCustomerId = pickRandomTestCustomerId();

        AccessTokenRetrievalResult accessTokenResult =
                super.fetchAccessTokenWithAuthorizationCodeFlow(testCustomerId);
        // Fix: the original message wrongly referred to the client-credentials
        // flow here; this call uses the authorization-code flow.
        assertTrue(testCustomerId
                        + " nolu müşteri için authorization code kullanarak access token alınırken hata oluştu!\nHATA DETAYI:\n"
                        + accessTokenResult.getErrorDetailsIfAny(),
                accessTokenResult.isSuccessful());

        AccessTokenRetrievalResult accessTokenWithRefreshTokenResult =
                super.fetchAccessTokenWithRefreshToken(accessTokenResult.getRefreshToken());
        assertTrue("Refresh token kullanarak access token alınırken hata oluştu!\nHATA DETAYI:\n"
                        + accessTokenWithRefreshTokenResult.getErrorDetailsIfAny(),
                accessTokenWithRefreshTokenResult.isSuccessful());
    }

    /**
     * Fetches the test-customer list, asserts it is usable, and returns a
     * randomly chosen customer id. Replaces the deprecated
     * {@code new Integer(list.get(i).intValue())} boxing dance that was
     * duplicated across the tests.
     *
     * @return a randomly selected test customer id
     */
    private int pickRandomTestCustomerId() {
        List<Integer> testCustomersList = super.getTestCustomerList();
        assertTrue("Test Müşteri numaraları çekilemedi.",
                testCustomersList != null && !testCustomersList.isEmpty());
        assertTrue("Yeterli sayıda test müşterisi çekilemedi.", testCustomersList.size() >= 2);

        int randomIndex = getRandomInt(0, testCustomersList.size() - 1);
        return testCustomersList.get(randomIndex); // auto-unboxing
    }
}
Davidfind/rt-thread
bsp/rx/RPDL/interrupt_sci.c
<filename>bsp/rx/RPDL/interrupt_sci.c
/*""FILE COMMENT""*******************************************************
* System Name : Interrupt program for RX62Nxx
* File Name : Interrupt_SCI.c
* Version : 1.02
* Contents : Interrupt handlers for all SCI channels
* Customer :
* Model :
* Order :
* CPU : RX
* Compiler : RXC
* OS : Nothing
* Programmer :
* Note :
************************************************************************
* Copyright, 2011. Renesas Electronics Corporation
* and Renesas Solutions Corporation
************************************************************************
* History : 2011.04.08
* : Ver 1.02
* : CS-5 release.
*""FILE COMMENT END""**************************************************/
#include "r_pdl_sci.h"
#include "r_pdl_definitions.h"
#include "r_pdl_user_definitions.h"

/*
 * NOTE (added documentation):
 * SCI interrupt service routines for the RPDL driver. For each SCI channel
 * handled here (0, 1, 2, 3, 5, 6 -- there are no channel-4 handlers;
 * presumably this device has no SCI4, confirm against the RX62N hardware
 * manual) four ISR kinds exist:
 *   ERIn - receive error:  defer to the user error callback if registered,
 *          otherwise clear the error flags by writing SSR directly.
 *   RXIn - receive data:   multiprocessor-ID matching, byte storage into the
 *          rx buffer, RX-end callback when the expected count is reached;
 *          when the DMAC/DTC owns the transfer, only the callback is fired.
 *   TXIn - transmit data:  feeds the next byte to TDR until a NUL terminator
 *          (string mode, threshold == 0) or the byte total is reached.
 *   TEIn - transmit end:   disables transmission and fires the TX-end callback.
 * Each ISR is compiled either as the single fast interrupt (__fast_interrupt,
 * when FAST_INTC_VECTOR equals its vector number -- presumably configured in
 * r_pdl_user_definitions.h; verify) or as a normal vectored interrupt via
 * "#pragma vector".
 * NOTE(review): the channel-2 ERI/RXI handlers are compiled out with "#if 0"
 * while TXI2/TEI2 below remain active -- confirm this asymmetry is intentional.
 */

/*""FUNC COMMENT""***************************************************
 * Module outline: SCIn receive data error interrupt processing
 *-------------------------------------------------------------------
 * Declaration : void Interrupt_SCIn_ERIn(void)
 *-------------------------------------------------------------------
 * Function :
 *-------------------------------------------------------------------
 * Argument : Nothing
 *-------------------------------------------------------------------
 * Return value : Nothing
 *-------------------------------------------------------------------
 * Output : SSR for that SCI channel
 *-------------------------------------------------------------------
 * Use function : rpdl_SCI_RX_Error_callback_func
 *-------------------------------------------------------------------
 * Notes :
 *-------------------------------------------------------------------
 * History : 2011.04.08
 * : Ver 1.02
 * : CS-5 release.
 *""FUNC COMMENT END""**********************************************/
#if FAST_INTC_VECTOR == VECT_SCI0_ERI0
__fast_interrupt void Interrupt_SCI0_ERI0(void)
#else
#pragma vector = VECT_SCI0_ERI0
__interrupt void Interrupt_SCI0_ERI0(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[0] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[0]();
    }
    else
    {
        /* Clear the error flags */
        SCI0.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}

#if FAST_INTC_VECTOR == VECT_SCI1_ERI1
__fast_interrupt void Interrupt_SCI1_ERI1(void)
#else
#pragma vector = VECT_SCI1_ERI1
__interrupt void Interrupt_SCI1_ERI1(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[1] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[1]();
    }
    else
    {
        /* Clear the error flags */
        SCI1.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}

/* NOTE(review): SCI2 receive-error handler compiled out -- see file note. */
#if 0
#if FAST_INTC_VECTOR == VECT_SCI2_ERI2
__fast_interrupt void Interrupt_SCI2_ERI2(void)
#else
#pragma vector = VECT_SCI2_ERI2
__interrupt void Interrupt_SCI2_ERI2(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[2] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[2]();
    }
    else
    {
        /* Clear the error flags */
        SCI2.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}
#endif

#if FAST_INTC_VECTOR == VECT_SCI3_ERI3
__fast_interrupt void Interrupt_SCI3_ERI3(void)
#else
#pragma vector = VECT_SCI3_ERI3
__interrupt void Interrupt_SCI3_ERI3(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[3] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[3]();
    }
    else
    {
        /* Clear the error flags */
        SCI3.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}

#if FAST_INTC_VECTOR == VECT_SCI5_ERI5
__fast_interrupt void Interrupt_SCI5_ERI5(void)
#else
#pragma vector = VECT_SCI5_ERI5
__interrupt void Interrupt_SCI5_ERI5(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[5] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[5]();
    }
    else
    {
        /* Clear the error flags */
        SCI5.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}

#if FAST_INTC_VECTOR == VECT_SCI6_ERI6
__fast_interrupt void Interrupt_SCI6_ERI6(void)
#else
#pragma vector = VECT_SCI6_ERI6
__interrupt void Interrupt_SCI6_ERI6(void)
#endif
{
    /* Will the user handle the errors? */
    if (rpdl_SCI_RX_Error_callback_func[6] != PDL_NO_FUNC)
    {
        /* Call the error handler */
        rpdl_SCI_RX_Error_callback_func[6]();
    }
    else
    {
        /* Clear the error flags */
        SCI6.SSR.BYTE = (uint8_t)(BIT_7 | BIT_6);
    }
}

/*""FUNC COMMENT""***************************************************
 * Module outline: SCIn receive data interrupt processing
 *-------------------------------------------------------------------
 * Declaration : void Interrupt_SCIn_RXIn(void)
 *-------------------------------------------------------------------
 * Function :
 *-------------------------------------------------------------------
 * Argument : Nothing
 *-------------------------------------------------------------------
 * Return value : Nothing
 *-------------------------------------------------------------------
 * Output :
 *-------------------------------------------------------------------
 * Use function : rpdl_SCI_RX_End_callback_func
 *-------------------------------------------------------------------
 * Notes :
 *-------------------------------------------------------------------
 * History : 2011.04.08
 * : Ver 1.02
 * : CS-5 release.
 *""FUNC COMMENT END""**********************************************/
#if FAST_INTC_VECTOR == VECT_SCI0_RXI0
__fast_interrupt void Interrupt_SCI0_RXI0(void)
#else
#pragma vector = VECT_SCI0_RXI0
__interrupt void Interrupt_SCI0_RXI0(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[0] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[0] == 2)
        {
            /* check if ID cycle ? */
            if (SCI0.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI0.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[0])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI0.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI0.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[0] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[0]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[0] = SCI0.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[0]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[0] < rpdl_SCI_rx_threshold[0])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[0]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI0.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI0.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[0]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI0.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[0] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[0]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[0] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[0]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI1_RXI1
__fast_interrupt void Interrupt_SCI1_RXI1(void)
#else
#pragma vector = VECT_SCI1_RXI1
__interrupt void Interrupt_SCI1_RXI1(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[1] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[1] == 2)
        {
            /* check if ID cycle ? */
            if (SCI1.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI1.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[1])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI1.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI1.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[1] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[1]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[1] = SCI1.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[1]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[1] < rpdl_SCI_rx_threshold[1])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[1]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI1.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI1.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[1]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI1.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[1] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[1]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[1] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[1]();
        }
    }
}

/* NOTE(review): SCI2 receive-data handler compiled out -- see file note. */
#if 0
#if FAST_INTC_VECTOR == VECT_SCI2_RXI2
__fast_interrupt void Interrupt_SCI2_RXI2(void)
#else
#pragma vector = VECT_SCI2_RXI2
__interrupt void Interrupt_SCI2_RXI2(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[2] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[2] == 2)
        {
            /* check if ID cycle ? */
            if (SCI2.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI2.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[2])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI2.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI2.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[2] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[2]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[2] = SCI2.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[2]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[2] < rpdl_SCI_rx_threshold[2])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[2]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI2.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI2.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[2]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI2.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[2] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[2]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[2] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[2]();
        }
    }
}
#endif

#if FAST_INTC_VECTOR == VECT_SCI3_RXI3
__fast_interrupt void Interrupt_SCI3_RXI3(void)
#else
#pragma vector = VECT_SCI3_RXI3
__interrupt void Interrupt_SCI3_RXI3(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[3] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[3] == 2)
        {
            /* check if ID cycle ? */
            if (SCI3.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI3.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[3])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI3.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI3.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[3] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[3]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[3] = SCI3.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[3]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[3] < rpdl_SCI_rx_threshold[3])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[3]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI3.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI3.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[3]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI3.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[3] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[3]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[3] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[3]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI5_RXI5
__fast_interrupt void Interrupt_SCI5_RXI5(void)
#else
#pragma vector = VECT_SCI5_RXI5
__interrupt void Interrupt_SCI5_RXI5(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[5] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[5] == 2)
        {
            /* check if ID cycle ? */
            if (SCI5.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI5.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[5])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI5.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI5.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[5] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[5]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[5] = SCI5.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[5]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[5] < rpdl_SCI_rx_threshold[5])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[5]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI5.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI5.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[5]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI5.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[5] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[5]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[5] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[5]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI6_RXI6
__fast_interrupt void Interrupt_SCI6_RXI6(void)
#else
#pragma vector = VECT_SCI6_RXI6
__interrupt void Interrupt_SCI6_RXI6(void)
#endif
{
    /* Ok to process the data? */
    if (rpdl_SCI_rx_using_irq[6] == true)
    {
        /* check if ID reception in Multiprocessor mode */
        if (rpdl_SCI_MP_mode[6] == 2)
        {
            /* check if ID cycle ? */
            if (SCI6.SSR.BIT.MPB == 1)
            {
                uint8_t id;

                /* Read the ID */
                id = SCI6.RDR;
                /* ID matching ? */
                if (id != rpdl_SCI_MP_rx_stationID[6])
                {
                    /* ID does not match */
                    /* MPIE = 1 */
                    SCI6.SCR.BIT.MPIE = 1;
                }
                else
                {
                    /* ID matches */
                    /* Disable interrupt request generation, and try to disable reception */
                    SCI6.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
                    /* Notify the user */
                    if (rpdl_SCI_RX_End_callback_func[6] != PDL_NO_FUNC)
                    {
                        rpdl_SCI_RX_End_callback_func[6]();
                    }
                }
                /* Exit ISR */
                return;
            }
        }
        /* Read and store the character */
        *rpdl_SCI_rx_string_pointer[6] = SCI6.RDR;
        /* Increment the character counter */
        rpdl_SCI_rx_counter[6]++;
        /* More characters expected? */
        if (rpdl_SCI_rx_counter[6] < rpdl_SCI_rx_threshold[6])
        {
            /* Move to the next location in the buffer */
            rpdl_SCI_rx_string_pointer[6]++;
        }
        else
        {
            /* Disable interrupt request generation, and try to disable reception */
            SCI6.SCR.BYTE &= (uint8_t)(INV_BIT_6 & INV_BIT_4);
            /* Async MP mode ? */
            if (((SCI6.SMR.BIT.MP) != 0) && ((rpdl_SCI_MP_mode[6]) != 0))
            {
                /* Set MPIE = 1 (multiprocessor mode reception) */
                SCI6.SCR.BIT.MPIE = 1;
            }
            /* Notify the user */
            if (rpdl_SCI_RX_End_callback_func[6] != PDL_NO_FUNC)
            {
                rpdl_SCI_RX_End_callback_func[6]();
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_RX_End_callback_func[6] != PDL_NO_FUNC)
        {
            rpdl_SCI_RX_End_callback_func[6]();
        }
    }
}

/*""FUNC COMMENT""***************************************************
 * Module outline: SCIn transmit data interrupt processing
 *-------------------------------------------------------------------
 * Declaration : void Interrupt_SCIn_TXIn(void)
 *-------------------------------------------------------------------
 * Function :
 *-------------------------------------------------------------------
 * Argument : Nothing
 *-------------------------------------------------------------------
 * Return value : Nothing
 *-------------------------------------------------------------------
 * Output : TDR for that SCI channel
 *-------------------------------------------------------------------
 * Use function :
 *-------------------------------------------------------------------
 * Notes :
 *-------------------------------------------------------------------
 * History : 2011.04.08
 * : Ver 1.02
 * : CS-5 release.
 *""FUNC COMMENT END""**********************************************/
#if FAST_INTC_VECTOR == VECT_SCI0_TXI0
__fast_interrupt void Interrupt_SCI0_TXI0(void)
#else
#pragma vector = VECT_SCI0_TXI0
__interrupt void Interrupt_SCI0_TXI0(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[0] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            /* NOTE(review): compares a char against the NULL macro (i.e. 0 /
               the NUL terminator); kept as-is. */
            ((rpdl_SCI_tx_threshold[0] == 0) && (*rpdl_SCI_tx_string_pointer[0] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[0] < rpdl_SCI_tx_threshold[0])
            )
        {
            /* Send the character */
            SCI0.TDR = *rpdl_SCI_tx_string_pointer[0];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[0]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[0]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI0.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI0.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI0.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[0] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[0]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[0] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[0]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI1_TXI1
__fast_interrupt void Interrupt_SCI1_TXI1(void)
#else
#pragma vector = VECT_SCI1_TXI1
__interrupt void Interrupt_SCI1_TXI1(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[1] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            ((rpdl_SCI_tx_threshold[1] == 0) && (*rpdl_SCI_tx_string_pointer[1] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[1] < rpdl_SCI_tx_threshold[1])
            )
        {
            /* Send the character */
            SCI1.TDR = *rpdl_SCI_tx_string_pointer[1];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[1]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[1]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI1.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI1.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI1.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[1] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[1]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[1] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[1]();
        }
    }
}

/* NOTE(review): TXI2 is active even though ERI2/RXI2 are compiled out. */
#if FAST_INTC_VECTOR == VECT_SCI2_TXI2
__fast_interrupt void Interrupt_SCI2_TXI2(void)
#else
#pragma vector = VECT_SCI2_TXI2
__interrupt void Interrupt_SCI2_TXI2(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[2] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            ((rpdl_SCI_tx_threshold[2] == 0) && (*rpdl_SCI_tx_string_pointer[2] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[2] < rpdl_SCI_tx_threshold[2])
            )
        {
            /* Send the character */
            SCI2.TDR = *rpdl_SCI_tx_string_pointer[2];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[2]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[2]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI2.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI2.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI2.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[2] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[2]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[2] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[2]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI3_TXI3
__fast_interrupt void Interrupt_SCI3_TXI3(void)
#else
#pragma vector = VECT_SCI3_TXI3
__interrupt void Interrupt_SCI3_TXI3(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[3] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            ((rpdl_SCI_tx_threshold[3] == 0) && (*rpdl_SCI_tx_string_pointer[3] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[3] < rpdl_SCI_tx_threshold[3])
            )
        {
            /* Send the character */
            SCI3.TDR = *rpdl_SCI_tx_string_pointer[3];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[3]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[3]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI3.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI3.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI3.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[3] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[3]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[3] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[3]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI5_TXI5
__fast_interrupt void Interrupt_SCI5_TXI5(void)
#else
#pragma vector = VECT_SCI5_TXI5
__interrupt void Interrupt_SCI5_TXI5(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[5] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            ((rpdl_SCI_tx_threshold[5] == 0) && (*rpdl_SCI_tx_string_pointer[5] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[5] < rpdl_SCI_tx_threshold[5])
            )
        {
            /* Send the character */
            SCI5.TDR = *rpdl_SCI_tx_string_pointer[5];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[5]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[5]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI5.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI5.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI5.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[5] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[5]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[5] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[5]();
        }
    }
}

#if FAST_INTC_VECTOR == VECT_SCI6_TXI6
__fast_interrupt void Interrupt_SCI6_TXI6(void)
#else
#pragma vector = VECT_SCI6_TXI6
__interrupt void Interrupt_SCI6_TXI6(void)
#endif
{
    /* Ok to process the string? */
    if (rpdl_SCI_tx_using_irq[6] == true)
    {
        /* Another character to be sent? */
        if (
            /* Sending a string and next character is not a Null? */
            ((rpdl_SCI_tx_threshold[6] == 0) && (*rpdl_SCI_tx_string_pointer[6] != NULL)) ||
            /* Not reached the byte total? */
            (rpdl_SCI_tx_counter[6] < rpdl_SCI_tx_threshold[6])
            )
        {
            /* Send the character */
            SCI6.TDR = *rpdl_SCI_tx_string_pointer[6];
            /* Increment the pointer */
            rpdl_SCI_tx_string_pointer[6]++;
            /* Increment the counter */
            rpdl_SCI_tx_counter[6]++;
        }
        else
        {
            /* Disable interrupt request generation */
            SCI6.SCR.BIT.TIE = 0;
            /* Smart card mode? */
            if (SCI6.SCMR.BIT.SMIF == 1)
            {
                /* Disable transmission and interrupt request generation */
                SCI6.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5);
                /* Call the callback function */
                if (rpdl_SCI_TX_End_callback_func[6] != PDL_NO_FUNC)
                {
                    rpdl_SCI_TX_End_callback_func[6]();
                }
            }
        }
    }
    /* Either the DMAC or DTC has passed on the interrupt */
    else
    {
        /* Call the callback function */
        if (rpdl_SCI_TX_End_callback_func[6] != PDL_NO_FUNC)
        {
            rpdl_SCI_TX_End_callback_func[6]();
        }
    }
}

/*""FUNC COMMENT""***************************************************
 * Module outline: SCIn transmit end interrupt processing
 *-------------------------------------------------------------------
 * Declaration : void Interrupt_SCIn_TEIn(void)
 *-------------------------------------------------------------------
 * Function :
 *-------------------------------------------------------------------
 * Argument : Nothing
 *-------------------------------------------------------------------
 * Return value : Nothing
 *-------------------------------------------------------------------
 * Output : SCR for that channel
 *-------------------------------------------------------------------
 * Use function : rpdl_SCI_TX_End_callback_func()
 *-------------------------------------------------------------------
 * Notes :
 *-------------------------------------------------------------------
 * History : 2011.04.08
 * : Ver 1.02
 * : CS-5 release.
 *""FUNC COMMENT END""**********************************************/
#if FAST_INTC_VECTOR == VECT_SCI0_TEI0
__fast_interrupt void Interrupt_SCI0_TEI0(void)
#else
#pragma vector = VECT_SCI0_TEI0
__interrupt void Interrupt_SCI0_TEI0(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI0.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[0] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[0]();
    }
}

#if FAST_INTC_VECTOR == VECT_SCI1_TEI1
__fast_interrupt void Interrupt_SCI1_TEI1(void)
#else
#pragma vector = VECT_SCI1_TEI1
__interrupt void Interrupt_SCI1_TEI1(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI1.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[1] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[1]();
    }
}

#if FAST_INTC_VECTOR == VECT_SCI2_TEI2
__fast_interrupt void Interrupt_SCI2_TEI2(void)
#else
#pragma vector = VECT_SCI2_TEI2
__interrupt void Interrupt_SCI2_TEI2(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI2.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[2] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[2]();
    }
}

#if FAST_INTC_VECTOR == VECT_SCI3_TEI3
__fast_interrupt void Interrupt_SCI3_TEI3(void)
#else
#pragma vector = VECT_SCI3_TEI3
__interrupt void Interrupt_SCI3_TEI3(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI3.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[3] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[3]();
    }
}

#if FAST_INTC_VECTOR == VECT_SCI5_TEI5
__fast_interrupt void Interrupt_SCI5_TEI5(void)
#else
#pragma vector = VECT_SCI5_TEI5
__interrupt void Interrupt_SCI5_TEI5(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI5.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[5] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[5]();
    }
}

#if FAST_INTC_VECTOR == VECT_SCI6_TEI6
__fast_interrupt void Interrupt_SCI6_TEI6(void)
#else
#pragma vector = VECT_SCI6_TEI6
__interrupt void Interrupt_SCI6_TEI6(void)
#endif
{
    /* Disable interrupt request generation, and try to disable transmission */
    SCI6.SCR.BYTE &= (uint8_t)(INV_BIT_7 & INV_BIT_5 & INV_BIT_2);
    /* Call the callback function */
    if (rpdl_SCI_TX_End_callback_func[6] != PDL_NO_FUNC)
    {
        rpdl_SCI_TX_End_callback_func[6]();
    }
}

/* End of file */
VernamRD/StoryGraph
Plugins/StoryGraphPlugin/Source/StoryGraphPluginRuntime/Public/StoryGraphBlueprint.h
#pragma once

#include "CoreMinimal.h"
#include "Engine/Blueprint.h"
// Must be the last include in the file (Unreal Header Tool requirement).
#include "StoryGraphBlueprint.generated.h"

class UEdGraph_StoryGraph;

/**
 * Blueprint asset type backing a Story Graph.
 *
 * Owns the runtime UStoryGraph object plus the editor graphs that visualise it.
 */
UCLASS()
class STORYGRAPHPLUGINRUNTIME_API UStoryGraphBlueprint : public UBlueprint
{
	GENERATED_BODY()

public:

	// Runtime story-graph object this blueprint edits/instantiates.
	UPROPERTY()
	class UStoryGraph* StoryGraph;

	// Editor graph views associated with this blueprint (one per graph owner).
	UPROPERTY()
	TArray<UEdGraph_StoryGraph*> Graphs;

public:

	// Returns the graph whose owner object matches GraphOwner, or null if none
	// is registered (implemented elsewhere).
	UEdGraph_StoryGraph* FindGraph(UObject* GraphOwner);

	// Registers an editor graph with this blueprint.
	void AddGraph(UEdGraph_StoryGraph* Graph)
	{
		Graphs.Add(Graph);
	}

	// Unregisters a single occurrence of the given editor graph.
	void RemoveGraph(UEdGraph_StoryGraph* Graph)
	{
		Graphs.RemoveSingle(Graph);
	}
};
grizim/oe-cloud
test/switch-data-source-test.js
/** * * ©2016-2017 EdgeVerve Systems Limited (a fully owned Infosys subsidiary), * Bangalore, India. All Rights Reserved. * */ /* jshint -W024 */ /* jshint expr:true */ //to avoid jshint errors for expect var bootstrap = require('./bootstrap'); var chalk = require('chalk'); var async = require('async') var loopback = require('loopback'); var log = require('oe-logger')('switch-data-source-test'); var chai = require('chai'); var expect = chai.expect; chai.use(require('chai-things')); var events = require('events'); var eventEmitter = new events.EventEmitter(); var mongoHost = process.env.MONGO_HOST || 'localhost'; describe(chalk.blue('switch-data-source-test'), function () { this.timeout(60000); var models = [ { name: 'model1', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'db' }, { name: 'model2', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'db' }, { name: 'model3', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'db' }, { name: 'model4', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'db' }, { name: 'model5', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'db' }, { name: 'model9', base: 'BaseEntity', properties: { 'name': { 'type': 'string', }, 'description': { 'type': 'string', } }, dataSourceName: 'emailDs' } ]; var datasources = [ { 'host': mongoHost, 'port': 27017, 'url': 'mongodb://' + mongoHost + ':27017/tenant1a', 'database': 'tenant1a', 'password': '<PASSWORD>', 'name': 'tenant1a', 'connector': 'mongodb', 'user': 'admin', 'id': 'ds-tenant1a', 'description': 'tenant1a', 'connectionTimeout': 50000 }, { 'host': mongoHost, 'port': 27017, 'url': 'mongodb://' + mongoHost + ':27017/tenant2a', 
'database': 'tenant2a', 'password': '<PASSWORD>', 'name': 'tenant2a', 'connector': 'mongodb', 'user': 'admin', 'id': 'ds-tenant2a', 'description': 'tenant2a', 'connectionTimeout': 50000 }, { 'host': mongoHost, 'port': 27017, 'url': 'mongodb://' + mongoHost + ':27017/commondb', 'database': 'commondb', 'password': '<PASSWORD>', 'name': 'commondb', 'connector': 'mongodb', 'user': 'admin', 'description': 'accountsModule', 'id': 'ds-commondb', 'connectionTimeout': 50000 }, { 'host': mongoHost, 'port': 27017, 'url': 'mongodb://' + mongoHost + ':27017/fxdb', 'database': 'fxdb', 'name': 'fxdb', 'connector': 'mongodb', 'user': 'admin', 'description': 'fxdb', 'id': 'ds-fxdb', 'connectionTimeout': 50000 }, { 'host': mongoHost, 'port': 27017, 'url': 'mongodb://' + mongoHost + ':27017/superdb', 'database': 'superdb', 'name': 'superdb', 'connector': 'mongodb', 'user': 'admin', 'description': 'fxdb', 'id': 'ds-superdb', 'connectionTimeout': 50000 } ]; var tenant1Scope = { ignoreAutoScope: false, ctx: { tenantId: 'tenant1' } }; var tenant2Scope = { ignoreAutoScope: false, ctx: { tenantId: 'tenant2' } }; var mappingsForTenant1 = [ { modelName: 'model1', dataSourceName: 'tenant1a', }, { modelName: 'model2', dataSourceName: 'commondb', }, { modelName: 'model3', dataSourceName: 'fxdb', 'scope': { 'department': 'fx' } }, { modelName: 'model1', dataSourceName: 'superdb', priority: 10, 'scope': { 'superpower': true } }, ]; var mappingsForTenant2 = [ { modelName: 'model1', dataSourceName: 'tenant2a', }, { modelName: 'model2', dataSourceName: 'commondb', }, { modelName: 'model3', dataSourceName: 'fxdb', 'scope': { 'department': 'fx' } }, { modelName: 'model1', dataSourceName: 'superdb', priority: 10, 'scope': { 'superpower': true } }, ]; var ModelDefinition = bootstrap.models.ModelDefinition; var DataSourceDefinition = bootstrap.models.DataSourceDefinition; var DataSourceMapping = bootstrap.models.DataSourceMapping; var cleanup = function (done) { async.series([function (cb) { var model = 
bootstrap.models['DataSourceDefinition']; if (model) { var options = { ctx: {} }; options.ignoreAutoScope = true; options.fetchAllScopes = true; model.remove({}, options, function () { cb(); }); } else { cb(); } }, function (cb) { var model = bootstrap.models['DataSourceMapping']; if (model) { model.destroyAll({}, tenant1Scope, function () { cb(); }); } else { cb(); } }, function (cb) { var model = bootstrap.models['DataSourceMapping']; if (model) { model.destroyAll({}, tenant2Scope, function () { cb(); }); } else { cb(); } }, function (cb) { var options = { ctx: {} }; options.fetchAllScopes = true; ModelDefinition.remove({ 'where': { 'name': { inq: ['model1', 'model2', 'model3', 'model4', 'model5'] } } }, options, function () { cb(); }); }, function () { done(); }]); }; before('setup datasources', function (done) { eventEmitter.setMaxListeners(100); var callContext = bootstrap.defaultContext; callContext.ignoreAutoScope = true; async.series([function (cb) { cleanup(cb); }, function (cb) { async.each(datasources, function (ds, callback) { DataSourceDefinition.findById(ds.id, callContext, function (err, res) { if (err) { log.error(log.defaultContext(), 'error in datasource find', err); return callback(err); } if (!res) { DataSourceDefinition.create(ds, callContext, function (err, res) { if (err) { log.error(log.defaultContext(), 'error in datasource find', err); return callback(err); } callback(); }); } else { log.debug(log.defaultContext(), 'data source exists ', ds.name, ds.database); callback(); } }); }, function (err) { cb(); }); }, function (cb) { Object.keys(bootstrap.app.datasources).forEach(function (dsname) { log.debug(log.defaultContext(), dsname); }); ModelDefinition.create(models, bootstrap.defaultContext, function (err, res) { if (err) { log.debug(log.defaultContext(), 'unable to create model'); cb(); } else { cb(); } }); }, function (cb) { DataSourceMapping.create(mappingsForTenant1, tenant1Scope, function (err, res) { if (err) { cb(err); } else { 
cb(); } }); }, function (cb) { DataSourceMapping.create(mappingsForTenant2, tenant2Scope, function (err, res) { if (err) { cb(err); } else { cb(); } }); }, function (cb) { Object.keys(bootstrap.app.datasources).forEach(function iter(id) { log.debug(log.defaultContext(), id, bootstrap.app.datasources[id].settings); }); cb(); }, function () { done(); }]); }); it('tenant1 and model 1 ', function (done) { var model = loopback.getModel('model1', bootstrap.defaultContext); var ds = model.getDataSource(tenant1Scope); expect(ds).not.to.be.null; expect(ds.settings.database).to.equal('tenant1a'); done(); }); it('tenant2 and model 1 ', function (done) { var model = loopback.getModel('model1', bootstrap.defaultContext); var ds = model.getDataSource(tenant2Scope); expect(ds).not.to.be.null; expect(ds.settings.database).to.equal('tenant2a'); done(); }); it('model2 tenant1 commondb ', function (done) { var model = loopback.getModel('model2', bootstrap.defaultContext); var ds = model.getDataSource(tenant1Scope); expect(ds.settings.database).to.equal('commondb'); done(); }); it('model2 tenant2 commondb ', function (done) { var model = loopback.getModel('model2', bootstrap.defaultContext); var ds = model.getDataSource(tenant2Scope); expect(ds.settings.database).to.equal('commondb'); done(); }); it('department for tenant 1 ', function (done) { var callContext = {}; callContext.ctx = { tenantId: 'tenant1', department: 'fx' }; var model = loopback.getModel('model3', bootstrap.defaultContext); var ds = model.getDataSource(callContext); expect(ds.settings.database).to.equal('fxdb'); done(); }); it('superdb higher priority ', function (done) { var callContext = {}; callContext.ctx = { superpower: true, tenantId: 'tenant1', department: 'fx' }; var model = loopback.getModel('model1', bootstrap.defaultContext); var ds = model.getDataSource(callContext); expect(ds.settings.database).to.equal('superdb'); done(); }); after('after clean up', function (done) { cleanup(function () { done(); }); 
}); });
Sun-Joong/aifh
vol1/scala-examples/src/main/scala/com/heatonresearch/aifh/examples/error/package.scala
<reponame>Sun-Joong/aifh<gh_stars>100-1000 /** * Examples from Chapter 6: Error Calculation */ package com.heatonresearch.aifh.examples.error
chaixiang2002/code-works
java/e7_1/e1_1.java
class Fu {
    public void show() {
        System.out.println("fu show");
    }

    // Added after the fix: without this declaration the call through the
    // Fu-typed reference in main() does not compile (see the compiler output
    // quoted at the bottom of this file).
    public void method() {
        System.out.println("fu method");
    }
}

class Zi extends Fu {
    public void show() {
        System.out.println("zi show");
    }

    public void method() {
        System.out.println("zi method");
    }
}

class e1_1 {
    public static void main(String[] args) {
        // Upcast: static type Fu, runtime type Zi. Method resolution is done
        // against the static type at compile time, but dispatch is dynamic at
        // run time, so this prints "zi method".
        Fu f = new Zi();
        f.method();
    }
}

// Compiler output observed BEFORE Fu.method() was added (translated from the
// original Chinese javac messages):
// e1_1.java:26: error: cannot find symbol
//     f.method();
//      ^
//   symbol:   method method()
//   location: variable f of type Fu
// 1 error
Karamays/karamay_engine
_KaramayEngine/karamay_engine_graphics_unit_cmake/karamay_engine_graphics_unit/source/framework/avatars/ship_machine.cpp
#include "ship_machine.h"

/// Constructs a ship_machine avatar.
/// Simply forwards both arguments to the avatar base class; the ship_machine
/// itself adds no extra initialisation here.
/// @param owner world that owns this avatar (ownership semantics defined by avatar)
/// @param name  identifying name for the avatar
ship_machine::ship_machine(world* owner, const std::string& name) :
	avatar(owner, name)
{
}
shaikatz/tweek
e2e/ui/pages/Context/FixedKey.js
import { Selector, t } from 'testcafe';
import { dataComp, dataField } from '../../utils/selector-utils';

/**
 * Page object for a single fixed-key row on the Context page.
 */
export default class FixedKey {
  /**
   * @param {string} key  the fixed key name; matched against the row's
   *                      data-fixed-key attribute.
   * @param {string} [type='string']  value type used to narrow the value
   *                      input by its data-value-type attribute; pass a
   *                      falsy value to skip that narrowing.
   */
  constructor(key, type = 'string') {
    // Row container for this specific fixed key.
    this.container = Selector(dataComp('fixed-key')).withAttribute('data-fixed-key', key);
    this.deleteButton = this.container.find(dataComp('delete-fixed-key'));
    this.valueInput = this.container.find(dataField('value'));
    if (type) {
      // Narrow to the input rendered for the requested value type.
      this.valueInput = this.valueInput.withAttribute('data-value-type', type.toLowerCase());
    }
  }

  /**
   * Replaces the current value of the fixed key with `value`.
   * Asserts the input is enabled before typing.
   * @param {*} value  stringified before being typed.
   */
  async update(value) {
    value = value.toString();
    await t
      .expect(this.valueInput.disabled)
      .notOk()
      .typeText(this.valueInput, value, { replace: true });
  }
}
6923403/Python_Demo
test/add_end.py
def add_end(L=None):
    """Append 'END' to the list ``L`` (in place) and return it.

    Fixes the classic mutable-default-argument bug: the original signature
    ``def add_end(L=[])`` creates the default list once, at function
    definition time, so successive calls without an argument kept appending
    to the same shared list (['END'], ['END', 'END'], ...). Using ``None``
    as the sentinel gives every call its own fresh list.

    :param L: list to append to; a new empty list is used when omitted.
    :return: ``L`` with 'END' appended.
    """
    if L is None:
        L = []
    L.append('END')
    return L
loivp13/Interval-App
client/src/features/Page_create/components/EditableTime.styles.js
import classNames from "classnames";

// Tailwind class-name bundles for the EditableTime (time picker) component.
export default {
  // One time value entry: a third of the visible column, snapping to the
  // start of the scroll container.
  timeValues: classNames("h-1/3", "scroll-child-start"),
  // Value input styles; `colType` appends column-specific classes supplied
  // by the caller.
  input: ({ colType }) => {
    return classNames(
      "bg-th-primary",
      "focus:bg-opacity-25",
      "focus:bg-white",
      "focus:outline-none",
      "font-bold",
      "h-full",
      "placeholder-gray-300",
      "pointer-events-none",
      "text-center",
      "w-full",
      colType
    );
  },
  // Scrollable column of time values with mandatory vertical snap points;
  // the scrollbar itself is hidden.
  timeCol: classNames(
    "cursor-pointer",
    "font-bold",
    "h-full",
    "hide-scrollbar ",
    "overflow-scroll",
    "scroll-mandatory-y",
    "text-5xl",
    "md:text-9xl",
    "text-center",
    "text-th-white",
    "w-2/5"
  ),
};
SouzaVitoria/WhatsBusiness
src/actions/AppActions.js
import {
  MODIFICA_ADICIONA_CONTATO_EMAIL,
  ADICIONA_CONTATO_ERRO,
  ADICIONA_CONTATO_SUCESSO,
  lISTA_CONTATO_USER,
  MODIFICA_MENSAGEM,
  LISTA_CONVERSA_USUARIO,
  ENVIA_MENSAGEM_SUCESSO,
  LISTA_CONVERSAS_USUARIO
} from '../actions/Types';
import firebase from 'firebase';
import b64 from 'base-64';
import _ from 'lodash';

// Action creator: mirrors the "add contact" e-mail input into the store.
export const modificaAdicionaContatoEmail = textoModificaAdicionaContatoEmail => {
  return {
    type: MODIFICA_ADICIONA_CONTATO_EMAIL,
    payload: textoModificaAdicionaContatoEmail
  };
};

// Thunk: looks the e-mail up under /contatos/<email-b64>; when found, links
// the contact under the current user's /usuario_contatos list, otherwise
// reports a "user not found" error.
export const adicionaContato = email => {
  return dispatch => {
    let emailB64 = b64.encode(email);

    firebase.database().ref(`/contatos/${emailB64}`)
      .once('value')
      .then(snapshot => {
        if (snapshot.val()) {
          const dadosUser = _.first(_.values(snapshot.val()));
          const { currentUser } = firebase.auth();
          let emailUsuarioB64 = b64.encode(currentUser.email);

          firebase.database().ref(`/usuario_contatos/${emailUsuarioB64}`)
            .push({ email, name: dadosUser.name })
            .then(() => adicionaContatoSucesso(dispatch))
            .catch(erro => adicionaContatoErro(erro.message, dispatch));
        } else {
          // Fixed: the original branch dispatched two contradictory errors
          // back-to-back ("already in your contact list" followed by "no
          // user with this e-mail"); the first one was leftover dead code
          // and was immediately overwritten in the store. Only the real
          // cause is reported now.
          adicionaContatoErro('Nenhum usuário com este e-mail. Tente Novamente!', dispatch);
        }
      });
  };
};

// Helper: dispatches the "contact not found" error.
// NOTE(review): the `erro` argument is currently ignored and a fixed message
// is dispatched instead — confirm whether the real error should surface.
const adicionaContatoErro = (erro, dispatch) => (
  dispatch({
    type: ADICIONA_CONTATO_ERRO,
    payload: 'Nenhum usuário com este e-mail. Tente Novamente!'
  })
);

// Helper: flags a successful contact addition.
const adicionaContatoSucesso = dispatch => (
  dispatch({
    type: ADICIONA_CONTATO_SUCESSO,
    payload: true
  })
);

// Re-enables the "add contact" form after a successful add.
export const habilitaInclusaoContato = () => ({
  type: ADICIONA_CONTATO_SUCESSO,
  payload: false
});

// Thunk: subscribes to the current user's contact list and streams updates
// into the store.
export const contatosUsersFetch = () => {
  const { currentUser } = firebase.auth();

  return dispatch => {
    let emailUsuarioB64 = b64.encode(currentUser.email);

    firebase.database().ref(`/usuario_contatos/${emailUsuarioB64}`)
      .on('value', snapshot => {
        dispatch({
          type: lISTA_CONTATO_USER,
          payload: snapshot.val()
        });
      });
  };
};

// Action creator: mirrors the message input field into the store.
export const modificaMensagem = texto => {
  return {
    type: MODIFICA_MENSAGEM,
    payload: texto
  };
};

// Thunk: writes the message under both participants' /mensagens branches and
// refreshes both sides' /usuario_conversas entries.
export const enviarMensagem = (mensagem, contatoNome, contatoEmail) => {
  const { currentUser } = firebase.auth();

  return dispatch => {
    const usuarioEmailB64 = b64.encode(currentUser.email);
    const contatoEmailB64 = b64.encode(contatoEmail);

    firebase.database().ref(`/mensagens/${usuarioEmailB64}/${contatoEmailB64}`)
      .push({ mensagem, tipo: 'enviado' })
      .then(() => {
        firebase.database().ref(`/mensagens/${contatoEmailB64}/${usuarioEmailB64}`)
          .push({ mensagem, tipo: 'recebido' })
          .then(() => dispatch({ type: ENVIA_MENSAGEM_SUCESSO }));
      })
      .then(() => {
        firebase.database().ref(`/usuario_conversas/${usuarioEmailB64}/${contatoEmailB64}`)
          .set({ nome: contatoNome, email: contatoEmail })
          .then(() => {
            firebase.database().ref(`/contatos/${usuarioEmailB64}`)
              .once('value')
              .then(snapshot => {
                const dadosUsuario = _.first(_.values(snapshot.val()));

                firebase.database().ref(`/usuario_conversas/${contatoEmailB64}/${usuarioEmailB64}`)
                  // Fixed: this object referenced an undefined `usuarioEmail`
                  // variable, which threw a ReferenceError at runtime and
                  // left the peer's conversation entry unwritten.
                  .set({ nome: dadosUsuario.name, email: currentUser.email });
              });
          });
      });
  };
};

// Thunk: subscribes to the message stream between the current user and the
// given contact.
export const conversaUsuarioFetch = contatoEmail => {
  const { currentUser } = firebase.auth();
  let contatoEmailB64 = b64.encode(contatoEmail);
  let usuarioEmailB64 = b64.encode(currentUser.email);

  return dispatch => {
    firebase.database().ref(`/mensagens/${usuarioEmailB64}/${contatoEmailB64}`)
      .on('value', snapshot => {
        dispatch({
          type: LISTA_CONVERSA_USUARIO,
          payload: snapshot.val()
        });
      });
  };
};

// Thunk: subscribes to the current user's conversation index.
export const conversasUsuarioFetch = () => {
  const { currentUser } = firebase.auth();

  return dispatch => {
    let usuarioEmailB64 = b64.encode(currentUser.email);

    firebase.database().ref(`/usuario_conversas/${usuarioEmailB64}`)
      .on('value', snapshot => {
        dispatch({
          type: LISTA_CONVERSAS_USUARIO,
          payload: snapshot.val()
        });
      });
  };
};
crici/gradle-native
subprojects/internal-testing/src/main/java/dev/nokee/internal/testing/ConfigurationMatchers.java
<filename>subprojects/internal-testing/src/main/java/dev/nokee/internal/testing/ConfigurationMatchers.java /* * Copyright 2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dev.nokee.internal.testing; import com.google.common.collect.ImmutableMap; import lombok.val; import org.gradle.api.Project; import org.gradle.api.artifacts.*; import org.gradle.api.attributes.Attribute; import org.gradle.api.attributes.AttributeContainer; import org.gradle.api.attributes.HasAttributes; import org.hamcrest.Description; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import javax.annotation.Nullable; import java.io.File; import java.util.AbstractMap; import java.util.Map; import static java.util.Objects.requireNonNull; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; public final class ConfigurationMatchers { private ConfigurationMatchers() {} /** * Matches a configuration of the project. * * @param matcher a configuration matcher, must not be null * @return a project matcher for its configurations, never null */ public static Matcher<Project> hasConfiguration(Matcher<? super Configuration> matcher) { return configurations(hasItem(requireNonNull(matcher))); } public static Matcher<Project> configurations(Matcher<? 
super Iterable<Configuration>> matcher) { return new FeatureMatcher<Project, Iterable<Configuration>>(requireNonNull(matcher), "project's configurations is", "project's configurations") { @Override protected Iterable<Configuration> featureValueOf(Project actual) { return actual.getConfigurations(); } }; } /** * Matches a publish artifact of a configuration. * * @param matcher the publish artifact matcher, must not be null * @return a configuration matcher for its publish artifact, never null */ public static Matcher<Object> hasPublishArtifact(Matcher<? super PublishArtifact> matcher) { return new FeatureMatcher<Object, PublishArtifactSet>(hasItem(requireNonNull(matcher)), "a configuration with", "configuration") { @Override protected PublishArtifactSet featureValueOf(Object actual) { assertThat(actual, anyOf(isA(Configuration.class), isA(ConfigurationVariant.class))); if (actual instanceof Configuration) { return ((Configuration) actual).getArtifacts(); } else if (actual instanceof ConfigurationVariant) { return ((ConfigurationVariant) actual).getArtifacts(); } throw new UnsupportedOperationException(); } }; } /** * Matches a publish artifact by file. * * @param file the file of the artifact, must not be null * @return a publish artifact matcher for its file, never null */ public static Matcher<PublishArtifact> ofFile(File file) { return ofFile(equalTo(requireNonNull(file))); } public static Matcher<PublishArtifact> ofFile(Matcher<? super File> matcher) { return new FeatureMatcher<PublishArtifact, File>(requireNonNull(matcher), "a publish artifact with file", "publish artifact's file") { @Override protected File featureValueOf(PublishArtifact actual) { return actual.getFile(); } }; } public static Matcher<PublishArtifact> ofClassifier(String classifier) { return ofClassifier(equalTo(requireNonNull(classifier))); } public static Matcher<PublishArtifact> ofClassifier(Matcher<? 
super String> matcher) { return new FeatureMatcher<PublishArtifact, String>(requireNonNull(matcher), "a publish artifact with classifier", "publish artifact's classifier") { @Nullable @Override protected String featureValueOf(PublishArtifact actual) { return actual.getClassifier(); } }; } public static Matcher<PublishArtifact> ofType(String type) { return new FeatureMatcher<PublishArtifact, String>(equalTo(requireNonNull(type)), "a publish artifact with type", "publish artifact's type") { @Override protected String featureValueOf(PublishArtifact actual) { return actual.getType(); } }; } public static Matcher<Configuration> hasOutgoingVariant(Matcher<? super ConfigurationVariant> matcher) { return outgoingVariants(hasItem(matcher)); } /** * Matches outgoing variants of the configuration. * * @param matcher an outgoing variant matcher, must not be null * @return a configuration matcher for its outgoing variants, never null */ public static Matcher<Configuration> outgoingVariants(Matcher<? super Iterable<ConfigurationVariant>> matcher) { return new FeatureMatcher<Configuration, Iterable<ConfigurationVariant>>(requireNonNull(matcher), "configuration's outgoing variants is", "configuration's outgoing variants") { @Override protected Iterable<ConfigurationVariant> featureValueOf(Configuration actual) { return actual.getOutgoing().getVariants(); } }; } /** * Matches dependencies of the configuration. * * @param matcher a dependency matcher, must not be null * @return a configuration matcher for its dependency, never null */ public static Matcher<Configuration> dependencies(Matcher<? super Iterable<Dependency>> matcher) { return new FeatureMatcher<Configuration, Iterable<Dependency>>(matcher, "configuration's dependencies is", "configuration's dependencies") { @Override protected Iterable<Dependency> featureValueOf(Configuration actual) { return actual.getDependencies(); } }; } /** * Matches extends from of the configuration. 
* * @param matcher an extends from matcher, must not be null * @return a configuration matcher for its extends from, never null */ public static Matcher<Configuration> extendsFrom(Matcher<? super Iterable<Configuration>> matcher) { return new FeatureMatcher<Configuration, Iterable<Configuration>>(requireNonNull(matcher), "configuration's extends from is", "configuration's extends from") { @Override protected Iterable<Configuration> featureValueOf(Configuration actual) { return actual.getExtendsFrom(); } }; } /** * Matches a attributes of the configuration. * * @param matcher matcher for all of the configuration's attributes, must not be null * @return a configuration matcher for its attributes, never null */ public static Matcher<HasAttributes> attributes(Matcher<? super Map<? extends Attribute<?>, ?>> matcher) { return new FeatureMatcher<HasAttributes, Map<Attribute<?>, ?>>(matcher, "a configuration with attribute", "attributes") { @Override protected Map<Attribute<?>, ?> featureValueOf(HasAttributes actual) { return actual.getAttributes().keySet().stream().map(attribute -> new AbstractMap.SimpleImmutableEntry<>(attribute, actual.getAttributes().getAttribute(attribute))).collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)); } }; } /** * Matches an attribute for key and value matchers. * * @param keyMatcher the attribute key matcher, must not be null * @param valueMatcher the attribute value matcher, must not be null * @param <T> the attribute type * @return an attribute container matcher for the specified key/value matchers, never null */ public static <T> Matcher<AttributeContainer> hasAttribute(Matcher<? super Attribute<T>> keyMatcher, Matcher<? super T> valueMatcher) { return new FeatureMatcher<AttributeContainer, Map<? extends Attribute<?>, ?>>(hasEntry((Matcher<? super Attribute<?>>) keyMatcher, (Matcher<? super Object>) valueMatcher), "an attribute", "the attribute") { @Override protected Map<? 
extends Attribute<?>, ?> featureValueOf(AttributeContainer actual) { return actual.getAttributes().keySet().stream().map(attribute -> new AbstractMap.SimpleImmutableEntry<>(attribute, actual.getAttributes().getAttribute(attribute))).collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)); } }; } /** * Matches an attribute key. * * @param attribute the attribute key, must not be null * @param <T> the attribute type * @return an attribute container matcher for the specified key, never null */ public static <T> Matcher<AttributeContainer> hasAttribute(Attribute<T> attribute) { return new FeatureMatcher<AttributeContainer, Map<? extends Attribute<?>, ?>>(hasKey(equalTo(attribute)), "an attribute", "the attribute") { @Override protected Map<? extends Attribute<?>, ?> featureValueOf(AttributeContainer actual) { return actual.getAttributes().keySet().stream().map(attribute -> new AbstractMap.SimpleImmutableEntry<>(attribute, actual.getAttributes().getAttribute(attribute))).collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)); } }; } /** * Matches a dependency with the specified coordinate. * * @param coordinate the expected dependency coordinate, must not be null * @return a dependency matcher, never null */ public static <T extends Dependency> Matcher<T> forCoordinate(String coordinate) { val token = coordinate.split(":"); return forCoordinate(token[0], token[1], token[2]); } /** * Matches a dependency with the specified coordinate. 
* * @param group the expected dependency group, can be null * @param name the expected dependency name, must not be null * @param version the expected dependency version, can be null * @return a dependency matcher, never null */ public static <T extends Dependency> Matcher<T> forCoordinate(@Nullable String group, String name, @Nullable String version) { return allOf(withGroup(equalTo(group)), withName(equalTo(requireNonNull(name))), withVersion(equalTo(version))); } private static Matcher<Dependency> withGroup(Matcher<? super String> matcher) { return new FeatureMatcher<Dependency, String>(matcher, "a dependency with group", "but dependency's group") { @Nullable @Override protected String featureValueOf(Dependency actual) { return actual.getGroup(); } }; } private static Matcher<Dependency> withName(Matcher<? super String> matcher) { return new FeatureMatcher<Dependency, String>(matcher, "a dependency with name", "but dependency's name") { @Override protected String featureValueOf(Dependency actual) { return actual.getName(); } }; } private static Matcher<Dependency> withVersion(Matcher<? super String> matcher) { return new FeatureMatcher<Dependency, String>(matcher, "a dependency with version", "but dependency's version") { @Nullable @Override protected String featureValueOf(Dependency actual) { return actual.getVersion(); } }; } /** * Matches a declarable configuration, that is a configuration where {@link Configuration#isCanBeConsumed()} is {@literal false} and {@link Configuration#isCanBeResolved()} is {@literal false}. 
* * @return a configuration matcher, never null */ public static Matcher<Configuration> declarable() { return new TypeSafeMatcher<Configuration>() { @Override protected boolean matchesSafely(Configuration item) { return !item.isCanBeConsumed() && !item.isCanBeResolved(); } @Override protected void describeMismatchSafely(Configuration item, Description description) { description.appendText("was a ").appendText(configurationType(item)).appendText(" configuration"); } @Override public void describeTo(Description description) { description.appendText("a declarable configuration"); } }; } /** * Matches a consumable configuration, that is a configuration where {@link Configuration#isCanBeConsumed()} is {@literal true} and {@link Configuration#isCanBeResolved()} is {@literal false}. * * @return a configuration matcher, never null */ public static Matcher<Configuration> consumable() { return new TypeSafeMatcher<Configuration>() { @Override protected boolean matchesSafely(Configuration item) { return item.isCanBeConsumed() && !item.isCanBeResolved(); } @Override protected void describeMismatchSafely(Configuration item, Description description) { description.appendText("was a ").appendText(configurationType(item)).appendText(" configuration"); } @Override public void describeTo(Description description) { description.appendText("a consumable configuration"); } }; } /** * Matches a resolvable configuration, that is a configuration where {@link Configuration#isCanBeConsumed()} is {@literal false} and {@link Configuration#isCanBeResolved()} is {@literal true}. 
* * @return a configuration matcher, never null */ public static Matcher<Configuration> resolvable() { return new TypeSafeMatcher<Configuration>() { @Override protected boolean matchesSafely(Configuration item) { return !item.isCanBeConsumed() && item.isCanBeResolved(); } @Override protected void describeMismatchSafely(Configuration item, Description description) { description.appendText("was a ").appendText(configurationType(item)).appendText(" configuration"); } @Override public void describeTo(Description description) { description.appendText("a resolvable configuration"); } }; } private static String configurationType(Configuration item) { if (item.isCanBeConsumed() && item.isCanBeResolved()) { return "legacy"; } else if (item.isCanBeConsumed() && !item.isCanBeResolved()) { return "consumable"; } else if (!item.isCanBeConsumed() && item.isCanBeResolved()) { return "resolvable"; } else if (!item.isCanBeConsumed() && !item.isCanBeResolved()) { return "declarable"; } throw new UnsupportedOperationException(); } public static Matcher<Configuration> description(String description) { return description(equalTo(description)); } /** * Matches a configuration description using the specified matcher. * * @param matcher a configuration description to matcher, must not be null * @return a configuration matcher, never null */ public static Matcher<Configuration> description(Matcher<? super String> matcher) { return new FeatureMatcher<Configuration, String>(matcher, "a configuration with description of", "configuration description") { @Override protected String featureValueOf(Configuration actual) { return actual.getDescription(); } }; } public static Matcher<Dependency> module(Matcher<? super ModuleDependency> matcher) { return new FeatureMatcher<Dependency, ModuleDependency>(matcher, "a module dependency", "module dependency") { @Override protected ModuleDependency featureValueOf(Dependency actual) { return (ModuleDependency) actual; } }; } }
solocarrie/talipp
tests/test_indicator_chaining.py
<filename>tests/test_indicator_chaining.py import unittest from talipp.indicators import SMA from TalippTest import TalippTest class Test(TalippTest): def setUp(self) -> None: self.input_values = list(TalippTest.CLOSE_TMPL) def test_init(self): sma1 = SMA(3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) sma2 = SMA(3, input_indicator = sma1) sma3 = SMA(3, input_indicator = sma2) sma4 = SMA(3, input_indicator = sma3) print(sma1) print(sma2) print(sma3) print(sma4) self.assertAlmostEqual(sma4[-2], 5) self.assertAlmostEqual(sma4[-1], 6) def test_iterative_add(self): sma1 = SMA(3) sma2 = SMA(3, input_indicator = sma1) sma3 = SMA(3, input_indicator = sma2) sma4 = SMA(3, input_indicator = sma3) for i in range(1, 11): sma1.add_input_value(i) print(sma1) print(sma2) print(sma3) print(sma4) self.assertAlmostEqual(sma4[-2], 5) self.assertAlmostEqual(sma4[-1], 6) def test_update(self): sma1 = SMA(3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) sma2 = SMA(3, input_indicator = sma1) sma3 = SMA(3, input_indicator = sma2) sma4 = SMA(3, input_indicator = sma3) last_indicator_value = sma4[-1] last_input_value = sma1.input_values[-1] for i in range(1, 20): sma1.update_input_value(i) sma1.update_input_value(last_input_value) self.assertEqual(last_indicator_value, sma4[-1]) def test_delete(self): sma1 = SMA(3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) sma2 = SMA(3, input_indicator = sma1) sma3 = SMA(3, input_indicator = sma2) sma4 = SMA(3, input_indicator = sma3) last_indicator_value = sma4[-1] for i in range(1, 20): sma1.add_input_value(i) for i in range(1, 20): sma1.remove_input_value() self.assertEqual(last_indicator_value, sma4[-1]) def test_purge_oldest(self): sma1 = SMA(3, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) sma2 = SMA(3, input_indicator = sma1) sma3 = SMA(3, input_indicator = sma2) sma4 = SMA(3, input_indicator = sma3) # purge oldest N values purge_size = 2 sma1_copy = sma1[:] sma2_copy = sma2[:] sma3_copy = sma3[:] sma4_copy = sma4[:] sma1.purge_oldest(purge_size) self.assertSequenceEqual(sma1_copy[purge_size:], 
sma1) self.assertSequenceEqual(sma2_copy[purge_size:], sma2) self.assertSequenceEqual(sma3_copy[purge_size:], sma3) self.assertSequenceEqual(sma4_copy[purge_size:], sma4) # purge all remaining values purge_size = len(sma1) sma1_copy = sma1[:] sma2_copy = sma2[:] sma3_copy = sma3[:] sma4_copy = sma4[:] sma1.purge_oldest(purge_size) self.assertSequenceEqual(sma1_copy[purge_size:], sma1) self.assertSequenceEqual(sma2_copy[purge_size:], sma2) self.assertSequenceEqual(sma3_copy[purge_size:], sma3) self.assertSequenceEqual(sma4_copy[purge_size:], sma4) self.assertSequenceEqual([], sma1) self.assertSequenceEqual([], sma2) self.assertSequenceEqual([], sma3) self.assertSequenceEqual([], sma4) if __name__ == '__main__': unittest.main()
alexiynew/nih_framework
neutrino/graphics/src/font/tables/index_to_location.cpp
#include <common/utils.hpp> #include <graphics/src/font/tables/index_to_location.hpp> namespace framework::graphics::details::font { IndexToLocation::IndexToLocation(std::int16_t index_to_loc_format, std::uint16_t num_glyphs, const BytesData& data) { const size_t size = num_glyphs + 1; m_offsets.reserve(size); auto in = utils::make_big_endian_buffer_reader(data); for (size_t i = 0; i < size; ++i) { switch (index_to_loc_format) { case 0: m_offsets.push_back(in.get<Offset16>() * 2); break; case 1: m_offsets.push_back(in.get<Offset32>()); break; } } } bool IndexToLocation::valid() const { return m_offsets.size() != 0 && m_offsets[0] == 0; } const std::vector<Offset32>& IndexToLocation::offsets() const { return m_offsets; } } // namespace framework::graphics::details::font
sbairos/flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/TaskLocalStateStoreImpl.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.state;

import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.jobgraph.JobVertexID;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;

import java.io.File;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.function.LongPredicate;

/**
 * Main implementation of a {@link TaskLocalStateStore}.
 *
 * <p>Stores one {@link TaskStateSnapshot} per checkpoint id for a single
 * (job, job vertex, subtask) triple. Mutations of the snapshot map are guarded
 * by {@link #lock}; discarding of replaced/pruned snapshots happens
 * asynchronously on {@link #discardExecutor}, outside the lock.
 */
public class TaskLocalStateStoreImpl implements OwnedTaskLocalStateStore {

    /** Logger for this class. */
    private static final Logger LOG = LoggerFactory.getLogger(TaskLocalStateStoreImpl.class);

    /** Dummy value to use instead of null to satisfy {@link ConcurrentHashMap}. */
    @VisibleForTesting static final TaskStateSnapshot NULL_DUMMY = new TaskStateSnapshot(0);

    /** JobID from the owning subtask. */
    @Nonnull private final JobID jobID;

    /** AllocationID of the owning slot. */
    @Nonnull private final AllocationID allocationID;

    /** JobVertexID of the owning subtask. */
    @Nonnull private final JobVertexID jobVertexID;

    /** Subtask index of the owning subtask. */
    @Nonnegative private final int subtaskIndex;

    /** The configured mode for local recovery. */
    @Nonnull private final LocalRecoveryConfig localRecoveryConfig;

    /** Executor that runs the discarding of released state objects. */
    @Nonnull private final Executor discardExecutor;

    /** Lock for synchronisation on the storage map and the discarded status. */
    @Nonnull private final Object lock;

    /** Status flag if this store was already discarded. */
    @GuardedBy("lock")
    private boolean disposed;

    /** Maps checkpoint ids to local TaskStateSnapshots. */
    @Nonnull
    @GuardedBy("lock")
    private final SortedMap<Long, TaskStateSnapshot> storedTaskStateByCheckpointID;

    public TaskLocalStateStoreImpl(
            @Nonnull JobID jobID,
            @Nonnull AllocationID allocationID,
            @Nonnull JobVertexID jobVertexID,
            @Nonnegative int subtaskIndex,
            @Nonnull LocalRecoveryConfig localRecoveryConfig,
            @Nonnull Executor discardExecutor) {

        this(
                jobID,
                allocationID,
                jobVertexID,
                subtaskIndex,
                localRecoveryConfig,
                discardExecutor,
                new TreeMap<>(),
                new Object());
    }

    @VisibleForTesting
    TaskLocalStateStoreImpl(
            @Nonnull JobID jobID,
            @Nonnull AllocationID allocationID,
            @Nonnull JobVertexID jobVertexID,
            @Nonnegative int subtaskIndex,
            @Nonnull LocalRecoveryConfig localRecoveryConfig,
            @Nonnull Executor discardExecutor,
            @Nonnull SortedMap<Long, TaskStateSnapshot> storedTaskStateByCheckpointID,
            @Nonnull Object lock) {

        this.jobID = jobID;
        this.allocationID = allocationID;
        this.jobVertexID = jobVertexID;
        this.subtaskIndex = subtaskIndex;
        this.discardExecutor = discardExecutor;
        this.localRecoveryConfig = localRecoveryConfig;
        this.storedTaskStateByCheckpointID = storedTaskStateByCheckpointID;
        this.lock = lock;
        this.disposed = false;
    }

    @Override
    public void storeLocalState(
            @Nonnegative long checkpointId, @Nullable TaskStateSnapshot localState) {

        // Null is mapped to a dummy marker so the map never holds null values.
        if (localState == null) {
            localState = NULL_DUMMY;
        }

        if (LOG.isTraceEnabled()) {
            LOG.trace(
                    "Stored local state for checkpoint {} in subtask ({} - {} - {}) : {}.",
                    checkpointId,
                    jobID,
                    jobVertexID,
                    subtaskIndex,
                    localState);
        } else if (LOG.isDebugEnabled()) {
            LOG.debug(
                    "Stored local state for checkpoint {} in subtask ({} - {} - {})",
                    checkpointId,
                    jobID,
                    jobVertexID,
                    subtaskIndex);
        }

        Map.Entry<Long, TaskStateSnapshot> toDiscard = null;

        synchronized (lock) {
            if (disposed) {
                // we ignore late stores and simply discard the state.
                toDiscard = new AbstractMap.SimpleEntry<>(checkpointId, localState);
            } else {
                TaskStateSnapshot previous =
                        storedTaskStateByCheckpointID.put(checkpointId, localState);

                // A replaced snapshot for the same checkpoint id is discarded.
                if (previous != null) {
                    toDiscard = new AbstractMap.SimpleEntry<>(checkpointId, previous);
                }
            }
        }

        // Discarding happens asynchronously, outside the lock.
        if (toDiscard != null) {
            asyncDiscardLocalStateForCollection(Collections.singletonList(toDiscard));
        }
    }

    @Override
    @Nullable
    public TaskStateSnapshot retrieveLocalState(long checkpointID) {

        TaskStateSnapshot snapshot;
        synchronized (lock) {
            snapshot = storedTaskStateByCheckpointID.get(checkpointID);
        }

        if (snapshot != null) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Found registered local state for checkpoint {} in subtask ({} - {} - {}) : {}",
                    checkpointID, jobID, jobVertexID, subtaskIndex, snapshot);
            } else if (LOG.isDebugEnabled()) {
                LOG.debug("Found registered local state for checkpoint {} in subtask ({} - {} - {})",
                    checkpointID, jobID, jobVertexID, subtaskIndex);
            }
        } else {
            LOG.debug("Did not find registered local state for checkpoint {} in subtask ({} - {} - {})",
                checkpointID, jobID, jobVertexID, subtaskIndex);
        }

        // The dummy marker is translated back to null for callers.
        return (snapshot != NULL_DUMMY) ? snapshot : null;
    }

    @Override
    @Nonnull
    public LocalRecoveryConfig getLocalRecoveryConfig() {
        return localRecoveryConfig;
    }

    @Override
    public void confirmCheckpoint(long confirmedCheckpointId) {

        LOG.debug("Received confirmation for checkpoint {} in subtask ({} - {} - {}). Starting to prune history.",
            confirmedCheckpointId, jobID, jobVertexID, subtaskIndex);

        // Everything strictly older than the confirmed checkpoint becomes obsolete.
        pruneCheckpoints(
                (snapshotCheckpointId) -> snapshotCheckpointId < confirmedCheckpointId, true);
    }

    @Override
    public void abortCheckpoint(long abortedCheckpointId) {

        LOG.debug("Received abort information for checkpoint {} in subtask ({} - {} - {}). Starting to prune history.",
            abortedCheckpointId, jobID, jobVertexID, subtaskIndex);

        pruneCheckpoints(snapshotCheckpointId -> snapshotCheckpointId == abortedCheckpointId, false);
    }

    @Override
    public void pruneMatchingCheckpoints(@Nonnull LongPredicate matcher) {

        pruneCheckpoints(
                matcher, false);
    }

    /** Disposes the state of all local snapshots managed by this object. */
    @Override
    public CompletableFuture<Void> dispose() {

        Collection<Map.Entry<Long, TaskStateSnapshot>> statesCopy;

        synchronized (lock) {
            disposed = true;
            statesCopy = new ArrayList<>(storedTaskStateByCheckpointID.entrySet());
            storedTaskStateByCheckpointID.clear();
        }

        return CompletableFuture.runAsync(
                () -> {
                    // discard all remaining state objects.
                    syncDiscardLocalStateForCollection(statesCopy);

                    // delete the local state subdirectory that belong to this subtask.
                    LocalRecoveryDirectoryProvider directoryProvider =
                            localRecoveryConfig.getLocalStateDirectoryProvider();

                    for (int i = 0; i < directoryProvider.allocationBaseDirsCount(); ++i) {
                        File subtaskBaseDirectory =
                                directoryProvider.selectSubtaskBaseDirectory(i);
                        try {
                            deleteDirectory(subtaskBaseDirectory);
                        } catch (IOException e) {
                            LOG.warn("Exception when deleting local recovery subtask base directory {} in subtask ({} - {} - {})",
                                subtaskBaseDirectory, jobID, jobVertexID, subtaskIndex, e);
                        }
                    }
                },
                discardExecutor);
    }

    private void asyncDiscardLocalStateForCollection(Collection<Map.Entry<Long, TaskStateSnapshot>> toDiscard) {
        if (!toDiscard.isEmpty()) {
            discardExecutor.execute(() -> syncDiscardLocalStateForCollection(toDiscard));
        }
    }

    private void syncDiscardLocalStateForCollection(Collection<Map.Entry<Long, TaskStateSnapshot>> toDiscard) {
        for (Map.Entry<Long, TaskStateSnapshot> entry : toDiscard) {
            discardLocalStateForCheckpoint(entry.getKey(), entry.getValue());
        }
    }

    /**
     * Helper method that synchronously discards a state object and deletes its local state
     * directory, reporting exceptions to the log instead of propagating them.
     */
    private void discardLocalStateForCheckpoint(long checkpointID, TaskStateSnapshot o) {

        if (LOG.isTraceEnabled()) {
            LOG.trace("Discarding local task state snapshot of checkpoint {} for subtask ({} - {} - {}).",
                checkpointID, jobID, jobVertexID, subtaskIndex);
        } else {
            LOG.debug("Discarding local task state snapshot {} of checkpoint {} for subtask ({} - {} - {}).",
                o, checkpointID, jobID, jobVertexID, subtaskIndex);
        }

        try {
            o.discardState();
        } catch (Exception discardEx) {
            LOG.warn("Exception while discarding local task state snapshot of checkpoint {} in subtask ({} - {} - {}).",
                checkpointID, jobID, jobVertexID, subtaskIndex, discardEx);
        }

        LocalRecoveryDirectoryProvider directoryProvider = localRecoveryConfig.getLocalStateDirectoryProvider();
        File checkpointDir = directoryProvider.subtaskSpecificCheckpointDirectory(checkpointID);

        LOG.debug("Deleting local state directory {} of checkpoint {} for subtask ({} - {} - {}).",
            checkpointDir, checkpointID, jobID, jobVertexID, subtaskIndex);

        try {
            deleteDirectory(checkpointDir);
        } catch (IOException ex) {
            LOG.warn("Exception while deleting local state directory of checkpoint {} in subtask ({} - {} - {}).",
                checkpointID, jobID, jobVertexID, subtaskIndex, ex);
        }
    }

    /** Helper method to delete a directory. */
    private void deleteDirectory(File directory) throws IOException {
        Path path = new Path(directory.toURI());
        FileSystem fileSystem = path.getFileSystem();
        if (fileSystem.exists(path)) {
            fileSystem.delete(path, true);
        }
    }

    /**
     * Prunes the checkpoints matching the given predicate.
     *
     * <p>NOTE(review): despite what an earlier comment suggested, this method must NOT be called
     * while holding {@link #lock} — it synchronizes on the lock internally and then discards the
     * collected snapshots asynchronously outside the lock.
     *
     * @param pruningChecker predicate that selects checkpoint ids to remove.
     * @param breakOnceCheckerFalse if true, iteration (in ascending checkpoint id order) stops at
     *     the first non-matching entry.
     */
    private void pruneCheckpoints(LongPredicate pruningChecker, boolean breakOnceCheckerFalse) {

        final List<Map.Entry<Long, TaskStateSnapshot>> toRemove = new ArrayList<>();

        synchronized (lock) {
            Iterator<Map.Entry<Long, TaskStateSnapshot>> entryIterator =
                    storedTaskStateByCheckpointID.entrySet().iterator();

            while (entryIterator.hasNext()) {

                Map.Entry<Long, TaskStateSnapshot> snapshotEntry = entryIterator.next();
                long entryCheckpointId = snapshotEntry.getKey();

                if (pruningChecker.test(entryCheckpointId)) {
                    toRemove.add(snapshotEntry);
                    entryIterator.remove();
                } else if (breakOnceCheckerFalse) {
                    break;
                }
            }
        }

        asyncDiscardLocalStateForCollection(toRemove);
    }

    @Override
    public String toString() {
        // NOTE(review): reads storedTaskStateByCheckpointID without holding the lock —
        // intended for diagnostics only; verify before relying on it under concurrency.
        return "TaskLocalStateStore{" +
            "jobID=" + jobID +
            ", jobVertexID=" + jobVertexID +
            ", allocationID=" + allocationID.toHexString() +
            ", subtaskIndex=" + subtaskIndex +
            ", localRecoveryConfig=" + localRecoveryConfig +
            ", storedCheckpointIDs=" + storedTaskStateByCheckpointID.keySet() +
            '}';
    }
}
withabound/irs
pkg/subrecords/5498.go
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package subrecords import ( "bytes" "reflect" "time" "unicode/utf8" "github.com/moov-io/irs/pkg/config" "github.com/moov-io/irs/pkg/utils" ) type Sub5498 struct { // Enter “1” (one) if reporting a rollover (Amount Code 2) or Fair // Market Value (Amount Code 5) for an IRA. Otherwise, enter // a blank. IRAIndicator string `json:"ira_indicator"` // Enter “1” (one) if reporting a rollover (Amount Code 2) or Fair // Market Value (Amount Code 5) for a SEP. Otherwise, enter // a blank. SEPIndicator string `json:"sep_indicator"` // Enter “1” (one) if reporting a rollover (Amount Code 2) or Fair // Market Value (Amount Code 5) for a SIMPLE. Otherwise, // enter a blank. SIMPLEIndicator string `json:"simple_indicator"` // Enter “1” (one) if reporting a rollover (Amount Code 2) or Fair // Market Value (Amount Code 5) for a Roth IRA. Otherwise, // enter a blank. RothIRAIndicator string `json:"roth_ira_indicator"` // Enter “1” (one) if reporting RMD for 2020. Otherwise, enter a // blank. RMDIndicator string `json:"rmd_indicator"` // Required. Enter the date the option was granted in // YYYYMMDD format (for example, January 5, 2019, would be // 20190105). YearPostponedContribution int `json:"year_postponed_contribution"` // Required, if applicable. Enter the code from the table below. // Right justify. Otherwise, enter blanks. // FD: Federally Designated Disaster Area // PL: Public Law // EO: Executive Order // PO: Rollovers of qualified plan loan offset amounts // SC: For participants who have certified that // the rollover contribution is late because // of an error on the part of a financial // institution, death, disability, // hospitalization, incarceration, // restrictions imposed by a foreign // country, postal error, or other // circumstance listed in Section // 3.02(2) of Rev. Proc. 
2016-47 or other // event beyond the reasonable control of // the participant. PostponedContributionCode string `json:"postponed_contribution_code"` // Required, if applicable. Enter the federally declared disaster // area, public law number or executive order number under // which the postponed contribution is being issued. // Right justify. Otherwise, enter blanks. PostponedContributionReason string `json:"postponed_contribution_reason"` // Required. Enter the two-character alpha Repayment Code. // Right justify. Otherwise, enter blanks. // QR: Qualified Reservist Distribution // DD: Federally Designated Disaster Distribution RepaymentCode string `json:"repayment_code"` // Enter the date by which the RMD amount must be distributed // to avoid the 50% excise tax. Format the date as // YYYYMMDD (for example, January 5, 2019, would be // 20190105). Otherwise, enter blanks. RMDDate time.Time `json:"rmd_date"` // Equal to one alpha character or two alpha characters or // blank. Valid characters are: // • Two-character combinations can consist of A, B, C, // D, E, F, and G. // • Valid character H cannot be present with any other // characters. Codes string `json:"codes"` // This portion of the “B” Record may be used to record // information for state or local government reporting or for the // filer’s own purposes. Payers should contact the state or local // revenue departments for filing requirements. // If this field is not used, enter blanks. SpecialDataEntries string `json:"special_data_entries"` // Enter the valid CF/SF code if this payee record is to be // forwarded to a state agency as part of the CF/SF Program. 
CombinedFSCode int `json:"combined_federal_state_code"` } // Type returns type of “5498” record func (r *Sub5498) Type() string { return config.Sub5498Type } // Type returns FS code of “5498” record func (r *Sub5498) FederalState() int { return r.CombinedFSCode } // Parse parses the “5498” record from fire ascii func (r *Sub5498) Parse(buf []byte) error { record := string(buf) if utf8.RuneCountInString(record) != config.SubRecordLength { return utils.ErrRecordLength } fields := reflect.ValueOf(r).Elem() if !fields.IsValid() { return utils.ErrValidField } return utils.ParseValue(fields, config.Sub5498Layout, record) } // Ascii returns fire ascii of “5498” record func (r *Sub5498) Ascii() []byte { var buf bytes.Buffer records := config.ToSpecifications(config.Sub5498Layout) fields := reflect.ValueOf(r).Elem() if !fields.IsValid() { return nil } buf.Grow(config.SubRecordLength) for _, spec := range records { value := utils.ToString(spec.Field, fields.FieldByName(spec.Name)) buf.WriteString(value) } return buf.Bytes() } // Validate performs some checks on the record and returns an error if not Validated func (r *Sub5498) Validate() error { return utils.Validate(r, config.Sub5498Layout, config.Sub5498Type) } // customized field validation functions // function name should be "Validate" + field name func (r *Sub5498) ValidateIRAIndicator() error { if len(r.IRAIndicator) > 0 && r.IRAIndicator != config.GeneralOneIndicator { return utils.NewErrValidValue("ira indicator") } return nil } func (r *Sub5498) ValidateSEPIndicator() error { if len(r.SEPIndicator) > 0 && r.SEPIndicator != config.GeneralOneIndicator { return utils.NewErrValidValue("sep indicator") } return nil } func (r *Sub5498) ValidateSIMPLEIndicator() error { if len(r.SIMPLEIndicator) > 0 && r.SIMPLEIndicator != config.GeneralOneIndicator { return utils.NewErrValidValue("simple indicator") } return nil } func (r *Sub5498) ValidateRothIRAIndicator() error { if len(r.RothIRAIndicator) > 0 && r.RothIRAIndicator != 
config.GeneralOneIndicator { return utils.NewErrValidValue("roth ira indicator") } return nil } func (r *Sub5498) ValidateRMDIndicator() error { if len(r.RMDIndicator) > 0 && r.RMDIndicator != config.GeneralOneIndicator { return utils.NewErrValidValue("rmd indicator") } return nil } func (r *Sub5498) ValidatePostponedContributionCode() error { if len(r.PostponedContributionCode) > 0 { switch r.PostponedContributionCode { case "FD", "PL", "EO", "PO", "SC": return nil default: return utils.NewErrValidValue("postponed contribution code") } } return nil } func (r *Sub5498) ValidateRepaymentCode() error { if len(r.RepaymentCode) > 0 { switch r.RepaymentCode { case "QR", "DD": return nil default: return utils.NewErrValidValue("repayment code") } } return nil } func (r *Sub5498) ValidateCodes() error { if len(r.Codes) > 0 { lowCode := 'A' highCode := 'H' if len(r.Codes) > 1 { highCode = 'G' } for _, letter := range r.Codes { if letter >= lowCode && letter <= highCode { return nil } else { return utils.NewErrValidValue("repayment code") } } } return nil } func (r *Sub5498) ValidateCombinedFSCode() error { return utils.ValidateCombinedFSCode(r.CombinedFSCode) }
libc0607/nodemcu-firmware
app/rtl8370/rtl8370_asicdrv_rrcp.h
<gh_stars>1-10 #ifndef _RTL8370_ASICDRV_RRCP_H_ #define _RTL8370_ASICDRV_RRCP_H_ #include <rtl8370_asicdrv.h> extern ret_t rtl8370_setAsicRrcp(uint32 vOneEnable, uint32 vTwoEnable); extern ret_t rtl8370_getAsicRrcp(uint32 *vOneEnable, uint32 *vTwoEnable); extern ret_t rtl8370_setAsicRrcpTrustPortmask(uint32 pmsk); extern ret_t rtl8370_getAsicRrcpTrustPortmask(uint32 *pmsk); extern ret_t rtl8370_setAsicRrcpAuthenticationKey(uint32 authKey); extern ret_t rtl8370_getAsicRrcpAuthenticationKey(uint32 *authKey); extern ret_t rtl8370_setAsicRrcpPrivateKey(uint32 privateKey); extern ret_t rtl8370_getAsicRrcpPrivateKey(uint32 *privateKey); extern ret_t rtl8370_setAsicRrcpV2Trap8051(uint32 trap); extern ret_t rtl8370_getAsicRrcpV2Trap8051(uint32 *trap); #endif /*_RTL8370_ASICDRV_RRCP_H_*/
Orange-OpenSource/optisam-backend
report-service/pkg/protocol/rest/server.go
// Copyright (C) 2019 Orange // // This software is distributed under the terms and conditions of the 'Apache License 2.0' // license which can be found in the file 'License.txt' in this package distribution // or at 'http://www.apache.org/licenses/LICENSE-2.0'. package rest import ( "context" "crypto/rsa" "net/http" "optisam-backend/common/optisam/logger" rest_middleware "optisam-backend/common/optisam/middleware/rest" v1 "optisam-backend/report-service/pkg/api/v1" "os" "os/signal" "time" "github.com/grpc-ecosystem/grpc-gateway/runtime" "go.opencensus.io/plugin/ocgrpc" "go.opencensus.io/plugin/ochttp" "go.uber.org/zap" "google.golang.org/grpc" ) // RunServer runs HTTP/REST gateway func RunServer(ctx context.Context, grpcPort, httpPort string, verifyKey *rsa.PublicKey) error { ctx, cancel := context.WithCancel(ctx) defer cancel() mux := runtime.NewServeMux() opts := []grpc.DialOption{grpc.WithInsecure(), grpc.WithStatsHandler(&ocgrpc.ClientHandler{})} if err := v1.RegisterReportServiceHandlerFromEndpoint(ctx, mux, "localhost:"+grpcPort, opts); err != nil { logger.Log.Fatal("failed to start HTTP gateway", zap.String("reason", err.Error())) } srv := &http.Server{ Addr: ":" + httpPort, Handler: &ochttp.Handler{Handler: rest_middleware.AddCORS([]string{"*"}, // rest_middleware.ValidateAuth(verifyKey, // rest_middleware.AddLogger(logger.Log, mux), // )}, }, } // graceful shutdown c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) go func() { for range c { // sig is a ^C, handle it } _, cancel := context.WithTimeout(ctx, 5*time.Second) defer cancel() _ = srv.Shutdown(ctx) }() logger.Log.Info("starting HTTP/REST gateway...") return srv.ListenAndServe() }
mglantz/insights-core
insights/parsers/tests/test_nova_conf.py
from __future__ import print_function
from insights.core.context import OSP
from insights.parsers import nova_conf
from insights.tests import context_wrap

# Representative nova.conf fixture; the addresses below are sanitized IPv6
# literals and the test asserts they round-trip through the parser verbatim.
nova_content = """
[DEFAULT]
notification_driver =
#this is comment
notification_topics=notifications
rpc_backend=rabbit
use_ipv6=True
notify_on_state_change=vm_and_task_state
notify_api_faults=False
state_path=/var/lib/nova
report_interval = 10
osapi_compute_listen=fd00:4888:1000:f901::c1
osapi_compute_workers=32
metadata_listen=fd00:4888:1000:f901::c1
metadata_workers=32
service_down_time=60
rootwrap_config=/etc/nova/rootwrap.conf
auth_strategy=keystone
use_forwarded_for=False
novncproxy_host=fd00:4888:1000:f901::c1
novncproxy_port=6080
network_api_class=nova.network.neutronv2.api.API
dhcp_domain=
security_group_api=neutron
debug=False
verbose=False
log_dir=/var/log/nova
use_syslog=False
scheduler_host_manager=nova.scheduler.host_manager.HostManager
scheduler_host_subset_size=1
cpu_allocation_ratio=16.0
disk_allocation_ratio=1.0
max_io_ops_per_host=8
max_instances_per_host=50
ram_allocation_ratio=1.0
scheduler_available_filters=nova.scheduler.filters.all_filters
scheduler_default_filters=RetryFilter,AvailabilityZoneFilter,RamFilter,ComputeFilter,ComputeCapabilitiesFilter,ImagePropertiesFilter,ServerGroupAntiAffinityFilter,ServerGroupAffinityFilter,NUMATopologyFilter,PciPassthroughFilter
scheduler_weight_classes=nova.scheduler.weights.all_weighers
scheduler_max_attempts=3
vif_plugging_is_fatal=True
vif_plugging_timeout=300
firewall_driver=nova.virt.firewall.NoopFirewallDriver
novncproxy_base_url=http://[fd00:4888:fdf8:f53e:61e4::18]:6080/vnc_auto.html
volume_api_class=nova.volume.cinder.API
memcached_servers=inet6:[fdfc00:e968:6179::de52:7100]:11211,inet6:[fd00:4fc00:db20:35b:7399::5]:11211,inet6:[fdfdf8:f53e:61e4::18]:11211
[ephemeral_storage_encryption]
[glance]
api_servers=http://[fd00:fdf8:f53e:61e4::18]:9292
[keystone_authtoken]
auth_uri=http://[fd00:4888:1fc00:db20:35b:7399::5]:5000/v2.0
identity_uri=http://192.168.1.107:35357
admin_user=nova
admin_password=*********
admin_tenant_name=service
service_metadata_proxy=True
ovs_bridge=br-int
extension_sync_interval=600
rabbit_hosts=fd00:4888:1000:f901::c0,fd00:4888:1000:f901::c1,fd00:4888:1000:f901::c2
rabbit_use_ssl=False
rabbit_userid=guest
rabbit_password=*********
rabbit_virtual_host = /
rabbit_ha_queues=True
heartbeat_timeout_threshold=60
heartbeat_rate=2
"""

# Parser context: OSP compute node.
osp = OSP()
osp.role = "Compute"


def test_nova_conf():
    # Parse the fixture and spot-check option retrieval across sections:
    # empty values, values with spaces around '=', and IPv6 literals.
    result = nova_conf.NovaConf(context_wrap(nova_content, osp=osp))
    print(result)
    assert result.get("DEFAULT", "notification_driver") == ""
    assert result.get("DEFAULT", "report_interval") == "10"
    assert result.get("DEFAULT", "novncproxy_host") == "fd00:4888:1000:f901::c1"
    assert result.get("keystone_authtoken", "auth_uri") == "http://[fd00:4888:fc00:e968:6179::de52:7100]:5000/v2.0"
    assert result.get("keystone_authtoken", "service_metadata_proxy") == "True"
    assert result.get("keystone_authtoken", "rabbit_hosts")\
        == "fd00:4888:1000:f901::c0,fd00:4888:1000:f901::c1,fd00:4888:1000:f901::c2"
shao1chuan/pythonbook
机器学习/逻辑回归/sigmoid曲线.py
<reponame>shao1chuan/pythonbook import numpy as np import matplotlib.pyplot as plt def sigmoid(t): return 1. / (1. + np.exp(-t)) x = np.linspace(-10, 10, 500) plt.plot(x, sigmoid(x)) plt.show()
darione85/omirlMe
OmirlServer/Omirl/src/it/fadeout/omirl/CacheObject.java
package it.fadeout.omirl; public class CacheObject { private long timestamp; private Object data; public long getTimestamp() { return timestamp; } public void setTimestamp(long timestamp) { this.timestamp = timestamp; } public Object getData() { return data; } public void setData(Object data) { this.data = data; } }
community-boating/cbidb-api
app/org/sailcbi/APIServer/Entities/JsFacades/Stripe/ChargeRefund.scala
<reponame>community-boating/cbidb-api package org.sailcbi.APIServer.Entities.JsFacades.Stripe import com.coleji.neptune.Core.RequestCacheObject import com.coleji.neptune.IO.PreparedQueries.PreparedValue import com.coleji.neptune.Storable.{CastableToStorableClass, CastableToStorableObject} import org.sailcbi.APIServer.UserTypes.ApexRequestCache import play.api.libs.json.{JsValue, Json} case class ChargeRefund( refundId: String, chargeId: String, closeId: Int, amountInCents: Int ) extends CastableToStorableClass { val storableObject: CastableToStorableObject[_] = ChargeRefund val persistenceValues: Map[String, PreparedValue] = ChargeRefund.persistenceValues(this) val pkSqlLiteral: String = refundId } object ChargeRefund extends StripeCastableToStorableObject[ChargeRefund] { implicit val chargeRefundJSONFormat = Json.format[ChargeRefund] override val allowedUserTypes: Set[RequestCacheObject[_]] = Set(ApexRequestCache) def apply(v: JsValue): ChargeRefund = v.as[ChargeRefund] val apexTableName = "STRIPE_REFUNDS" val persistenceFieldsMap: Map[String, ChargeRefund => PreparedValue] = Map( "REFUND_ID" -> ((r: ChargeRefund) => r.refundId), "CHARGE_ID" -> ((r: ChargeRefund) => r.chargeId), "CLOSE_ID" -> ((r: ChargeRefund) => r.closeId), "AMOUNT_IN_CENTS" -> ((r: ChargeRefund) => r.amountInCents) ) val pkColumnName = "REFUND_ID" val getURL: String = "refunds" val getId: ChargeRefund => String = _.refundId }
profmikegreene/HAXcms
build/es6-amd/node_modules/@lrnwebcomponents/haxcms-elements/lib/ui-components/navigation/site-top-menu.js
<reponame>profmikegreene/HAXcms<filename>build/es6-amd/node_modules/@lrnwebcomponents/haxcms-elements/lib/ui-components/navigation/site-top-menu.js define(["exports", "require", "../../../../../@polymer/polymer/polymer-element.js", "../../core/haxcms-site-store.js", "../../../../../mobx/lib/mobx.module.js", "../query/site-query.js", "../../../../../@polymer/polymer/lib/elements/dom-repeat.js"], function (_exports, _require, _polymerElement, _haxcmsSiteStore, _mobxModule, _siteQuery, _domRepeat) { "use strict"; Object.defineProperty(_exports, "__esModule", { value: true }); _exports.SiteTopMenu = void 0; _require = babelHelpers.interopRequireWildcard(_require); /** * Copyright 2019 The Pennsylvania State University * @license Apache-2.0, see License.md for full text. */ /** * `site-top-menu` * `Menu on top of the site typically a bar of options` * * @customElement * @polymer * @demo demo/index.html */ class SiteTopMenu extends _polymerElement.PolymerElement { /** * Store the tag name to make it easier to obtain directly. 
* @notice function name must be here for tooling to operate correctly */ static get tag() { return "site-top-menu"; } constructor() { super(); this.__disposer = []; new Promise((res, rej) => _require.default(["../../../../../@polymer/paper-icon-button/paper-icon-button.js"], res, rej)); new Promise((res, rej) => _require.default(["../../../../../@polymer/paper-button/paper-button.js"], res, rej)); } // render function static get template() { return _polymerElement.html` <style> :host { display: block; --site-top-menu-bg: var(--haxcms-color, #ffffff); --site-top-menu-indicator-arrow: 6px; transition: 0.2s opacity linear; opacity: 1; } :host([edit-mode]) { opacity: 0.2; pointer-events: none; } :host([sticky]) { position: fixed; top: 0; left: 0; right: 0; z-index: 1000; @apply --site-top-menu-sticky; } .wrapper { display: flex; justify-content: space-evenly; background-color: var(--site-top-menu-bg); @apply --site-top-menu-wrapper; } :host .wrapper ::slotted(div.spacing) { display: inline-flex; @apply --site-top-menu-spacing; } .spacing { display: inline-flex; @apply --site-top-menu-spacing; } .link { color: var(--site-top-menu-link-color, #444444); @apply --site-top-menu-link; } paper-button { text-transform: unset; min-width: unset; @apply --site-top-menu-button; } .active { color: var(--site-top-menu-link-active-color, #000000); @apply --site-top-menu-link-active; } #indicator { transition: 0.4s ease-in-out left; transition-delay: 0.2s; position: relative; width: 0; height: 0; visibility: hidden; } :host([indicator="line"]) #indicator { border-bottom: 2px solid var(--site-top-menu-indicator-color, #000000); @apply --site-top-menu-indicator; } :host([indicator="arrow"]) #indicator { border-left: var(--site-top-menu-indicator-arrow) solid transparent; border-right: var(--site-top-menu-indicator-arrow) solid transparent; border-bottom: var(--site-top-menu-indicator-arrow) solid var(--site-top-menu-indicator-color, #000000); @apply --site-top-menu-indicator; } 
#indicator.activated { visibility: visible; position: absolute; @apply --site-top-menu-indicator-activated; } :host([notitle]) .spacing .link-title { display: none; } .spacing .link-index { display: none; } :host([showindex]) .spacing .link-index { display: inline-flex; } .mobiletitle, paper-icon-button { display: none; } @media screen and (max-width: 640px) { .wrapper .spacing { display: none; } .wrapper .mobiletitle, .wrapper paper-icon-button { display: inline-block; } .wrapper { display: block; } } @media screen and (max-width: 640px) { #indicator { display: none !important; } .wrapper.responsive { position: relative; } .wrapper.responsive .spacing { float: none; display: block; text-align: left; } } </style> <site-query result="{{__items}}" sort="[[sort]]" conditions="[[conditions]]" ></site-query> <div id="wrapper" class="wrapper"> <paper-icon-button icon="menu" id="menu" title="Open navigation" ></paper-icon-button> <span class="mobiletitle">[[mobileTitle]]</span> <slot name="prefix"></slot> <dom-repeat items="[[__items]]" mutable-data> <template> <div class="spacing"> <a data-id$="[[item.id]]" class="link" tabindex="-1" title$="Go to [[item.title]]" href$="[[item.location]]" > <paper-button id$="item-[[item.id]]" noink="[[noink]]"> <span class="link-index">[[humanIndex(index)]]</span> <span class="link-title">[[item.title]]</span> </paper-button> </a> </div> </template> </dom-repeat> <slot name="suffix"></slot> </div> <div id="indicator"></div> `; } /** * Props */ static get properties() { return { /** * manifest of everything, in case we need to check on children of parents */ manifest: { type: Object }, /** * acitvely selected item */ activeId: { type: String, observer: "_activeIdChanged" }, /** * visually stick to top of interface at all times */ sticky: { type: Boolean, reflectToAttribute: true, value: false }, /** * visualize the indicator as a a line, arrow, or not at all */ indicator: { type: String, reflectToAttribute: true, value: "line" }, /** * 
ink on the buttons */ noink: { type: Boolean, reflectToAttribute: true, value: false }, /** * hide title on the buttons */ notitle: { type: Boolean, reflectToAttribute: true, value: false }, /** * ink on the buttons */ showindex: { type: Boolean, reflectToAttribute: true, value: false }, /** * Stupid but faster then calculating on the fly for sure */ arrowSize: { type: Number, value: 6 }, /** * Allow customization of sort */ sort: { type: Object, value: { order: "ASC" } }, /** * Allow customization of the conditions if needed */ conditions: { type: Object, value: { parent: null } }, mobileTitle: { type: String, value: "Navigation" }, editMode: { type: Boolean, reflectToAttribute: true } }; } humanIndex(index) { return index + 1; } toggleOpen() { var wrapper = this.shadowRoot.querySelector("#wrapper"); if (wrapper.classList.contains("responsive")) { wrapper.classList.remove("responsive"); } else { wrapper.classList.add("responsive"); } } /** * When active ID changes, see if we know what to highlight automatically */ _activeIdChanged(newValue) { // as long as didn't disable the indicator, do this processing if (this.indicator != "none") { if (newValue) { this.$.indicator.classList.add("activated"); let el = null; //ensure that this level is included if (this.shadowRoot.querySelector('[data-id="' + newValue + '"]')) { el = this.shadowRoot.querySelector('[data-id="' + newValue + '"]'); } else { let tmpItem = this.manifest.items.find(i => i.id == newValue); // fallback, maybe there's a child of this currently active while (el === null && tmpItem && tmpItem.parent != null) { // take the parent object of this current item tmpItem = this.manifest.items.find(i => i.id == tmpItem.parent); // see if IT lives in the dom, if not, keep going until we run out if (tmpItem && this.shadowRoot.querySelector('[data-id="' + tmpItem.id + '"]')) { el = this.shadowRoot.querySelector('[data-id="' + tmpItem.id + '"]'); } } } if (this._prevEl) { this._prevEl.classList.remove("active"); } if 
(el) { el.classList.add("active"); this._prevEl = el; if (this.indicator == "arrow") { this.$.indicator.style.left = el.offsetLeft + el.offsetWidth / 2 - this.arrowSize + "px"; this.$.indicator.style.top = el.offsetTop + el.offsetHeight - this.arrowSize + "px"; } else { this.$.indicator.style.left = el.offsetLeft + "px"; this.$.indicator.style.top = el.offsetTop + el.offsetHeight + "px"; this.$.indicator.style.width = el.offsetWidth + "px"; } } } else { // shouldn't be possible but might as well list this.$.indicator.classList.remove("activated"); } } } connectedCallback() { super.connectedCallback(); this.shadowRoot.querySelector("#menu").addEventListener("click", this.toggleOpen.bind(this)); (0, _mobxModule.autorun)(reaction => { this.manifest = (0, _mobxModule.toJS)(_haxcmsSiteStore.store.manifest); this.__disposer.push(reaction); }); (0, _mobxModule.autorun)(reaction => { this.editMode = (0, _mobxModule.toJS)(_haxcmsSiteStore.store.editMode); this.__disposer.push(reaction); }); // minor timing thing to ensure store has picked active // needed if routes set on first paint or lifecycles miss setTimeout(() => { (0, _mobxModule.autorun)(reaction => { this.activeId = (0, _mobxModule.toJS)(_haxcmsSiteStore.store.activeId); this.__disposer.push(reaction); }); }, 50); window.addEventListener("resize", () => { this._activeIdChanged(this.activeId); }, true); } disconnectedCallback() { // clean up state for (var i in this.__disposer) { this.__disposer[i].dispose(); } window.removeEventListener("resize", () => { this._activeIdChanged(this.activeId); }, true); super.disconnectedCallback(); } } _exports.SiteTopMenu = SiteTopMenu; window.customElements.define(SiteTopMenu.tag, SiteTopMenu); });
Sxmurai/xeno-client-buildable
src/main/java/me/xenodevs/xeno/utils/render/builder/RenderBuilder.java
package me.xenodevs.xeno.utils.render.builder;

import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.util.math.BlockPos;

import java.awt.*;

import static org.lwjgl.opengl.GL11.*;

/**
 * Fluent builder around the GL state transitions used by world-space renderers
 * (ESP boxes, outlines, etc.). The instance methods record which GL states they
 * touched so that {@link #build()} can restore exactly those states afterwards;
 * the static {@code glSetup}/{@code glRelease} and {@code glPrepare}/{@code glRestore}
 * pairs are the non-tracking equivalents.
 *
 * @author linustouchtips
 * @since 01/12/2021
 */
public class RenderBuilder {

    /**
     * Pushes the matrix and enters the translucent, depth-ignoring, smooth-line
     * state used for through-wall rendering. Pair with {@link #glRelease()}.
     */
    public static void glSetup() {
        GlStateManager.pushMatrix();
        GlStateManager.enableBlend();
        GlStateManager.disableDepth();
        GlStateManager.tryBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ZERO, GL_ONE);
        GlStateManager.disableTexture2D();
        GlStateManager.depthMask(false);
        glEnable(GL_LINE_SMOOTH);
        glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
        glLineWidth(1.5f);
    }

    /** Restores the GL state changed by {@link #glSetup()} and pops the matrix. */
    public static void glRelease() {
        glDisable(GL_LINE_SMOOTH);
        GlStateManager.depthMask(true);
        GlStateManager.enableDepth();
        GlStateManager.enableTexture2D();
        GlStateManager.disableBlend();
        GlStateManager.popMatrix();
    }

    /** Disables culling/alpha and switches to smooth shading. Pair with {@link #glRestore()}. */
    public static void glPrepare() {
        GlStateManager.disableCull();
        GlStateManager.disableAlpha();
        GlStateManager.shadeModel(GL_SMOOTH);
    }

    /** Restores the GL state changed by {@link #glPrepare()}. */
    public static void glRestore() {
        GlStateManager.enableCull();
        GlStateManager.enableAlpha();
        GlStateManager.shadeModel(GL_FLAT);
    }

    public enum RenderMode {
        Fill,
        Outline,
        Both,
        Claw,
        Glow
    }

    // gl state flags — record which states the builder methods touched so build()
    // restores only those (each starts false = "not touched")
    private boolean setup = false;
    private boolean depth = false;
    private boolean blend = false;
    private boolean texture = false;
    private boolean cull = false;
    private boolean alpha = false;
    private boolean shade = false;

    // box geometry/appearance for the render pass
    private BlockPos blockPos = BlockPos.ORIGIN;
    private Box box = Box.FILL;
    private double height = 0;
    private double length = 0;
    private double width = 0;
    private Color color = Color.WHITE;

    /** Pushes the matrix and enables smooth lines with standard alpha blending. */
    public RenderBuilder setup() {
        GlStateManager.pushMatrix();
        GlStateManager.tryBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ONE, GL_ZERO);
        glEnable(GL_LINE_SMOOTH);
        glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
        setup = true;
        return this;
    }

    /** When {@code in} is true, disables the depth test and depth writes. */
    public RenderBuilder depth(boolean in) {
        if (in) {
            GlStateManager.disableDepth();
            GlStateManager.depthMask(false);
        }

        depth = in;
        return this;
    }

    /** Enables blending; build() will disable it again. */
    public RenderBuilder blend() {
        GlStateManager.enableBlend();
        blend = true;
        return this;
    }

    /** Disables 2D texturing; build() will enable it again. */
    public RenderBuilder texture() {
        GlStateManager.disableTexture2D();
        texture = true;
        return this;
    }

    /** Sets the GL line width (not tracked/restored by build()). */
    public RenderBuilder line(float width) {
        GlStateManager.glLineWidth(width);
        return this;
    }

    /** When {@code in} is true, disables back-face culling. */
    public RenderBuilder cull(boolean in) {
        // FIX: previously tested the field (`if (cull)`), which is always false
        // before the first call, so culling was never actually disabled.
        // Test the parameter instead, matching depth()/shade().
        if (in) {
            GlStateManager.disableCull();
        }

        cull = in;
        return this;
    }

    /** When {@code in} is true, disables the alpha test. */
    public RenderBuilder alpha(boolean in) {
        // FIX: same field-vs-parameter mixup as cull() — the alpha test was
        // never disabled because the flag was still false when checked.
        if (in) {
            GlStateManager.disableAlpha();
        }

        alpha = in;
        return this;
    }

    /** When {@code in} is true, switches to smooth shading. */
    public RenderBuilder shade(boolean in) {
        if (in) {
            GlStateManager.shadeModel(GL_SMOOTH);
        }

        shade = in;
        return this;
    }

    /** Restores every GL state a previous builder call touched, in reverse-ish order. */
    public RenderBuilder build() {
        if (depth) {
            GlStateManager.depthMask(true);
            GlStateManager.enableDepth();
        }

        if (texture) {
            GlStateManager.enableTexture2D();
        }

        if (blend) {
            GlStateManager.disableBlend();
        }

        if (cull) {
            GlStateManager.enableCull();
        }

        if (alpha) {
            GlStateManager.enableAlpha();
        }

        if (shade) {
            GlStateManager.shadeModel(GL_FLAT);
        }

        if (setup) {
            glDisable(GL_LINE_SMOOTH);
            GlStateManager.popMatrix();
        }

        return this;
    }

    public RenderBuilder position(BlockPos in) {
        blockPos = in;
        return this;
    }

    public RenderBuilder height(double in) {
        height = in;
        return this;
    }

    public RenderBuilder width(double in) {
        width = in;
        return this;
    }

    public RenderBuilder length(double in) {
        length = in;
        return this;
    }

    public RenderBuilder color(Color in) {
        color = in;
        return this;
    }

    public RenderBuilder box(Box in) {
        box = in;
        return this;
    }

    public BlockPos getBlockPos() {
        return blockPos;
    }

    public double getHeight() {
        return height;
    }

    public double getWidth() {
        return width;
    }

    public double getLength() {
        return length;
    }

    public Color getColor() {
        return color;
    }

    public Box getBox() {
        return box;
    }

    public enum Box {
        FILL,
        OUTLINE,
        BOTH,
        GLOW,
        REVERSE,
        CLAW,
        NONE
    }
}
BirdBrainTechnologies/BirdBlox-FinchBlox-Android
app/src/main/java/com/birdbraintechnologies/birdblox/Robots/RobotStates/RobotStateObjects/HBitBuzzer.java
package com.birdbraintechnologies.birdblox.Robots.RobotStates.RobotStateObjects;

import java.util.Arrays;

/**
 * Robot state object holding the buzzer frequency/duration pair for the
 * hummingbird:bit.
 *
 * NOTE(review): equals() is overridden without hashCode(); fine as long as
 * instances are never used as hash keys — verify before adding them to sets/maps.
 */
public class HBitBuzzer extends RobotStateObject {

    // Tone value sent to the device (see setValue for the MIDI conversion).
    private short frequency;
    // Tone duration in the device's time units.
    private short duration;

    /** Creates the "stopped" buzzer state: frequency 0, duration 1. */
    public HBitBuzzer() {
        this((short) 0, (short) 1);
    }

    /** Creates a buzzer state with an explicit frequency/duration pair. */
    public HBitBuzzer(short f, short d) {
        frequency = f;
        duration = d;
    }

    public short getDuration() {
        return duration;
    }

    public short getFrequency() {
        return frequency;
    }

    /** Returns {frequency, duration} as a two-element array. */
    public short[] getFD() {
        return new short[] {frequency, duration};
    }

    private void setFD(short f, short d) {
        frequency = f;
        duration = d;
    }

    private void setFD(byte f, byte d) {
        setFD((short) f, (short) d);
    }

    @Override
    public void setValue(int... values) {
        if (values.length != 2) {
            return;
        }
        if (values[0] == 0 && values[1] == 0) {
            // Both zero => stop the buzzer.
            setFD((short) 0, (short) 0);
            return;
        }
        // values[0] is treated as a MIDI note: 440 * 2^((note - 69) / 12) Hz.
        // The stored value is 1000/Hz*1000 — presumably the tone period in
        // microseconds expected by the firmware; verify against the BLE protocol.
        double hertz = Math.pow(2.0, ((short) values[0] * 1.0 - 69.0) / 12.0) * 440.0;
        setFD((short) (1000.0 / hertz * 1000.0), (short) values[1]);
    }

    @Override
    public void setValue(byte... values) {
        if (values.length == 2) {
            setFD(values[0], values[1]);
        }
    }

    @Override
    public boolean equals(Object buzzer) {
        if (this == buzzer) {
            return true;
        }
        if (buzzer == null || getClass() != buzzer.getClass()) {
            return false;
        }
        return Arrays.equals(getFD(), ((HBitBuzzer) buzzer).getFD());
    }
}
rossi1/go-learning
binary/07-get-network-and-hosts-of-an-ip-address/main.go
/* Calculates the Prefix, and the address range for that prefix. Using bitshift and logical AND (&) to split up the uint32 to uint8. Using logical AND (&) to get the network prefix, and logical XOR (^) to get the inverse value of the mask to calulate the range of addresses for that prefix. */ package main import ( "fmt" ) func main() { //The 32 bit unsigned int representing an ip address. var addr uint32 = 0xC0A87880 fmt.Printf("addr = %b, human readable = %v\n", addr, convertToOctets(addr)) //The 32 bit unsigned int representing an net mask. var mask uint32 = 0xFFFFFFE0 fmt.Printf("mask=%032b, human readable = %v\n", mask, convertToOctets(mask)) fmt.Println("-----------Get Prefix----------------------------------------------------------") prefix := getPrefix(addr, mask) fmt.Printf("The prefix=%032b, human readable=%v\n", prefix, convertToOctets(prefix)) fmt.Println("-----------Get Hosts----------------------------------------------------------") fmt.Println("Inverse mask = ", ^mask) maxHosts := prefix + ^mask fmt.Printf("starting at = %v, ending at = %v\n", convertToOctets(prefix), convertToOctets(maxHosts)) } //getPrefix will get the network portion of the address. // The ^ operator flips all the bits to it's opposite value, // meaning 0 becomes 1, and 1 becomes a 0. func getPrefix(addr uint32, mask uint32) uint32 { prefix := addr & mask //fmt.Printf("%032b, %032b, %032b\n", addr, mask, prefix) return prefix } //readOctets will read chuncs of 8 bits from the uint32 address, // and put each byte into a slice which will be returned from the function. func convertToOctets(addr uint32) []byte { var numBits uint32 = 0xFF bSlice := make([]byte, 4) for i := 3; i >= 0; i-- { //Take out the 8 least significant bits (lsb) by doing an and operation // with the binary value 11111111 which eqals FF in hexadecimal, // and append those 8 to the slice. b := addr & numBits if addr == 0 { //Check if all bits are shifted out. 
break } bSlice[i] = uint8(b) //Shift 8 bits out since we are done with them. addr = addr >> 8 } return bSlice }
onap/vfc-nfvo-driver-vnfm-svnfm
huawei/vnfmadapter/VnfmadapterService/service/src/main/java/org/onap/vfc/nfvo/vnfm/svnfm/vnfmadapter/service/api/internalsvc/impl/VnfmAdapter2DriverMgrService.java
/*
 * Copyright 2017 Huawei Technologies Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.api.internalsvc.impl;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;

import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.common.restclient.SystemEnvVariablesFactory;
import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.adapter.impl.VnfmAdapter2DriverManager;
import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.adapter.inf.IVnfmAdapter2DriverManager;
import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.api.internalsvc.inf.IVnfmAdapter2DriverMgrService;
import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.constant.Constant;
import org.onap.vfc.nfvo.vnfm.svnfm.vnfmadapter.service.constant.UrlConstant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.sf.json.JSONObject;

/**
 * Registers the VNFM adapter with the driver manager on a background thread,
 * retrying periodically until registration succeeds.
 * <br>
 *
 * @author
 * @version VFC 1.0 Jan 23, 2017
 */
public class VnfmAdapter2DriverMgrService implements IVnfmAdapter2DriverMgrService {

    private static final Logger LOG = LoggerFactory.getLogger(VnfmAdapter2DriverMgrService.class);

    /** File name of the adapter registration descriptor under etc/adapterInfo. */
    public static final String VNFMADAPTER2DRIVERMGR = "vnfmadapter2drivermgr.json";

    /**
     * Reads the adapter registration descriptor and, when present, submits an
     * asynchronous registration task towards the driver manager.
     */
    @Override
    public void register() {
        // set URL and mothedtype
        Map<String, String> paramsMap = new HashMap<>();
        paramsMap.put("url", UrlConstant.REST_DRIVERMGR_REGISTER);
        paramsMap.put("methodType", Constant.POST);

        // get vim adapter info and raise registration
        try {
            String adapterInfo = readVnfmAdapterInfoFromJson();

            if(!"".equals(adapterInfo)) {
                JSONObject adapterObject = JSONObject.fromObject(adapterInfo);
                RegisterVnfm2DriverMgrThread vnfmAdapterThread =
                        new RegisterVnfm2DriverMgrThread(paramsMap, adapterObject);
                Executors.newSingleThreadExecutor().submit(vnfmAdapterThread);
            } else {
                LOG.error("vnfmadapter2drivermgr info is null,please check!");
            }
        } catch(IOException e) {
            LOG.error("Failed to read vnfmadapter2drivermgr info! " + e.getMessage(), e);
        }
    }

    /**
     * Reads the VNFM adapter registration info JSON from etc/adapterInfo.
     *
     * @return the file content, or an empty string when the file is missing
     * @throws IOException when the file exists but cannot be read
     */
    public static String readVnfmAdapterInfoFromJson() throws IOException {
        String fileName = SystemEnvVariablesFactory.getInstance().getAppRoot()
                + System.getProperty(Constant.FILE_SEPARATOR) + "etc" + System.getProperty(Constant.FILE_SEPARATOR)
                + "adapterInfo" + System.getProperty(Constant.FILE_SEPARATOR) + VNFMADAPTER2DRIVERMGR;
        return readJson(fileName);
    }

    /**
     * Reads the whole content of the given file as a string.
     *
     * @param fileName absolute path of the file to read
     * @return the file content, or an empty string when the file does not exist
     * @throws IOException when reading fails for any other reason
     */
    public static String readJson(String fileName) throws IOException {
        String fileContent = "";
        try (InputStream ins = new FileInputStream(fileName);
                BufferedInputStream bins = new BufferedInputStream(ins)) {
            // Read until EOF: InputStream.available() only reports the bytes readable
            // without blocking, so sizing a buffer from it (as before) could silently
            // truncate the file. Accumulate fixed-size chunks instead.
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int num;
            while((num = bins.read(chunk)) != -1) {
                buffer.write(chunk, 0, num);
            }
            fileContent = buffer.toString();
        } catch(FileNotFoundException e) {
            LOG.error(fileName + "is not found!", e);
        }
        return fileContent;
    }

    /** Background task that keeps trying to register the adapter with the driver manager. */
    private static class RegisterVnfm2DriverMgrThread implements Runnable {

        private IVnfmAdapter2DriverManager adapter2DriverMgr = new VnfmAdapter2DriverManager();

        // url and mothedtype
        private Map<String, String> paramsMap;

        // driver body
        private JSONObject adapterInfo;

        public RegisterVnfm2DriverMgrThread(Map<String, String> paramsMap, JSONObject adapterInfo) {
            this.paramsMap = paramsMap;
            this.adapterInfo = adapterInfo;
        }

        @Override
        public void run() {
            LOG.info("start register vnfmadapter to Driver Manager", RegisterVnfm2DriverMgrThread.class);

            if(paramsMap == null || adapterInfo == null) {
                LOG.error("parameter is null,please check!", RegisterVnfm2DriverMgrThread.class);
                return;
            }

            // catch Runtime Exception
            try {
                sendRequest(paramsMap, adapterInfo);
            } catch(RuntimeException e) {
                LOG.error(e.getMessage(), e);
            }
        }

        /**
         * Sends the registration request, retrying every {@link Constant#REPEAT_REG_TIME}
         * ms until it succeeds or the thread is interrupted.
         * <p>
         * FIX: retries used to be implemented by unbounded recursion; a long driver
         * manager outage would grow the stack one frame per attempt until
         * StackOverflowError, and an interrupted sleep kept retrying without delay.
         * A loop retries in constant stack space and stops on interruption.
         */
        private void sendRequest(Map<String, String> paramsMap, JSONObject driverInfo) {
            while(!Thread.currentThread().isInterrupted()) {
                JSONObject resultObj = adapter2DriverMgr.registerDriver(paramsMap, driverInfo);
                if(Integer.valueOf(resultObj.get(Constant.RETCODE).toString()) == Constant.HTTP_CREATED) {
                    LOG.info("Vnfmadapter has now Successfully Registered to the Driver Manager!");
                    return;
                }
                LOG.error("Vnfmadapter failed to Register to the Driver Manager! Reason:"
                        + resultObj.get(Constant.REASON).toString() + " retCode:"
                        + resultObj.get(Constant.RETCODE).toString());

                // if registration fails,wait one minute and try again
                try {
                    Thread.sleep(Constant.REPEAT_REG_TIME);
                } catch(InterruptedException e) {
                    LOG.error(e.getMessage(), e);
                    // Restore interrupted state and stop retrying.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    @Override
    public void unregister() {
        // unregister
    }
}
MASAICommunity/MASAI-concierge-android-app
app/src/main/java/solutions/masai/masai/android/core/model/GoogleMapsAddress.java
package solutions.masai.masai.android.core.model;

/**
 * Minimal data model for a Google Maps Geocoding API response; only the
 * formatted address of each result is mapped.
 *
 * NOTE(review): the field names ({@code results}, {@code formatted_address})
 * presumably must match the JSON keys for whatever (de)serialization library
 * the caller uses — verify against the wire format before renaming anything.
 *
 * Created by Semko on 2017-03-03.
 */
public class GoogleMapsAddress {

    // Top-level "results" array of the geocoding response.
    private Result[] results;

    public Result[] getResults() {
        return results;
    }

    public void setResults(Result[] results) {
        this.results = results;
    }

    /** One geocoding result; only its "formatted_address" field is mapped. */
    public class Result {

        // Human-readable address string, e.g. "1600 Amphitheatre Pkwy, ...".
        private String formatted_address;

        public String getFormattedAddress() {
            return formatted_address;
        }

        public void setFormatted_address(String formatted_address) {
            this.formatted_address = formatted_address;
        }
    }
}
cliffano/jenkins-api-clients-generator
clients/go-echo-server/generated/models/model_empty_change_log_set.go
// Package models contains the generated data models for the Jenkins API client.
package models

// EmptyChangeLogSet represents a Jenkins change log set that contains no
// changes. Class carries the Jenkins "_class" type discriminator and Kind the
// change log kind; both are omitted from the JSON encoding when empty.
type EmptyChangeLogSet struct {
	Class string `json:"_class,omitempty"`
	Kind string `json:"kind,omitempty"`
}
lyne-bot/lyne-components
src/components/lyne-timetable-transportation-number/lyne-timetable-transportation-number.stories.js
// Storybook stories for `lyne-timetable-transportation-number`: each
// transportation mode from the sample data (bus, cable car, train, tram) is
// rendered once per appearance level ('first-level' / 'second-level').
// `h` must stay imported — it is the JSX factory used by the Template below.
import { h } from 'jsx-dom';
import readme from './readme.md';
import sampleData from './lyne-timetable-transportation-number.sample-data';

// Shared render template: forwards the appearance and serializes the config
// object into the component's string attribute.
const Template = (args) => (
  <lyne-timetable-transportation-number
    appearance={args.appearance}
    config={JSON.stringify(args.config)}
  >
  </lyne-timetable-transportation-number>
);

// Storybook control for the appearance arg (select between the two levels).
const appearance = {
  control: {
    type: 'select'
  },
  options: [
    'first-level',
    'second-level'
  ]
};

// The config arg has no interactive control but stays visible in the table.
const config = {
  table: {
    disable: false
  }
};

const defaultArgTypes = {
  appearance,
  config
};

// Default to 'first-level'; second-level stories override this per story.
const defaultArgs = {
  appearance: appearance.options[0]
};

/* ************************************************* */
/* The Stories                                       */
/* ************************************************* */

export const BusFirstLevel = Template.bind({});
BusFirstLevel.argTypes = defaultArgTypes;
BusFirstLevel.args = {
  ...defaultArgs,
  config: sampleData.bus
};
BusFirstLevel.documentation = {
  title: 'Bus - First Level'
};

export const BusSecondLevel = Template.bind({});
BusSecondLevel.argTypes = defaultArgTypes;
BusSecondLevel.args = {
  ...defaultArgs,
  appearance: appearance.options[1],
  config: sampleData.bus
};
BusSecondLevel.documentation = {
  title: 'Bus - Second Level'
};

export const CableCarFirstLevel = Template.bind({});
CableCarFirstLevel.argTypes = defaultArgTypes;
CableCarFirstLevel.args = {
  ...defaultArgs,
  config: sampleData.cableCar
};
CableCarFirstLevel.documentation = {
  title: 'Cable Car - First Level'
};

export const CableCarSecondLevel = Template.bind({});
CableCarSecondLevel.argTypes = defaultArgTypes;
CableCarSecondLevel.args = {
  ...defaultArgs,
  appearance: appearance.options[1],
  config: sampleData.cableCar
};
CableCarSecondLevel.documentation = {
  title: 'Cable Car - Second Level'
};

export const TrainFirstLevel = Template.bind({});
TrainFirstLevel.argTypes = defaultArgTypes;
TrainFirstLevel.args = {
  ...defaultArgs,
  config: sampleData.train
};
TrainFirstLevel.documentation = {
  title: 'Train - First Level'
};

export const TrainSecondLevel = Template.bind({});
TrainSecondLevel.argTypes = defaultArgTypes;
TrainSecondLevel.args = {
  ...defaultArgs,
  appearance: appearance.options[1],
  config: sampleData.train
};
TrainSecondLevel.documentation = {
  title: 'Train - Second Level'
};

export const TramFirstLevel = Template.bind({});
TramFirstLevel.argTypes = defaultArgTypes;
TramFirstLevel.args = {
  ...defaultArgs,
  config: sampleData.tram
};
TramFirstLevel.documentation = {
  title: 'Tram - First Level'
};

export const TramSecondLevel = Template.bind({});
TramSecondLevel.argTypes = defaultArgTypes;
TramSecondLevel.args = {
  ...defaultArgs,
  appearance: appearance.options[1],
  config: sampleData.tram
};
TramSecondLevel.documentation = {
  title: 'Tram - Second Level'
};

// Default export: story metadata; the readme is surfaced as the docs text.
export default {
  decorators: [
    (Story) => (
      <Story/>
    )
  ],
  parameters: {
    backgrounds: {
      disable: true
    },
    docs: {
      extractComponentDescription: () => readme
    }
  },
  title: 'internals/lyne-timetable-transportation-number'
};
kpi-keoa/TheConnectedMCU_Labs
alexskp/lab4-UART_SPI/docs/html/search/files_0.js
// Doxygen-generated search index: maps lowercase search keys to
// [display name, [target url, ...]] tuples. Regenerated with the docs —
// do not edit by hand.
var searchData=
[
  ['chrfont0_2ec',['ChrFont0.c',['../_chr_font0_8c.html',1,'']]],
  ['configuration_5fbits_2ec',['configuration_bits.c',['../configuration__bits_8c.html',1,'']]]
];
nistefan/cmssw
RecoTracker/TkHitPairs/interface/LayerWithHits.h
<reponame>nistefan/cmssw<gh_stars>1-10 #ifndef LayerWithHits_H #define LayerWithHits_H #include "DataFormats/TrackingRecHit/interface/TrackingRecHit.h" #include "DataFormats/TrackerRecHit2D/interface/SiPixelRecHitCollection.h" #include "DataFormats/TrackerRecHit2D/interface/SiStripRecHit2DCollection.h" #include "DataFormats/TrackerRecHit2D/interface/SiStripMatchedRecHit2DCollection.h" #include "DataFormats/Common/interface/DetSetVectorNew.h" #include "DataFormats/Common/interface/DetSetAlgorithm.h" #include "TrackingTools/DetLayers/interface/DetLayer.h" class LayerWithHits { public: LayerWithHits(const DetLayer *dl,const std::vector<const TrackingRecHit*>& theInputHits): theDetLayer(dl),theHits(theInputHits){} /// Usage: /// edm::ESHandle<TrackerTopology> httopo; /// iSetup.get<TrackerTopologyRcd>().get(httopo); /// const TrackerTopology& ttopo = *httopo; /// LayerWithHits( theLayer, collrphi, ttopo.tibDetIdLayerComparator(1) ); template <typename DSTV, typename SEL> LayerWithHits(const DetLayer *dl, DSTV const & allhits, SEL const & sel) { theDetLayer = dl; edmNew::copyDetSetRange(allhits,theHits,sel); } //destructor ~LayerWithHits(){} /// return the recHits of the Layer const std::vector<const TrackingRecHit*>& recHits() const {return theHits;} //detlayer const DetLayer* layer() const {return theDetLayer;} private: const DetLayer* theDetLayer; std::vector<const TrackingRecHit*> theHits; }; #endif
supreethms1809/magma-2.2.0
docs/html/search/classes_5.js
// Doxygen-generated search index: maps lowercase search keys to
// [display name, [target url, ...]] tuples. Regenerated with the docs —
// do not edit by hand.
var searchData=
[
  ['sgehrd_5fdata',['sgehrd_data',['../structsgehrd__data.html',1,'']]],
  ['sgemm_5ftask',['sgemm_task',['../classsgemm__task.html',1,'']]],
  ['slaswp_5fparams_5ft',['slaswp_params_t',['../structslaswp__params__t.html',1,'']]],
  ['slaswp_5fsym_5fparams_5ft',['slaswp_sym_params_t',['../structslaswp__sym__params__t.html',1,'']]]
];
SHIVJITH/Odoo_Machine_Test
addons/mail/static/src/js/custom_filter_item.js
odoo.define('mail.CustomFilterItem', function (require) { "use strict"; const CustomFilterItem = require('web.CustomFilterItem'); CustomFilterItem.patch('mail.CustomFilterItem', T => class extends T { /** * With the `mail` module installed, we want to filter out some of the * available fields in 'Add custom filter' menu (@see CustomFilterItem). * @override */ _validateField(field) { return super._validateField(...arguments) && field.relation !== 'mail.message' && field.name !== 'message_ids'; } }); return CustomFilterItem; });
google-cloud-sdk-unofficial/google-cloud-sdk
lib/googlecloudsdk/third_party/apis/transcoder/v1beta1/transcoder_v1beta1_client.py
"""Generated client library for transcoder version v1beta1.""" # NOTE: This file is autogenerated and should not be edited by hand. from __future__ import absolute_import from apitools.base.py import base_api from googlecloudsdk.third_party.apis.transcoder.v1beta1 import transcoder_v1beta1_messages as messages class TranscoderV1beta1(base_api.BaseApiClient): """Generated client library for service transcoder version v1beta1.""" MESSAGES_MODULE = messages BASE_URL = 'https://transcoder.googleapis.com/' MTLS_BASE_URL = 'https://transcoder.mtls.googleapis.com/' _PACKAGE = 'transcoder' _SCOPES = ['https://www.googleapis.com/auth/cloud-platform'] _VERSION = 'v1beta1' _CLIENT_ID = '1042881264118.apps.googleusercontent.com' _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' _USER_AGENT = 'google-cloud-sdk' _CLIENT_CLASS_NAME = 'TranscoderV1beta1' _URL_VERSION = 'v1beta1' _API_KEY = None def __init__(self, url='', credentials=None, get_credentials=True, http=None, model=None, log_request=False, log_response=False, credentials_args=None, default_global_params=None, additional_http_headers=None, response_encoding=None): """Create a new transcoder handle.""" url = url or self.BASE_URL super(TranscoderV1beta1, self).__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, log_request=log_request, log_response=log_response, credentials_args=credentials_args, default_global_params=default_global_params, additional_http_headers=additional_http_headers, response_encoding=response_encoding) self.projects_locations_jobTemplates = self.ProjectsLocationsJobTemplatesService(self) self.projects_locations_jobs = self.ProjectsLocationsJobsService(self) self.projects_locations = self.ProjectsLocationsService(self) self.projects = self.ProjectsService(self) class ProjectsLocationsJobTemplatesService(base_api.BaseApiService): """Service class for the projects_locations_jobTemplates resource.""" _NAME = 'projects_locations_jobTemplates' def __init__(self, 
client): super(TranscoderV1beta1.ProjectsLocationsJobTemplatesService, self).__init__(client) self._upload_configs = { } def Create(self, request, global_params=None): r"""Creates a job template in the specified region. Args: request: (TranscoderProjectsLocationsJobTemplatesCreateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (JobTemplate) The response message. """ config = self.GetMethodConfig('Create') return self._RunMethod( config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobTemplates', http_method='POST', method_id='transcoder.projects.locations.jobTemplates.create', ordered_params=['parent'], path_params=['parent'], query_params=['jobTemplateId'], relative_path='v1beta1/{+parent}/jobTemplates', request_field='jobTemplate', request_type_name='TranscoderProjectsLocationsJobTemplatesCreateRequest', response_type_name='JobTemplate', supports_download=False, ) def Delete(self, request, global_params=None): r"""Deletes a job template. Args: request: (TranscoderProjectsLocationsJobTemplatesDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Empty) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobTemplates/{jobTemplatesId}', http_method='DELETE', method_id='transcoder.projects.locations.jobTemplates.delete', ordered_params=['name'], path_params=['name'], query_params=[], relative_path='v1beta1/{+name}', request_field='', request_type_name='TranscoderProjectsLocationsJobTemplatesDeleteRequest', response_type_name='Empty', supports_download=False, ) def Get(self, request, global_params=None): r"""Returns the job template data. 
Args: request: (TranscoderProjectsLocationsJobTemplatesGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (JobTemplate) The response message. """ config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobTemplates/{jobTemplatesId}', http_method='GET', method_id='transcoder.projects.locations.jobTemplates.get', ordered_params=['name'], path_params=['name'], query_params=[], relative_path='v1beta1/{+name}', request_field='', request_type_name='TranscoderProjectsLocationsJobTemplatesGetRequest', response_type_name='JobTemplate', supports_download=False, ) def List(self, request, global_params=None): r"""Lists job templates in the specified region. Args: request: (TranscoderProjectsLocationsJobTemplatesListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ListJobTemplatesResponse) The response message. 
""" config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobTemplates', http_method='GET', method_id='transcoder.projects.locations.jobTemplates.list', ordered_params=['parent'], path_params=['parent'], query_params=['pageSize', 'pageToken'], relative_path='v1beta1/{+parent}/jobTemplates', request_field='', request_type_name='TranscoderProjectsLocationsJobTemplatesListRequest', response_type_name='ListJobTemplatesResponse', supports_download=False, ) class ProjectsLocationsJobsService(base_api.BaseApiService): """Service class for the projects_locations_jobs resource.""" _NAME = 'projects_locations_jobs' def __init__(self, client): super(TranscoderV1beta1.ProjectsLocationsJobsService, self).__init__(client) self._upload_configs = { } def Create(self, request, global_params=None): r"""Creates a job in the specified region. Args: request: (TranscoderProjectsLocationsJobsCreateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Job) The response message. """ config = self.GetMethodConfig('Create') return self._RunMethod( config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobs', http_method='POST', method_id='transcoder.projects.locations.jobs.create', ordered_params=['parent'], path_params=['parent'], query_params=[], relative_path='v1beta1/{+parent}/jobs', request_field='job', request_type_name='TranscoderProjectsLocationsJobsCreateRequest', response_type_name='Job', supports_download=False, ) def Delete(self, request, global_params=None): r"""Deletes a job. 
Args: request: (TranscoderProjectsLocationsJobsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Empty) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobs/{jobsId}', http_method='DELETE', method_id='transcoder.projects.locations.jobs.delete', ordered_params=['name'], path_params=['name'], query_params=[], relative_path='v1beta1/{+name}', request_field='', request_type_name='TranscoderProjectsLocationsJobsDeleteRequest', response_type_name='Empty', supports_download=False, ) def Get(self, request, global_params=None): r"""Returns the job data. Args: request: (TranscoderProjectsLocationsJobsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Job) The response message. """ config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobs/{jobsId}', http_method='GET', method_id='transcoder.projects.locations.jobs.get', ordered_params=['name'], path_params=['name'], query_params=[], relative_path='v1beta1/{+name}', request_field='', request_type_name='TranscoderProjectsLocationsJobsGetRequest', response_type_name='Job', supports_download=False, ) def List(self, request, global_params=None): r"""Lists jobs in the specified region. Args: request: (TranscoderProjectsLocationsJobsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ListJobsResponse) The response message. 
""" config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/jobs', http_method='GET', method_id='transcoder.projects.locations.jobs.list', ordered_params=['parent'], path_params=['parent'], query_params=['pageSize', 'pageToken'], relative_path='v1beta1/{+parent}/jobs', request_field='', request_type_name='TranscoderProjectsLocationsJobsListRequest', response_type_name='ListJobsResponse', supports_download=False, ) class ProjectsLocationsService(base_api.BaseApiService): """Service class for the projects_locations resource.""" _NAME = 'projects_locations' def __init__(self, client): super(TranscoderV1beta1.ProjectsLocationsService, self).__init__(client) self._upload_configs = { } class ProjectsService(base_api.BaseApiService): """Service class for the projects resource.""" _NAME = 'projects' def __init__(self, client): super(TranscoderV1beta1.ProjectsService, self).__init__(client) self._upload_configs = { }
taylrj/static-fe-boilerplate
slave-fishermen-human-trafficking/src/data/img-src.js
<filename>slave-fishermen-human-trafficking/src/data/img-src.js import icon_scroll from '../../static/icon-scroll.png' import logo from '../../static/logo-horizontal01-white.png' import mute from '../../static/icon-mute.png' import pause_hover from '../../static/icon-pause-hover.png' import pause from '../../static/icon-pause.png' import play_hover from '../../static/icon-play-hover.png'  import play from '../../static/icon-play.png' import scroll from '../../static/icon-scroll.png' //import loading from '../../static/icon-loading.gif' import sidebar_close from '../../static/icon-sidebar-close.png' import sidebar_open from '../../static/icon-sidebar-open.png' import soundOn from '../../static/icon-sound-on.png' import popup_close from '../../static/icon-popup-close.png' import swipe_left from '../../static/icon-swipe-left.png' import back_to_top from '../../static/icon-back-to-top.png' import fishingShip from '../../static/fishing2.0-ship.png' import topicpage from '../../static/icon-topic.png' // pictures import m_photo_1 from '../../static/pictures/m-photo-1.png' import t_photo_1 from '../../static/pictures/t-photo-1.png' import d_photo_1 from '../../static/pictures/d-photo-1.png' import m_photo_2 from '../../static/pictures/m-photo-2.png' import t_photo_2 from '../../static/pictures/t-photo-2.png' import d_photo_2 from '../../static/pictures/d-photo-2.png' import m_photo_3 from '../../static/pictures/m-photo-3.png' import t_photo_3 from '../../static/pictures/t-photo-3.png' import d_photo_3 from '../../static/pictures/d-photo-3.png' import m_photo_4 from '../../static/pictures/m-photo-4.png' import t_photo_4 from '../../static/pictures/t-photo-4.png' import d_photo_4 from '../../static/pictures/d-photo-4.png' import d11 from '../../static/pictures/1/d/desktop-1.jpg' import d12 from '../../static/pictures/1/d/desktop-2.jpg' import d13 from '../../static/pictures/1/d/desktop-3.jpg' import d14 from '../../static/pictures/1/d/desktop-4.jpg' import m11 from 
'../../static/pictures/1/m/mobile-1.jpg' import m12 from '../../static/pictures/1/m/mobile-2.jpg' import m13 from '../../static/pictures/1/m/mobile-3.jpg' import m14 from '../../static/pictures/1/m/mobile-4.jpg' import d21 from '../../static/pictures/2/d/desktop-1.jpg' import d22 from '../../static/pictures/2/d/desktop-2.jpg' import d23 from '../../static/pictures/2/d/desktop-3.jpg' import d24 from '../../static/pictures/2/d/desktop-4.jpg' import d25 from '../../static/pictures/2/d/desktop-5.jpg' import d26 from '../../static/pictures/2/d/desktop-6.jpg' import d27 from '../../static/pictures/2/d/desktop-7.jpg' import d28 from '../../static/pictures/2/d/desktop-8.jpg' import d29 from '../../static/pictures/2/d/desktop-9.jpg' import m21 from '../../static/pictures/2/m/mobile-1.jpg' import m22 from '../../static/pictures/2/m/mobile-2.jpg' import m23 from '../../static/pictures/2/m/mobile-3.jpg' import m24 from '../../static/pictures/2/m/mobile-4.jpg' import m25 from '../../static/pictures/2/m/mobile-5.jpg' import m26 from '../../static/pictures/2/m/mobile-6.jpg' import m27 from '../../static/pictures/2/m/mobile-7.jpg' import m28 from '../../static/pictures/2/m/mobile-8.jpg' import m29 from '../../static/pictures/2/m/mobile-9.jpg' import d31 from '../../static/pictures/3/d/desktop-1.jpg' import d32 from '../../static/pictures/3/d/desktop-2.jpg' import m31 from '../../static/pictures/3/m/mobile-1.jpg' import m32 from '../../static/pictures/3/m/mobile-2.jpg' import d41 from '../../static/pictures/4/d/desktop-1.jpg' import d42 from '../../static/pictures/4/d/desktop-2.jpg' import d43 from '../../static/pictures/4/d/desktop-3.jpg' import m41 from '../../static/pictures/4/m/mobile-1.jpg' import m42 from '../../static/pictures/4/m/mobile-2.jpg' import m43 from '../../static/pictures/4/m/mobile-3.jpg' import d51 from '../../static/pictures/5/d/desktop-1.jpg' import d52 from '../../static/pictures/5/d/desktop-2.jpg' import d53 from '../../static/pictures/5/d/desktop-3.jpg' 
import d54 from '../../static/pictures/5/d/desktop-4.jpg' import d55 from '../../static/pictures/5/d/desktop-5.jpg' import m51 from '../../static/pictures/5/m/mobile-1.jpg' import m52 from '../../static/pictures/5/m/mobile-2.jpg' import m53 from '../../static/pictures/5/m/mobile-3.jpg' import m54 from '../../static/pictures/5/m/mobile-4.jpg' import m55 from '../../static/pictures/5/m/mobile-5.jpg' import d61 from '../../static/pictures/6/d/desktop-1.jpg' import d62 from '../../static/pictures/6/d/desktop-2.jpg' import d63 from '../../static/pictures/6/d/desktop-3.jpg' import m61 from '../../static/pictures/6/m/mobile-1.jpg' import m62 from '../../static/pictures/6/m/mobile-2.jpg' import m63 from '../../static/pictures/6/m/mobile-3.jpg' // opening import opening_desktop from '../../static/opening/opening-setting-desktop-new.jpg' import opening_tablet from '../../static/opening/opening-setting-tablet-new.jpg' import opening_mobile from '../../static/opening/opening-setting-mobile-new.jpg' // graphics import g_1_1_d from '../../static/graphics/graphic-1-1-d-.png' import g_1_1_m from '../../static/graphics/graphic-1-1-m-.png' import g_2_1_d from '../../static/graphics/graphic-2-1-d-.png' import g_2_1_m from '../../static/graphics/graphic-2-1-m-.png' import g_3_1_d from '../../static/graphics/graphic---3-1-d.png' import g_3_1_m from '../../static/graphics/graphic---3-1-m.png' import g_4_1_d from '../../static/graphics/graphic--4-1-d.png' import g_4_1_m from '../../static/graphics/graphic--4-1-m.png' import g_5_1 from '../../static/graphics/graphic--5-1.png' import g_5_2 from '../../static/graphics/graphic--5-2.png' import g_5_3_d from '../../static/graphics/graphic--5-3-d.png' import g_5_3_m from '../../static/graphics/graphic--5-3-m.png' // section opening import image1 from '../../static/pictures/image-1.png' import image2 from '../../static/pictures/image-2.png' import image3 from '../../static/pictures/image-3.png' import image4 from 
'../../static/pictures/image-4.png' import opening_title from '../../static/opening-title.png' import opening_subtitle from '../../static/opening-subtitle.png' // video posters import big_ship from '../../static/videos/big-ship.png' import boat_house from '../../static/videos/boat-house.png' import protest from '../../static/videos/protest.png' import ship_on_sea from '../../static/videos/ship-on-sea.png' import workers from '../../static/videos/workers.png' import country1 from '../../static/icon-country-1.png' import country2 from '../../static/icon-country-2.png' import country3 from '../../static/icon-country-3.png' // TODO: outsource link (url) import tinyog1 from '../../static/og-image-tiny-1.jpg' import tinyog2 from '../../static/og-image-tiny-2.jpg' import tinyog3 from '../../static/og-image-tiny-3.jpg' import tinyog4 from '../../static/og-image-tiny-4.jpg' import tinyog5 from '../../static/og-image-tiny-5.jpg' import tinyog6 from '../../static/og-image-tiny-6.jpg' const icons = { "icon_scroll": icon_scroll, "logo": logo, "soundOn": soundOn, "mute": mute, "pause_hover": pause_hover, "pause": pause, "play_hover": play_hover, "play": play, "scroll": scroll, "sidebar_close": sidebar_close, "sidebar_open": sidebar_open, "popup_close": popup_close, "swipe_left": swipe_left, "back_to_top": back_to_top, //"loading": loading, "fishingShip": fishingShip, "topicpage": topicpage } const opening = { "opening_desktop": opening_desktop, "opening_mobile": opening_mobile, "opening_tablet": opening_tablet } const photo_story = { "m_photo_1": m_photo_1, "t_photo_1": t_photo_1, "d_photo_1": d_photo_1, "m_photo_2": m_photo_2, "t_photo_2": t_photo_2, "d_photo_2": d_photo_2, "m_photo_3": m_photo_3, "t_photo_3": t_photo_3, "d_photo_3": d_photo_3, "m_photo_4": m_photo_4, "t_photo_4": t_photo_4, "d_photo_4": d_photo_4 } const pictures = { "d11": d11, "d12": d12, "d13": d13, "d14": d14, "m11": m11, "m12": m12, "m13": m13, "m14": m14, "d21": d21, "d22": d22, "d23": d23, "d24": d24, 
"d25": d25, "d26": d26, "d27": d27, "d28": d28, "d29": d29, "m21": m21, "m22": m22, "m23": m23, "m24": m24, "m25": m25, "m26": m26, "m27": m27, "m28": m28, "m29": m29, "d31": d31, "d32": d32, "m31": m31, "m32": m32, "d41": d41, "d42": d42, "d43": d43, "m41": m41, "m42": m42, "m43": m43, "d51": d51, "d52": d52, "d53": d53, "d54": d54, "d55": d55, "m51": m51, "m52": m52, "m53": m53, "m54": m54, "m55": m55, "d61": d61, "d62": d62, "d63": d63, "m61": m61, "m62": m62, "m63": m63, "out41": "https://scontent.ftpe4-1.fna.fbcdn.net/v/t1.0-9/40276482_2675423709149755_7721850993431805952_n.jpg?_nc_cat=101&_nc_ht=scontent.ftpe4-1.fna&oh=b85c3dd46b940d958fab1477cd5f5539&oe=5CA21ACF", "out42": "https://eunavfor.eu/wp-content/uploads/2013/07/20130726_nicehl01_ME03_NAHAM3_NE2L2450.jpg", "opening_title": opening_title, "opening_subtitle": opening_subtitle } const graphics = { "g_1_1_d": g_1_1_d, "g_1_1_m": g_1_1_m, "g_2_1_d": g_2_1_d, "g_2_1_m": g_2_1_m, "g_3_1_d": g_3_1_d, "g_3_1_m": g_3_1_m, "g_4_1_d": g_4_1_d, "g_4_1_m": g_4_1_m, "g_5_1": g_5_1, "g_5_2": g_5_2, "g_5_3_d": g_5_3_d, "g_5_3_m": g_5_3_m, } const forMapTooltips = { "country1": country1, "country2": country2, "country3": country3, } const videoPosters = { "big_ship":big_ship, "boat_house":boat_house, "protest":protest, "ship_on_sea":ship_on_sea, "workers":workers, } const tinyog = { "tinyog1": tinyog1, "tinyog2": tinyog2, "tinyog3": tinyog3, "tinyog4": tinyog4, "tinyog5": tinyog5, "tinyog6": tinyog6, } export default { ...icons, ...opening, ...pictures, ...graphics, ...videoPosters, ...forMapTooltips, ...photo_story, ...tinyog }
v8786339/NyuziProcessor
software/libs/librender/LinearInterpolator.h
<gh_stars>1000+ // // Copyright 2011-2015 <NAME> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #pragma once #include "SIMDMath.h" namespace librender { // // 2D linear interpolator. // class LinearInterpolator { public: void init(float xGradient, float yGradient, float c00) { fXGradient = xGradient; fYGradient = yGradient; fC00 = c00; } // Return values of this parameter at 16 locations given by the vectors // x and y. inline vecf16_t getValuesAt(vecf16_t x, vecf16_t y) const { return x * fXGradient + y * fYGradient + fC00; } private: float fXGradient; float fYGradient; float fC00; // Value of C at 0, 0 }; } // namespace librender
dmgerman/hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/handlers/BufferPusher.java
<reponame>dmgerman/hadoop begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1 begin_comment comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ end_comment begin_package DECL|package|org.apache.hadoop.mapred.nativetask.handlers package|package name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|handlers package|; end_package begin_import import|import name|java operator|. name|io operator|. name|IOException import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|classification operator|. name|InterfaceAudience import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|OutputCollector import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|NativeDataTarget import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|buffer operator|. 
name|ByteBufferDataWriter import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|serde operator|. name|IKVSerializer import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|serde operator|. name|KVSerializer import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|hadoop operator|. name|mapred operator|. name|nativetask operator|. name|util operator|. name|SizedWritable import|; end_import begin_import import|import name|org operator|. name|slf4j operator|. name|Logger import|; end_import begin_import import|import name|org operator|. name|slf4j operator|. name|LoggerFactory import|; end_import begin_comment comment|/** * actively push data into a buffer and signal a {@link BufferPushee} to collect it */ end_comment begin_class annotation|@ name|InterfaceAudience operator|. name|Private DECL|class|BufferPusher specifier|public class|class name|BufferPusher parameter_list|< name|K parameter_list|, name|V parameter_list|> implements|implements name|OutputCollector argument_list|< name|K argument_list|, name|V argument_list|> block|{ DECL|field|LOG specifier|private specifier|static specifier|final name|Logger name|LOG init|= name|LoggerFactory operator|. name|getLogger argument_list|( name|BufferPusher operator|. 
name|class argument_list|) decl_stmt|; DECL|field|tmpInputKey specifier|private specifier|final name|SizedWritable argument_list|< name|K argument_list|> name|tmpInputKey decl_stmt|; DECL|field|tmpInputValue specifier|private specifier|final name|SizedWritable argument_list|< name|V argument_list|> name|tmpInputValue decl_stmt|; DECL|field|out specifier|private name|ByteBufferDataWriter name|out decl_stmt|; DECL|field|serializer name|IKVSerializer name|serializer decl_stmt|; DECL|field|closed specifier|private name|boolean name|closed init|= literal|false decl_stmt|; DECL|method|BufferPusher (Class<K> iKClass, Class<V> iVClass, NativeDataTarget target) specifier|public name|BufferPusher parameter_list|( name|Class argument_list|< name|K argument_list|> name|iKClass parameter_list|, name|Class argument_list|< name|V argument_list|> name|iVClass parameter_list|, name|NativeDataTarget name|target parameter_list|) throws|throws name|IOException block|{ name|tmpInputKey operator|= operator|new name|SizedWritable argument_list|< name|K argument_list|> argument_list|( name|iKClass argument_list|) expr_stmt|; name|tmpInputValue operator|= operator|new name|SizedWritable argument_list|< name|V argument_list|> argument_list|( name|iVClass argument_list|) expr_stmt|; if|if condition|( literal|null operator|!= name|iKClass operator|&& literal|null operator|!= name|iVClass condition|) block|{ name|this operator|. name|serializer operator|= operator|new name|KVSerializer argument_list|< name|K argument_list|, name|V argument_list|> argument_list|( name|iKClass argument_list|, name|iVClass argument_list|) expr_stmt|; block|} name|this operator|. 
name|out operator|= operator|new name|ByteBufferDataWriter argument_list|( name|target argument_list|) expr_stmt|; block|} DECL|method|collect (K key, V value, int partition) specifier|public name|void name|collect parameter_list|( name|K name|key parameter_list|, name|V name|value parameter_list|, name|int name|partition parameter_list|) throws|throws name|IOException block|{ name|tmpInputKey operator|. name|reset argument_list|( name|key argument_list|) expr_stmt|; name|tmpInputValue operator|. name|reset argument_list|( name|value argument_list|) expr_stmt|; name|serializer operator|. name|serializePartitionKV argument_list|( name|out argument_list|, name|partition argument_list|, name|tmpInputKey argument_list|, name|tmpInputValue argument_list|) expr_stmt|; block|} empty_stmt|; annotation|@ name|Override DECL|method|collect (K key, V value) specifier|public name|void name|collect parameter_list|( name|K name|key parameter_list|, name|V name|value parameter_list|) throws|throws name|IOException block|{ if|if condition|( name|closed condition|) block|{ return|return; block|} name|tmpInputKey operator|. name|reset argument_list|( name|key argument_list|) expr_stmt|; name|tmpInputValue operator|. name|reset argument_list|( name|value argument_list|) expr_stmt|; name|serializer operator|. name|serializeKV argument_list|( name|out argument_list|, name|tmpInputKey argument_list|, name|tmpInputValue argument_list|) expr_stmt|; block|} empty_stmt|; DECL|method|flush () specifier|public name|void name|flush parameter_list|() throws|throws name|IOException block|{ if|if condition|( literal|null operator|!= name|out condition|) block|{ if|if condition|( name|out operator|. name|hasUnFlushedData argument_list|() condition|) block|{ name|out operator|. 
name|flush argument_list|() expr_stmt|; block|} block|} block|} DECL|method|close () specifier|public name|void name|close parameter_list|() throws|throws name|IOException block|{ if|if condition|( name|closed condition|) block|{ return|return; block|} if|if condition|( literal|null operator|!= name|out condition|) block|{ name|out operator|. name|close argument_list|() expr_stmt|; block|} name|closed operator|= literal|true expr_stmt|; block|} block|} end_class end_unit
zinedistro/zinedistro
spec/features/site_spec.rb
require "rails_helper"

# Smoke tests for site-wide chrome rendered on the home page (footer links
# and attribution text).
feature 'Browsing the site' do
  before do
    # Every scenario starts from the home page.
    visit root_path
  end

  context 'Site footer' do
    scenario 'I can see attribution content' do
      within 'footer#bottom' do
        # NOTE(review): the block passed to Capybara's `find` acts as an
        # element *filter*, not a scoped context like `within`. The
        # expectation does run, but scoping with `within("a[href=...]")`
        # would be the conventional form — confirm intent.
        find("a[href='https://twitter.com/faunzy']") do
          expect(page).to have_content 'maintained by Faun'
        end
      end
    end

    scenario 'I can see a link to the resistance army site' do
      within 'footer#bottom' do
        # Same find-with-filter-block pattern as above; see note there... no —
        # NOTE(review): same `find` filter-block subtlety as the scenario
        # above applies here.
        find("a[href='http://theresistancearmy.com/']") do
          expect(page).to have_content 'A Resistance Army Project'
        end
      end
    end
  end
end
zoopaper/netty-study
src/main/java/org/netty/study/protobuf/OrderServerHandler.java
<reponame>zoopaper/netty-study
package org.netty.study.protobuf;

import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;

/**
 * Demo server-side handler that consumes already-decoded protobuf
 * {@code OrderRequestProto.OrderRequest} messages and answers with an
 * {@code OrderResponseProto.OrderResponse}.
 *
 * @author krisjin
 */
public class OrderServerHandler extends ChannelHandlerAdapter {

    /**
     * Handles one inbound, decoded order request.
     *
     * NOTE(review): requests whose user name is not "krisjin" are dropped
     * without any response being written — presumably intentional for this
     * demo, but confirm; also verify whether the inbound message needs an
     * explicit release in this Netty version.
     */
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        OrderRequestProto.OrderRequest req = (OrderRequestProto.OrderRequest) msg;
        if ("krisjin".equals(req.getUserName())) {
            System.out.println("Order Service accept client request:\n" + req.toString());
            // Echo the order id back with a success status.
            ctx.writeAndFlush(buildResponse(req.getOrderId()));
        }
    }

    /**
     * Builds a success response (statusCode 0) carrying the given order id.
     */
    private OrderResponseProto.OrderResponse buildResponse(int id) {
        OrderResponseProto.OrderResponse.Builder builder = OrderResponseProto.OrderResponse.newBuilder();
        builder.setOrderId(id);
        builder.setStatusCode(0);
        builder.setDesc("Success accept order ,and send to address 好");
        return builder.build();
    }

    /**
     * Prints the stack trace and closes the channel on any pipeline error.
     */
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        cause.printStackTrace();
        ctx.close();
    }
}
ernestyalumni/HrdwCCppCUDA
Voltron/Voltron/DataStructures/level_easy.py
from collections import deque

################################################################################
# @details
#
# DataStructures : deque, Queue, Binary Tree, Binary Search Tree (BST)
#
# NOTE(review): two node attribute conventions coexist in this module.
# BinaryTree below uses .value/.left/.right, while the "closest value",
# branch-sum, and iterative helpers read .value_/.left_/.right_ (trailing
# underscore). The two are NOT interchangeable — passing a BinaryTree to
# find_closest_value_in_bst would raise AttributeError. Confirm which node
# type the underscore-style helpers are meant to operate on.
################################################################################

class BinaryTree:
    # A plain binary-tree node: a value plus optional left/right children.
    def __init__(self, value):
        self.value = value
        # Children start unset; callers wire them up after construction.
        self.left = None
        self.right = None

def _make_disparate_input_nodes(input_list):
    """Wrap each element of input_list in its own unconnected BinaryTree node."""
    result = []
    for element in input_list:
        result.append(BinaryTree(element))
    return result

def _get_next_level_children(previous_deque):
    """
    Collect the children (the next BFS level) of every node in previous_deque.

    @details This will pop all elements out of input previous_deque, i.e. the
    input deque is emptied as a side effect.
    """
    next_level_children = deque()
    while (previous_deque):
        node = previous_deque.pop()
        if node.left:
            # Add nodes to the "left" so it acts like a queue (to the "back of
            # the line").
            next_level_children.appendleft(node.left)

        if node.right:
            next_level_children.appendleft(node.right)

    return next_level_children

def node_depths(root):
    """
    Return the sum of the depths of every node in the tree (root depth is 0).

    Computed level by level: the BFS level at depth d contributes
    d * (number of nodes at depth d); the root itself contributes 0.
    """
    if root == None:
        return 0

    counter = 0
    level_counter = 0

    level_queue = deque()
    level_queue.append(root)

    while (level_queue):
        # level_counter is the depth of the nodes produced by the call below.
        level_counter += 1
        level_queue = _get_next_level_children(level_queue)
        counter += level_counter * len(level_queue)

    return counter

################################################################################
# Find Closest Value in BST
################################################################################

def _get_closest_value(node, target):
    """
    Recursively find the value in the subtree rooted at node closest to target.

    Expects nodes with .value_/.left_/.right_ attributes (see module NOTE).

    Complexity: Average O(log(n)) time. But O(log(n)) space because of
    recursion.
    """
    # Exact match: nothing can be closer.
    if node.value_ == target:
        return node.value_

    # Base case. Node with no leaves.
    if (node.left_ == None and node.right_ == None):
        return node.value_

    # Only a left child: best of this node vs. the left subtree's best.
    if (node.left_ and node.right_ == None):
        left_result = _get_closest_value(node.left_, target)
        delta_value = abs(target - node.value_)
        delta_left_result = abs(target - left_result)
        return left_result if delta_left_result < delta_value else node.value_

    # Only a right child: best of this node vs. the right subtree's best.
    if (node.right_ and node.left_ == None):
        right_result = _get_closest_value(node.right_, target)
        delta_value = abs(target - node.value_)
        delta_right_result = abs(target - right_result)
        return right_result if delta_right_result < delta_value else node.value_

    # Shortcut: a child holds the target exactly.
    if node.left_.value_ == target:
        return node.left_.value_

    if node.right_.value_ == target:
        return node.right_.value_

    # Target lies left of the left child: only the left subtree can improve.
    if target < node.left_.value_:
        left_result = _get_closest_value(node.left_, target)
        delta_value = abs(target - node.value_)
        delta_left_result = abs(target - left_result)
        return left_result if delta_left_result < delta_value else node.value_

    # Target lies right of the right child: only the right subtree can improve.
    if target > node.right_.value_:
        right_result = _get_closest_value(node.right_, target)
        delta_value = abs(target - node.value_)
        delta_right_result = abs(target - right_result)
        return right_result if delta_right_result < delta_value else node.value_

    # Remaining case: target falls strictly between the two children's values,
    # so both subtrees must be searched.
    assert node.left_.value_ < target and target < node.right_.value_

    left_result = _get_closest_value(node.left_, target)
    right_result = _get_closest_value(node.right_, target)
    delta_left_result = abs(target - left_result)
    delta_right_result = abs(right_result - target)
    delta_value = abs(node.value_ - target)

    # Dict keyed by distance: equal deltas overwrite each other, which is
    # consistent with the "only one closest value" assumption below.
    results = {
        delta_left_result : left_result,
        delta_right_result : right_result,
        delta_value : node.value_}

    return results[min(results.keys())]

def find_closest_value_in_bst(tree, target):
    """
    @brief Write function that takes Binary Search Tree (BST) and target
    integer value and returns closest value to target value contained in BST.
    You can assume that there'll be only one closest value.
    """
    return _get_closest_value(tree, target)

def _get_closest_value_iterative(node, target, closest_value):
    """
    Iterative variant: walk one root-to-leaf path, keeping the best candidate.

    closest_value is the best value seen so far (callers seed it, e.g. with
    the root's value). O(1) extra space; O(log(n)) average time.
    """
    current_node = node

    while (current_node is not None):
        if (current_node.value_ == target):
            return current_node.value_

        # Keep whichever of (best so far, current node) is nearer to target;
        # ties keep the previously recorded value.
        delta_node = abs(target - current_node.value_)
        delta_closest = abs(target - closest_value)

        closest_value = (closest_value if delta_closest <= delta_node else
            current_node.value_)

        # BST property decides which side could still hold a closer value.
        if (current_node.value_ < target):
            current_node = current_node.right_
        else:
            assert target < current_node.value_
            current_node = current_node.left_

    return closest_value

def _preorder_traversal(node, running_sum, sums):
    """Accumulate every root-to-leaf path sum of node's subtree into sums.

    running_sum is the sum of values on the path above node.
    """
    if node is not None:
        # Leaf: complete this branch's sum and record it.
        if node.left_ is None and node.right_ is None:
            sums.append(running_sum + node.value_)
            return

        running_sum += node.value_

        _preorder_traversal(node.left_, running_sum, sums)
        _preorder_traversal(node.right_, running_sum, sums)

def branch_sums(root):
    """Return the list of root-to-leaf path sums, ordered leftmost branch first."""
    running_sum = 0
    sums = []
    _preorder_traversal(root, running_sum, sums)
    return sums

class TreeNode:
    # An n-ary tree node identified by a name, holding a list of children.
    def __init__(self, name):
        self.children_ = []
        self.name_ = name

    def add_child(self, name):
        # Returns self so calls can be chained.
        self.children_.append(TreeNode(name))
        return self

    @staticmethod
    def _depth_first_traversal_recursive(node, results):
        # Preorder: record this node's name, then recurse into each child in
        # insertion order.
        results.append(node.name_)
        for child in node.children_:
            TreeNode._depth_first_traversal_recursive(child, results)

    def depth_first_search_recursive(self, array):
        # Appends the preorder DFS node names into array and returns it.
        TreeNode._depth_first_traversal_recursive(self, array)
        return array

def remove_duplicates_from_linked_list(linked_list):
    """
    @brief Doesn't contain any nodes with duplicate values.
    @details You're given the head of a Singly Linked List whose nodes are in
    sorted order with respect to their values. Nodes use .value/.next. The
    assert below enforces strictly ascending order once duplicates are
    skipped (i.e. input must be sorted ascending).
    @returns Returns modified version of the Linked List that doesn't contain
    any nodes with duplicate values. The list is modified in place; the head
    node is returned.
    """
    current_node = linked_list
    current_value = linked_list.value

    while True:
        # Reached the end of linked list already. No further duplicates.
        if (current_node.next == None):
            break

        previous_node = current_node
        current_node = current_node.next

        if (current_node.value == current_value):
            # No further links past this duplicate.
            if (current_node.next == None):
                # Just remove the duplicate.
                previous_node.next = None
                break

            # Unlink the duplicate and re-examine from the node before it.
            previous_node.next = current_node.next
            current_node = previous_node
        else:
            assert current_value < current_node.value
            current_value = current_node.value

    return linked_list
dubenju/javay
src/java/com/wavpack/decoder/WavpackStream.java
<reponame>dubenju/javay
package com.wavpack.decoder;

/*
** WavpackStream.java
**
** Copyright (c) 2007 - 2008 <NAME>
**
** All Rights Reserved.
**
** Distributed under the BSD Software License (see license.txt)
**
*/

/**
 * Mutable per-stream decoder state for one WavPack stream: the current block
 * header, the bitstream reader, entropy-coder state, and the bank of up to 16
 * decorrelation passes applied while reconstructing samples.
 */
public class WavpackStream {
    // Header of the WavPack block currently being decoded.
    WavpackHeader wphdr = new WavpackHeader();
    // Bit-level reader over the block's compressed payload.
    Bitstream wvbits = new Bitstream();
    // Entropy-coder ("words") state used while decoding sample data.
    words_data w = new words_data();

    // Number of decorrelation passes in use; presumably at most the 16
    // allocated below — confirm against the unpacking code.
    int num_terms = 0;
    // NOTE(review): looks like a flag set when a decode/CRC error forces
    // muted output — verify against the decoder.
    int mute_error;
    long sample_index, crc; // was uint32_t in C

    // Extended-integer (int32) metadata for the current block.
    short int32_sent_bits, int32_zeros, int32_ones, int32_dups; // was uchar in C
    // Floating-point-data metadata for the current block.
    short float_flags, float_shift, float_max_exp, float_norm_exp; // was uchar in C

    // Sixteen pre-allocated decorrelation-pass slots...
    decorr_pass dp1 = new decorr_pass();
    decorr_pass dp2 = new decorr_pass();
    decorr_pass dp3 = new decorr_pass();
    decorr_pass dp4 = new decorr_pass();
    decorr_pass dp5 = new decorr_pass();
    decorr_pass dp6 = new decorr_pass();
    decorr_pass dp7 = new decorr_pass();
    decorr_pass dp8 = new decorr_pass();
    decorr_pass dp9 = new decorr_pass();
    decorr_pass dp10 = new decorr_pass();
    decorr_pass dp11 = new decorr_pass();
    decorr_pass dp12 = new decorr_pass();
    decorr_pass dp13 = new decorr_pass();
    decorr_pass dp14 = new decorr_pass();
    decorr_pass dp15 = new decorr_pass();
    decorr_pass dp16 = new decorr_pass();

    // ...and the same sixteen slots as an array for indexed access.
    decorr_pass decorr_passes[] = {dp1, dp2, dp3, dp4, dp5, dp6, dp7, dp8, dp9, dp10,
        dp11, dp12, dp13, dp14, dp15, dp16};
}
SummerWindL/dataloader
src/main/java/com/advance/dataloader/listener/AppStartupAdapter.java
<filename>src/main/java/com/advance/dataloader/listener/AppStartupAdapter.java
package com.advance.dataloader.listener;

import com.advance.dataloader.spring.AppService;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.event.ContextRefreshedEvent;

import java.util.TimerTask;

/**
 * Adapter base class that bridges the application-startup listener callback
 * to a {@link TimerTask}: on context refresh it delegates to the subclass's
 * afterStartup hook (presumably declared on {@link AppStartupListener} —
 * confirm), and as a TimerTask it can also be scheduled periodically.
 *
 * @author Advance
 * @date November 19, 2021, 15:43
 * @since V1.0.0
 */
public abstract class AppStartupAdapter extends TimerTask implements AppStartupListener{
    // NOTE(review): conventionally this would be private static final.
    Logger logger = LoggerFactory.getLogger(AppStartupAdapter.class);

    /**
     * Forwards the Spring context-refreshed event to the subclass hook,
     * handing over the refreshed ApplicationContext.
     */
    @Override
    public void onApplicationStartup(ContextRefreshedEvent event) {
        afterStartup(event.getApplicationContext());
    }

    @Override
    public void run() {
        // Re-read the switch from the Spring environment on every firing.
        String runOptimize = AppService.getApplicationContext().getEnvironment().getProperty("application.runOptimize");
        // NOTE(review): logs "start executing some scheduled tasks..." only
        // when application.runOptimize is NOT "Y" — verify the negation is
        // intended and the condition isn't inverted.
        if (!StringUtils.equals(runOptimize, "Y")) {
            logger.info("开始执行一些定时任务......");
        }
    }
}
gmai2006/fhir
src/main/java/org/fhir/entity/AccountModel.java
/* * #%L * FHIR Implementation * %% * Copyright (C) 2018 DataScience 9 LLC * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * This code is 100% AUTO generated. Please do not modify it DIRECTLY * If you need new features or function or changes please update the templates * then submit the template through our web interface. */ package org.fhir.entity; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Table; import org.fhir.pojo.*; import java.io.Serializable; import org.fhir.utils.JsonUtils; /** * "A financial tool for tracking value accrued for a particular purpose. In the healthcare field, used to track charges for a patient, cost centers, etc." * generated on 07/14/2018 */ @Entity @Table(name="account") public class AccountModel implements Serializable { private static final long serialVersionUID = 153159210222896617L; /** * Description: "This is a Account resource" */ @javax.validation.constraints.NotNull @javax.persistence.Basic @Column(name="\"resourceType\"") private String resourceType; /** * Description: "Unique identifier used to reference the account. May or may not be intended for human use (e.g. credit card number)." * Actual type: List<String>; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"identifier\"", length = 16777215) private String identifier; /** * Description: "Indicates whether the account is presently used/usable or not." 
*/ @javax.persistence.Basic @Column(name="\"status\"") private String status; /** * Description: "Categorizes the account for reporting and searching purposes." */ @javax.persistence.Basic @Column(name="\"type_id\"") private String type_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="type_id", insertable=false, updatable=false) private java.util.List<CodeableConceptModel> type; /** * Description: "Name used for the account when displaying it to humans in reports, etc." */ @javax.persistence.Basic @Column(name="\"name\"") private String name; /** * Description: "Identifies the patient, device, practitioner, location or other object the account is associated with." */ @javax.persistence.Basic @Column(name="\"subject_id\"") private String subject_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="subject_id", insertable=false, updatable=false) private java.util.List<ReferenceModel> subject; /** * Description: "Identifies the period of time the account applies to; e.g. accounts created per fiscal year, quarter, etc." * Actual type: String; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"period\"", length = 16777215) private String period; /** * Description: "Indicates the period of time over which the account is allowed to have transactions posted to it.\nThis period may be different to the coveragePeriod which is the duration of time that services may occur." * Actual type: String; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"active\"", length = 16777215) private String active; /** * Description: "Represents the sum of all credits less all debits associated with the account. Might be positive, zero or negative." 
*/ @javax.persistence.Basic @Column(name="\"balance_id\"") private String balance_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="balance_id", insertable=false, updatable=false) private java.util.List<MoneyModel> balance; /** * Description: "The party(s) that are responsible for covering the payment of this account, and what order should they be applied to the account." */ @javax.persistence.Basic @Column(name="\"coverage_id\"") private String coverage_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="coverage_id", insertable=false, updatable=false) private java.util.List<AccountCoverageModel> coverage; /** * Description: "Indicates the organization, department, etc. with responsibility for the account." */ @javax.persistence.Basic @Column(name="\"owner_id\"") private String owner_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="owner_id", insertable=false, updatable=false) private java.util.List<ReferenceModel> owner; /** * Description: "Provides additional information about what the account tracks and how it is used." */ @javax.persistence.Basic @Column(name="\"description\"") private String description; /** * Description: "Parties financially responsible for the account." 
*/ @javax.persistence.Basic @Column(name="\"guarantor_id\"") private String guarantor_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="guarantor_id", insertable=false, updatable=false) private java.util.List<AccountGuarantorModel> guarantor; /** * Description: "A human-readable narrative that contains a summary of the resource, and may be used to represent the content of the resource to a human. The narrative need not encode all the structured data, but is required to contain sufficient detail to make it \"clinically safe\" for a human to just read the narrative. Resource definitions may define what content should be represented in the narrative to ensure clinical safety." derived from DomainResource */ @javax.persistence.Basic @Column(name="\"text_id\"") private String text_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="text_id", insertable=false, updatable=false) private java.util.List<NarrativeModel> text; /** * Description: "These resources do not have an independent existence apart from the resource that contains them - they cannot be identified independently, and nor can they have their own independent transaction scope." derived from DomainResource * Actual type: List<String>; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"contained\"", length = 16777215) private String contained; /** * Description: "May be used to represent additional information that is not part of the basic definition of the resource. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension." 
derived from DomainResource * Actual type: List<String>; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"extension\"", length = 16777215) private String extension; /** * Description: "May be used to represent additional information that is not part of the basic definition of the resource, and that modifies the understanding of the element that contains it. Usually modifier elements provide negation or qualification. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions." derived from DomainResource * Actual type: List<String>; * Store this type as a string in db */ @javax.persistence.Basic @Column(name="\"modifierExtension\"", length = 16777215) private String modifierExtension; /** * Description: "The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes." derived from Resource derived from DomainResource */ @javax.validation.constraints.NotNull @javax.validation.constraints.Pattern(regexp="[A-Za-z0-9\\-\\.]{1,64}") @javax.persistence.Id @Column(name="\"id\"") private String id; /** * Description: "The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource." 
derived from Resource derived from DomainResource */ @javax.persistence.Basic @Column(name="\"meta_id\"") private String meta_id; @javax.persistence.OneToMany(cascade = javax.persistence.CascadeType.ALL) @javax.persistence.JoinColumn(name = "\"db_container_id\"", referencedColumnName="meta_id", insertable=false, updatable=false) private java.util.List<MetaModel> meta; /** * Description: "A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content." derived from Resource derived from DomainResource */ @javax.persistence.Basic @Column(name="\"implicitRules\"") private String implicitRules; /** * Description: "The base language in which the resource is written." derived from Resource derived from DomainResource */ @javax.validation.constraints.Pattern(regexp="[^\\s]+([\\s]?[^\\s]+)*") @javax.persistence.Basic @Column(name="\"language\"") private String language; public AccountModel() { } public AccountModel(Account o) { this.id = o.getId(); this.resourceType = o.getResourceType(); if (null != o.getIdentifier()) { this.identifier = JsonUtils.toJson(o.getIdentifier()); } this.status = o.getStatus(); if (null != o.getType() ) { this.type_id = "type" + this.id; this.type = CodeableConceptHelper.toModel(o.getType(), this.type_id); } this.name = o.getName(); if (null != o.getSubject() ) { this.subject_id = "subject" + this.id; this.subject = ReferenceHelper.toModel(o.getSubject(), this.subject_id); } if (null != o.getPeriod()) { this.period = JsonUtils.toJson(o.getPeriod()); } if (null != o.getActive()) { this.active = JsonUtils.toJson(o.getActive()); } if (null != o.getBalance() ) { this.balance_id = "balance" + this.id; this.balance = MoneyHelper.toModel(o.getBalance(), this.balance_id); } if (null != o.getCoverage() && !o.getCoverage().isEmpty()) { this.coverage_id = "coverage" + this.id; this.coverage = AccountCoverageHelper.toModelFromArray(o.getCoverage(), this.coverage_id); } if (null != 
o.getOwner() ) { this.owner_id = "owner" + this.id; this.owner = ReferenceHelper.toModel(o.getOwner(), this.owner_id); } this.description = o.getDescription(); if (null != o.getGuarantor() && !o.getGuarantor().isEmpty()) { this.guarantor_id = "guarantor" + this.id; this.guarantor = AccountGuarantorHelper.toModelFromArray(o.getGuarantor(), this.guarantor_id); } if (null != o.getText() ) { this.text_id = "text" + this.id; this.text = NarrativeHelper.toModel(o.getText(), this.text_id); } if (null != o.getContained()) { this.contained = JsonUtils.toJson(o.getContained()); } if (null != o.getExtension()) { this.extension = JsonUtils.toJson(o.getExtension()); } if (null != o.getModifierExtension()) { this.modifierExtension = JsonUtils.toJson(o.getModifierExtension()); } if (null != o.getMeta() ) { this.meta_id = "meta" + this.id; this.meta = MetaHelper.toModel(o.getMeta(), this.meta_id); } this.implicitRules = o.getImplicitRules(); this.language = o.getLanguage(); } public String getResourceType() { return this.resourceType; } public void setResourceType( String value) { this.resourceType = value; } public String getIdentifier() { return this.identifier; } public void setIdentifier( String value) { this.identifier = value; } public String getStatus() { return this.status; } public void setStatus( String value) { this.status = value; } public java.util.List<CodeableConceptModel> getType() { return this.type; } public void setType( java.util.List<CodeableConceptModel> value) { this.type = value; } public String getName() { return this.name; } public void setName( String value) { this.name = value; } public java.util.List<ReferenceModel> getSubject() { return this.subject; } public void setSubject( java.util.List<ReferenceModel> value) { this.subject = value; } public String getPeriod() { return this.period; } public void setPeriod( String value) { this.period = value; } public String getActive() { return this.active; } public void setActive( String value) { this.active = 
value; } public java.util.List<MoneyModel> getBalance() { return this.balance; } public void setBalance( java.util.List<MoneyModel> value) { this.balance = value; } public java.util.List<AccountCoverageModel> getCoverage() { return this.coverage; } public void setCoverage( java.util.List<AccountCoverageModel> value) { this.coverage = value; } public java.util.List<ReferenceModel> getOwner() { return this.owner; } public void setOwner( java.util.List<ReferenceModel> value) { this.owner = value; } public String getDescription() { return this.description; } public void setDescription( String value) { this.description = value; } public java.util.List<AccountGuarantorModel> getGuarantor() { return this.guarantor; } public void setGuarantor( java.util.List<AccountGuarantorModel> value) { this.guarantor = value; } public java.util.List<NarrativeModel> getText() { return this.text; } public void setText( java.util.List<NarrativeModel> value) { this.text = value; } public String getContained() { return this.contained; } public void setContained( String value) { this.contained = value; } public String getExtension() { return this.extension; } public void setExtension( String value) { this.extension = value; } public String getModifierExtension() { return this.modifierExtension; } public void setModifierExtension( String value) { this.modifierExtension = value; } public String getId() { return this.id; } public void setId( String value) { this.id = value; } public java.util.List<MetaModel> getMeta() { return this.meta; } public void setMeta( java.util.List<MetaModel> value) { this.meta = value; } public String getImplicitRules() { return this.implicitRules; } public void setImplicitRules( String value) { this.implicitRules = value; } public String getLanguage() { return this.language; } public void setLanguage( String value) { this.language = value; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("[AccountModel]:" + "\n"); 
builder.append("resourceType" + "->" + this.resourceType + "\n"); builder.append("identifier" + "->" + this.identifier + "\n"); builder.append("status" + "->" + this.status + "\n"); builder.append("name" + "->" + this.name + "\n"); builder.append("period" + "->" + this.period + "\n"); builder.append("active" + "->" + this.active + "\n"); builder.append("description" + "->" + this.description + "\n"); builder.append("contained" + "->" + this.contained + "\n"); builder.append("extension" + "->" + this.extension + "\n"); builder.append("modifierExtension" + "->" + this.modifierExtension + "\n"); builder.append("id" + "->" + this.id + "\n"); builder.append("implicitRules" + "->" + this.implicitRules + "\n"); builder.append("language" + "->" + this.language + "\n"); ; return builder.toString(); } public String debug() { StringBuilder builder = new StringBuilder(); builder.append("[AccountModel]:" + "\n"); builder.append("resourceType" + "->" + this.resourceType + "\n"); builder.append("identifier" + "->" + this.identifier + "\n"); builder.append("status" + "->" + this.status + "\n"); builder.append("name" + "->" + this.name + "\n"); builder.append("period" + "->" + this.period + "\n"); builder.append("active" + "->" + this.active + "\n"); builder.append("description" + "->" + this.description + "\n"); builder.append("contained" + "->" + this.contained + "\n"); builder.append("extension" + "->" + this.extension + "\n"); builder.append("modifierExtension" + "->" + this.modifierExtension + "\n"); builder.append("id" + "->" + this.id + "\n"); builder.append("implicitRules" + "->" + this.implicitRules + "\n"); builder.append("language" + "->" + this.language + "\n"); ; return builder.toString(); } }
freight-dev/freight-backend
common/src/main/java/com/freight/request_body/AuthenticationRequestBody.java
package com.freight.request_body;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.io.Serializable;
import java.util.Optional;

import static java.util.Optional.ofNullable;

/**
 * Request body for the sign-in endpoint. Exactly one of email or phone is
 * expected as the user identifier, plus the password.
 *
 * Created by toshikijahja on 3/26/19.
 */
@ApiModel
public class AuthenticationRequestBody implements Serializable {

    @ApiModelProperty(value = "User's email to be used for signing in")
    @JsonProperty
    private String email;

    @ApiModelProperty(value = "User's phone number to be used for signing in")
    @JsonProperty
    private Long phone;

    // FIX: @JsonProperty was missing here. With Jackson's default visibility,
    // a private field with no setter and no annotation is never populated, so
    // the deserialized password was always null.
    @ApiModelProperty(value = "User's password")
    @JsonProperty
    private String password;

    /** @return the email if supplied, otherwise {@link Optional#empty()} */
    public Optional<String> getEmailOptional() {
        return ofNullable(this.email);
    }

    /** @return the phone number if supplied, otherwise {@link Optional#empty()} */
    public Optional<Long> getPhoneOptional() {
        return ofNullable(this.phone);
    }

    /** @return the raw password as received in the request; may be null */
    public String getPassword() {
        return this.password;
    }
}
cryptable/ejbca-rootca
ejbca_ce_6_15_3_0/modules/systemtests/src-test/org/cesecore/certificates/ca/CaSessionTest.java
/************************************************************************* * * * CESeCore: CE Security Core * * * * This software is free software; you can redistribute it and/or * * modify it under the terms of the GNU Lesser General Public * * License as published by the Free Software Foundation; either * * version 2.1 of the License, or any later version. * * * * See terms of license at gnu.org. * * * *************************************************************************/ package org.cesecore.certificates.ca; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.security.cert.X509Certificate; import java.util.Collection; import org.cesecore.CaTestUtils; import org.cesecore.RoleUsingTestCase; import org.cesecore.authentication.tokens.AuthenticationToken; import org.cesecore.authentication.tokens.X509CertificateAuthenticationToken; import org.cesecore.authorization.AuthorizationDeniedException; import org.cesecore.certificates.certificate.CertificateWrapper; import org.cesecore.keys.token.CryptoTokenTestUtils; import org.cesecore.mock.authentication.tokens.TestAlwaysAllowLocalAuthenticationToken; import org.cesecore.roles.RoleNotFoundException; import org.cesecore.util.CertTools; import org.cesecore.util.CryptoProviderTools; import org.cesecore.util.EjbRemoteHelper; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; /** * Tests the CA session bean using soft CA tokens. 
* * @version $Id: CaSessionTest.java 29306 2018-06-21 14:07:02Z andresjakobs $ */ public class CaSessionTest extends RoleUsingTestCase { private static final String X509CADN = "CN=TEST"; private static CA testx509ca; private static CaSessionTestBase testBase; private CaSessionRemote caSession = EjbRemoteHelper.INSTANCE.getRemoteSession(CaSessionRemote.class); private final AuthenticationToken alwaysAllowToken = new TestAlwaysAllowLocalAuthenticationToken("CaSessionTest"); @BeforeClass public static void setUpProviderAndCreateCA() throws Exception { CryptoProviderTools.installBCProvider(); testx509ca = CaTestUtils.createTestX509CA(X509CADN, null, false); testBase = new CaSessionTestBase(testx509ca, null); } @AfterClass public static void tearDownFinal() throws RoleNotFoundException, AuthorizationDeniedException { if (testx509ca != null) { CryptoTokenTestUtils.removeCryptoToken(null, testx509ca.getCAToken().getCryptoTokenId()); } } @Before public void setUp() throws Exception { testBase.setUp(); } @After public void tearDown() throws Exception { testBase.tearDown(); } @Test public void testAddRenameAndRemoveX509CA() throws Exception { testBase.addRenameAndRemoveX509CA(); } @Test public void testAddAndGetCAWithDifferentCaid() throws Exception { testBase.addAndGetCAWithDifferentCaid(); } @Test public void addCAGenerateKeysLater() throws Exception { final String cadn = "CN=TEST GEN KEYS, O=CaSessionTest, C=SE"; final String tokenpwd = "<PASSWORD>"; CA ca = CaTestUtils.createTestX509CAOptionalGenKeys(cadn, tokenpwd.toCharArray(), false, false); final int cryptoTokenId = ca.getCAToken().getCryptoTokenId(); testBase.addCAGenerateKeysLater(ca, cadn, tokenpwd.toCharArray()); CryptoTokenTestUtils.removeCryptoToken(null, cryptoTokenId); } @Test public void addCAUseSessionBeanToGenerateKeys2() throws Exception { final String cadn = "CN=TEST GEN KEYS, O=CaSessionTest, C=SE"; final String tokenpwd = "<PASSWORD>"; CA ca = CaTestUtils.createTestX509CAOptionalGenKeys(cadn, 
tokenpwd.toCharArray(), false, false); final int cryptoTokenId = ca.getCAToken().getCryptoTokenId(); testBase.addCAUseSessionBeanToGenerateKeys(ca, cadn, tokenpwd.toCharArray()); CryptoTokenTestUtils.removeCryptoToken(null, cryptoTokenId); } @Test public void testExtendedCAService() throws Exception { CA ca = CaTestUtils.createTestX509CAOptionalGenKeys("CN=Test Extended CA service", "foo123".toCharArray(), false, false); final int cryptoTokenId = ca.getCAToken().getCryptoTokenId(); testBase.extendedCAServices(ca); CryptoTokenTestUtils.removeCryptoToken(null, cryptoTokenId); } @Test public void testAuthorization() throws Exception { testBase.authorization(); } @Test public void testGetCaChain() throws Exception { final String caDn = "CN=TestCAChain"; final String caName = CertTools.getPartFromDN(caDn, "CN"); CAInfo caInfo = null; try { final CA ca = CaTestUtils.createTestX509CAOptionalGenKeys(caDn, "foo123".toCharArray(), true, false); ca.setStatus(CAConstants.CA_ACTIVE); caSession.addCA(alwaysAllowToken, ca); caInfo = caSession.getCAInfo(alwaysAllowToken, caName); // 1. Test get certificate chain. Collection<CertificateWrapper> certificates = caSession.getCaChain(alwaysAllowToken, caName); assertNotNull(certificates); assertEquals("The length if the CA certificate chain of a self signed CA should be 1.", 1, certificates.size()); // 2. Test exception handling. // 2.1 Test with no authorization. final AuthenticationToken adminTokenNoAuth = new X509CertificateAuthenticationToken((X509Certificate) certificates.iterator().next().getCertificate()); try { certificates = caSession.getCaChain(adminTokenNoAuth, caName); fail("Get the CA certificate chain for an administrator with no authorization should throw an exception."); } catch(Exception e) { assertTrue("Get the CA certificate chain for a non existing CA should throw a CADoesntExistsException: " + e, e instanceof AuthorizationDeniedException); } // 2.2 Try to get CA chain for a non existing CA. 
try { certificates = caSession.getCaChain(alwaysAllowToken, caName + "-not-exists."); fail("Get the CA certificate chain for a non existing CA should throw an exception."); } catch(Exception e) { assertTrue("Get the CA certificate chain for a non existing CA should throw a CADoesntExistsException: " + e, e instanceof CADoesntExistsException); } // 2.3 Try to get the CA certificate chain for a CA with status = CAConstants.CA_WAITING_CERTIFICATE_RESPONSE ca.setStatus(CAConstants.CA_WAITING_CERTIFICATE_RESPONSE); caSession.editCA(alwaysAllowToken, ca.getCAInfo()); certificates = caSession.getCaChain(alwaysAllowToken, caName); assertEquals("Get the CA certificate chain for CA with status CAConstants.CA_WAITING_CERTIFICATE_RESPONSE should return an empty collection.", certificates.size(), 0); } finally { if (caInfo != null) { caSession.removeCA(alwaysAllowToken, caInfo.getCAId()); } } } }
tkminek/material_database
mat_db/main/migrations/0002_rename_material_cycliccurve_material_id_and_more.py
# Generated by Django 4.0.2 on 2022-02-15 19:11 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('main', '0001_initial'), ] operations = [ migrations.RenameField( model_name='cycliccurve', old_name='material', new_name='material_id', ), migrations.RenameField( model_name='encurve', old_name='material', new_name='material_id', ), migrations.RenameField( model_name='material', old_name='material_type', new_name='material_type_id', ), migrations.RenameField( model_name='sncurve', old_name='material', new_name='material_id', ), ]
billowave/TyzLearn2018
JavaPlatform/EclipseWorkSpaces/LearnWSOne/SpringLeranRoot/SpLNinth/src/main/java/org/SpLNinth/EventListen/example01/Tyz_LgEventListener.java
package org.SpLNinth.EventListen.example01;

import java.util.EventListener;

// Custom listener interface (translated from the original Chinese comment:
// "define a custom listener interface, extending EventListener").
// NOTE(review): despite that comment and the java.util.EventListener import
// above, the interface actually extends the project-local TyzEventListener,
// leaving the import unused — confirm which supertype was intended.
interface Tyz_LgEventListener extends TyzEventListener {

    // Callback invoked when a MyEvent is dispatched to this listener.
    void Lg(MyEvent me);
}
carldea/rapaio
src/rapaio/math/linear/dense/DMatrixDenseC.java
/*
 * Apache License
 * Version 2.0, January 2004
 * http://www.apache.org/licenses/
 *
 * Copyright 2013 - 2021 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package rapaio.math.linear.dense;

import rapaio.math.linear.DMatrix;
import rapaio.math.linear.DVector;
import rapaio.math.linear.MType;
import rapaio.util.collection.DoubleArrays;

import java.io.Serial;

/**
 * Dense matrix with values in double floating point precision.
 * Values are stored in arrays of arrays with first array holding column references
 * and secondary level arrays being the column arrays.
 *
 * Column-major layout: values[c][r] holds element (r, c). The fields
 * rowCount, colCount and values are inherited from DMatrixDense (not shown
 * in this file).
 */
public class DMatrixDenseC extends DMatrixDense {

    @Serial
    private static final long serialVersionUID = -2186520026933442642L;

    /** Creates a rows x cols matrix initialized with zeros. */
    public DMatrixDenseC(int rows, int cols) {
        super(MType.CDENSE, rows, cols, newArray(rows, cols));
    }

    /** Wraps an existing column-major array; the array is NOT copied. */
    public DMatrixDenseC(int rows, int cols, double[][] array) {
        super(MType.CDENSE, rows, cols, array);
    }

    // Allocates colCount column arrays of rowCount zeros each.
    private static double[][] newArray(int rowCount, int colCount) {
        double[][] array = new double[colCount][rowCount];
        for (int i = 0; i < colCount; i++) {
            array[i] = DoubleArrays.newFill(rowCount, 0);
        }
        return array;
    }

    /** Returns a vector VIEW over column {@code col} (shares storage, no copy). */
    @Override
    public DVector mapCol(int col) {
        return DVector.wrapArray(rowCount, values[col]);
    }

    /**
     * Returns a matrix made of the selected columns. The new matrix shares the
     * selected column arrays with this one (only references are copied).
     */
    @Override
    public DMatrix mapCols(int... indexes) {
        double[][] array = new double[indexes.length][rowCount];
        for (int i = 0; i < indexes.length; i++) {
            array[i] = values[indexes[i]];
        }
        return new DMatrixDenseC(rowCount, indexes.length, array);
    }

    @Override
    public double get(int row, int col) {
        return values[col][row];
    }

    @Override
    public void set(int row, int col, double value) {
        values[col][row] = value;
    }

    @Override
    public void inc(int row, int col, double value) {
        values[col][row] += value;
    }

    /** Adds {@code x} to every element IN PLACE and returns this matrix. */
    @Override
    public DMatrix add(double x) {
        for (double[] col : values) {
            for (int i = 0; i < col.length; i++) {
                col[i] += x;
            }
        }
        return this;
    }

    /**
     * Matrix-vector product. For a dense vector the result is accumulated as a
     * linear combination of columns: c += values[i] * b[i] for each column i.
     * Other vector types fall back to the generic implementation.
     */
    @Override
    public DVector dot(DVector b) {
        if (b instanceof DVectorDense vd) {
            double[] array = vd.elements();
            double[] c = DoubleArrays.newFill(rowCount, 0);
            for (int i = 0; i < colCount; i++) {
                double[] col = values[i];
                addMultiplied(c, col, array[i]);
            }
            return new DVectorDense(c.length, c);
        }
        return super.dot(b);
    }

    // c += b * factor, element-wise (c and b must have the same length).
    private void addMultiplied(double[] c, double[] b, double factor) {
        for (int i = 0; i < c.length; i++) {
            c[i] += b[i] * factor;
        }
    }

    /**
     * In-place multiply by a diagonal matrix on the right: column i is scaled
     * by v[i] (presumably DoubleArrays.mult(dst, offset, factor, length) —
     * TODO confirm the signature).
     * NOTE(review): `len` is computed but never used — no bounds check that
     * v.size() == colCount; confirm the base class validates sizes.
     */
    @Override
    public DMatrix dotDiag(DVector v) {
        if (v instanceof DVectorDense) {
            var array = v.asDense().elements();
            var len = v.size();
            for (int i = 0; i < colCount; i++) {
                DoubleArrays.mult(values[i], 0, array[i], rowCount);
            }
            return this;
        }
        return super.dotDiag(v);
    }

    /**
     * NOTE(review): suspected bug. This loops i over rowCount but indexes
     * values[i], which are COLUMN arrays of length rowCount, and scales only
     * their first colCount entries — for non-square matrices this either
     * over-runs the column index or scales a partial column. Also inconsistent
     * with dotDiag's `instanceof DVectorDense` guard (here `v.isDense()`), and
     * `len` is again unused. Compare against the DMatrix base implementation
     * before relying on this fast path.
     */
    @Override
    public DMatrix dotDiagT(DVector v) {
        if (v.isDense()) {
            var array = v.asDense().elements();
            var len = v.size();
            for (int i = 0; i < rowCount; i++) {
                DoubleArrays.mult(values[i], 0, array[i], colCount);
            }
            return this;
        }
        return super.dotDiagT(v);
    }

    /** Transpose as a VIEW: returns a row-major matrix sharing the same storage. */
    @Override
    public DMatrix t() {
        return new DMatrixDenseR(colCount, rowCount, values);
    }

    /** Deep copy: every column array is duplicated. */
    @Override
    public DMatrixDenseC copy() {
        double[][] copy = new double[colCount][rowCount];
        for (int i = 0; i < colCount; i++) {
            copy[i] = DoubleArrays.copy(values[i], 0, rowCount);
        }
        return new DMatrixDenseC(rowCount, colCount, copy);
    }

    /**
     * Returns a rows x cols copy: overlapping region copied from this matrix,
     * everything else filled with {@code fill}.
     */
    @Override
    public DMatrix resizeCopy(int rows, int cols, double fill) {
        double[][] copy = new double[cols][rows];
        for (int i = 0; i < Math.min(cols, colCount); i++) {
            copy[i] = DoubleArrays.newFill(rows, fill);
            System.arraycopy(values[i], 0, copy[i], 0, Math.min(rows, rowCount));
        }
        // Columns beyond the original width are pure fill.
        for (int i = colCount; i < cols; i++) {
            copy[i] = DoubleArrays.newFill(rows, fill);
        }
        return new DMatrixDenseC(rows, cols, copy);
    }
}
AbdullahRusi/Tehreer-Android
demo/src/main/java/com/mta/tehreer/demo/BidiAlgorithmActivity.java
/* * Copyright (C) 2016-2018 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mta.tehreer.demo; import android.content.Intent; import android.os.Bundle; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.widget.EditText; import android.widget.Toast; import androidx.appcompat.app.ActionBar; import androidx.appcompat.app.AppCompatActivity; public class BidiAlgorithmActivity extends AppCompatActivity { private EditText mBidiEditText; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_bidi_algorithm); ActionBar actionBar = getSupportActionBar(); if (actionBar != null) { actionBar.setDisplayHomeAsUpEnabled(true); } mBidiEditText = findViewById(R.id.bidi_edit_text); mBidiEditText.setSelection(mBidiEditText.getText().length()); } @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.menu_bidi_algorithm, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_apply: if (mBidiEditText.getText().length() == 0) { Toast.makeText(this, "Please write something!", Toast.LENGTH_SHORT).show(); } else { Intent intent = new Intent(BidiAlgorithmActivity.this, BidiInfoActivity.class); intent.putExtra(BidiInfoActivity.BIDI_TEXT, mBidiEditText.getText()); startActivity(intent); } return true; } return 
super.onOptionsItemSelected(item); } @Override public boolean onSupportNavigateUp(){ onBackPressed(); return true; } }
chelo84/tibiainfo
src/main/java/com/tibiainfo/model/dto/npc/NpcDTO.java
package com.tibiainfo.model.dto.npc;

import com.tibiainfo.model.entity.npc.Npc;
import lombok.Data;

/**
 * Transfer object for an NPC. In compact form only id, title and city are
 * populated; the extended form carries every descriptive attribute as well.
 */
@Data
public class NpcDTO {

    Long id;
    String title;
    String name;
    String race;
    String gender;
    String city;
    String location;
    String job;
    String version;
    String x;
    String y;
    String z;

    /** Builds the fully-populated (extended) DTO. */
    public NpcDTO(Npc npc) {
        this(npc, true);
    }

    /**
     * Builds the DTO from an entity.
     *
     * @param npc      source entity
     * @param extended when false, only id, title and city are copied
     */
    public NpcDTO(Npc npc, boolean extended) {
        this.id = npc.getId();
        this.title = npc.getTitle();
        this.city = npc.getCity();

        if (!extended) {
            return;
        }

        this.name = npc.getName();
        this.race = npc.getRace();
        this.gender = npc.getGender();
        this.location = npc.getLocation();
        this.job = npc.getJob();
        this.version = npc.getVersion();
        this.x = npc.getX();
        this.y = npc.getY();
        this.z = npc.getZ();
    }
}
rexcorp01/bet6
pick6-backend/test/controllers/games_controller_test.rb
require 'test_helper'

# JSON API tests for GamesController (standard Rails scaffold CRUD coverage).
class GamesControllerTest < ActionDispatch::IntegrationTest
  setup do
    @game = games(:one)
  end

  test "should get index" do
    get games_url, as: :json
    assert_response :success
  end

  test "should create game" do
    assert_difference('Game.count') do
      post games_url, params: { game: game_params }, as: :json
    end

    assert_response 201
  end

  test "should show game" do
    get game_url(@game), as: :json
    assert_response :success
  end

  test "should update game" do
    patch game_url(@game), params: { game: game_params }, as: :json
    assert_response 200
  end

  test "should destroy game" do
    assert_difference('Game.count', -1) do
      delete game_url(@game), as: :json
    end

    assert_response 204
  end

  private

  # Attribute hash shared by the create and update requests; previously this
  # hash was duplicated verbatim in both tests.
  def game_params
    {
      ascore: @game.ascore,
      away: @game.away,
      day: @game.day,
      handicap: @game.handicap,
      home: @game.home,
      hscore: @game.hscore,
      time: @game.time,
      total: @game.total,
      winner: @game.winner
    }
  end
end
tomzlh/dtc
sc-transaction-core/src/main/java/com/ops/sc/core/spi/SPIPostProcessor.java
package com.ops.sc.core.spi; import java.util.Map; import java.util.Properties; public interface SPIPostProcessor { void init(Map<String,String> map); }
ScaledByDesign/cloudboost
dashboard-ui/app/components/settings/email.js
/** * Created by Darkstar on 11/29/2016. */ import React from 'react'; import PropTypes from 'prop-types'; import { connect } from 'react-redux'; import { showAlert, updateSettings } from '../../actions'; // mui import { FormGroup, Radio, Button } from 'react-bootstrap'; import TrasnparentLoader from 'react-loader-advanced'; import Loader from 'react-dots-loader'; import 'react-dots-loader/index.css'; const style = { refresh: { display: 'inline-block', position: 'relative', background: 'none', boxShadow: 'none', marginLeft: '18px', marginRight: '18px' } }; export class Email extends React.Component { static propTypes = { emailSettings: PropTypes.any, appData: PropTypes.any, updateSettings: PropTypes.any } constructor (props) { super(props); this.state = { loading: true, mandrill: { apiKey: null, enabled: true }, mailgun: { apiKey: null, domain: null, enabled: false }, fromEmail: null, fromName: null }; } componentWillMount () { if (this.props.emailSettings) { this.setState({ ...this.props.emailSettings.settings, loading: false }); } } componentWillReceiveProps (nextProps) { if (nextProps.emailSettings) { this.setState({ ...nextProps.emailSettings.settings, loading: false }); } } textChangeHandler = (which) => (e) => { this.state[which] = e.target.value; this.setState(this.state); } mailKeysChangeHandler = (mailType, which) => (e) => { this.state[mailType][which] = e.target.value; this.setState(this.state); } selectMailType = (val) => { if (val === 'mandrill') { this.setState({ mandrill: { enabled: true }, mailgun: { enabled: false } }); } else { this.setState({ mandrill: { enabled: false }, mailgun: { enabled: true } }); } } updateSettings = () => { let reqObj = { ...this.state }; delete reqObj['loading']; if (this.state.mandrill.enabled) { if (this.state.mandrill.apiKey && this.state.fromName && this.state.fromEmail) { this.setState( { loading: true }, this.props.updateSettings(this.props.appData.appId, this.props.appData.masterKey, 'email', reqObj) ); } else 
showAlert('error', 'Please fill all the fields.'); } else { if (this.state.mailgun.apiKey && this.state.mailgun.domain && this.state.fromName && this.state.fromEmail) { this.setState( { loading: true }, this.props.updateSettings(this.props.appData.appId, this.props.appData.masterKey, 'email', reqObj) ); } else showAlert('error', 'Please fill all the fields.'); } } render () { return ( <div style={{ paddingBottom: 0, paddingTop: 41, paddingLeft: 54 }}> <h2 className='head'>Email Settings</h2> <TrasnparentLoader show={this.state.loading} message={<Loader style={{ marginLeft: '50%', position: 'relative' }} />} contentBlur={1} // foregroundStyle={{foregroundColor: 'white', opacity: "0.3"}} backgroundStyle={{ backgroundColor: 'rgba(255, 255, 255, 0.3)' }} > <div> <div className='small-form-row'> <div className='control-label'> <label className='danger'>Create</label> <p className='label-desc'> Choose an email provider that you want to send emails with </p> </div> <div className='control'> <div> <FormGroup> <Radio inline name='radioGroup' onChange={() => this.selectMailType('mandrill')} checked={this.state.mandrill.enabled}> Mandrill </Radio> {' '} <Radio name='radioGroup' inline onChange={() => this.selectMailType('mailgun')} checked={this.state.mailgun.enabled}> Mailgun </Radio> {' '} </FormGroup> </div> </div> </div> <div className={this.state.mandrill.enabled ? 'small-form-row' : 'hide'}> <div className='control-label'> <label className='danger'>Mandrill API Key</label> <p className='label-desc'> API Key of Mandrill email service. </p> </div> <div className='control'> <div> <input type='text' className='form form-control' placeholder='Enter Mandrill API Key' value={this.state.mandrill.apiKey || ''} onChange={this.mailKeysChangeHandler('mandrill', 'apiKey')} /> </div> </div> </div> <div className={this.state.mailgun.enabled ? 
'small-form-row' : 'hide'}> <div className='control-label'> <label className='danger'>Mailgun API Key</label> <p className='label-desc'> API Key of Mailgun email service </p> </div> <div className='control'> <div> <input type='text' className='form form-control' placeholder='Enter Mailgun API Key' value={this.state.mailgun.apiKey || ''} onChange={this.mailKeysChangeHandler('mailgun', 'apiKey')} /> </div> </div> </div> <div className={this.state.mailgun.enabled ? 'small-form-row' : 'hide'}> <div className='control-label'> <label className='danger'>Mailgun Domain</label> <p className='label-desc'> Domain listed in your Mailgun Dashboard </p> </div> <div className='control'> <div> <input type='text' className='form form-control' placeholder='Enter Mandrill API Key' value={this.state.mailgun.domain || ''} onChange={this.mailKeysChangeHandler('mailgun', 'domain')} /> </div> </div> </div> <div className='small-form-row'> <div className='control-label'> <label className='danger'>From Email</label> <p className='label-desc'> Email address which you want an email to be sent from </p> </div> <div className='control'> <div> <input type='text' className='form form-control' placeholder='Enter From Email' value={this.state.fromEmail || ''} onChange={this.textChangeHandler('fromEmail')} /> </div> </div> </div> <div className='small-form-row'> <div className='control-label'> <label className='danger'>From Name</label> <p className='label-desc'> Name you want an email to be sent from </p> </div> <div className='control'> <div> <input type='text' className='form form-control' placeholder='Enter From Email' value={this.state.fromName || ''} onChange={this.textChangeHandler('fromName')} /> </div> </div> </div> </div> </TrasnparentLoader> <div> <div className='small-form-row'> <div className='control'> <div> <Button style={{ marginTop: 15 }} className={!this.state.loading ? 
'btn-primary' : 'btn-primary'} onClick={this.updateSettings} disabled={this.state.loading} > Save { this.state.loading && <Loader size={10} distance={5} color='#ececec' style={style.refresh} /> } </Button> </div> </div> </div> </div> </div> ); } } const mapStateToProps = (state) => { let emailSettings = null; if (state.settings.length) { emailSettings = state.settings.filter(x => x.category === 'email')[0]; } return { appData: state.manageApp, emailSettings: emailSettings }; }; const mapDispatchToProps = (dispatch) => { return { updateSettings: (appId, masterKey, categoryName, settingsObject) => dispatch(updateSettings(appId, masterKey, categoryName, settingsObject)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(Email);
gidabite/libcds
test/include/cds_test/stat_skiplist_out.h
// Copyright (c) 2006-2018 <NAME>
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef CDSTEST_STAT_SKIPLIST_OUT_H
#define CDSTEST_STAT_SKIPLIST_OUT_H

#include <cds/intrusive/details/skip_list_base.h>

namespace cds_test {

    // Streaming a disabled (empty) skip-list statistics object is a no-op.
    static inline property_stream& operator <<( property_stream& o, cds::intrusive::skip_list::empty_stat const& /*s*/ )
    {
        return o;
    }

    // Dumps every counter of the enabled skip-list statistics object into the
    // property stream, one named property per counter.
    static inline property_stream& operator <<( property_stream& o, cds::intrusive::skip_list::stat<> const& s )
    {
        {
            // Per-level insert/delete counters are folded into a single
            // "+ins/-del +ins/-del ..." string, one pair per node height.
            std::stringstream stm;
            for ( unsigned int i = 0; i < sizeof( s.m_nNodeHeightAdd ) / sizeof( s.m_nNodeHeightAdd[0] ); ++i )
                stm << " +" << s.m_nNodeHeightAdd[i].get() << "/-" << s.m_nNodeHeightDel[i].get();
            // substr(1) strips the leading space produced by the loop above.
            o << CDSSTRESS_STAT_OUT_( "stat.level_ins_del", stm.str().substr( 1 ).c_str());
        }

        return o
            << CDSSTRESS_STAT_OUT( s, m_nInsertSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nInsertFailed )
            << CDSSTRESS_STAT_OUT( s, m_nInsertRetries )
            << CDSSTRESS_STAT_OUT( s, m_nUpdateExist )
            << CDSSTRESS_STAT_OUT( s, m_nUpdateNew )
            << CDSSTRESS_STAT_OUT( s, m_nUnlinkSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nUnlinkFailed )
            << CDSSTRESS_STAT_OUT( s, m_nExtractSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nExtractFailed )
            << CDSSTRESS_STAT_OUT( s, m_nExtractRetries )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMinSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMinFailed )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMinRetries )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMaxSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMaxFailed )
            << CDSSTRESS_STAT_OUT( s, m_nExtractMaxRetries )
            << CDSSTRESS_STAT_OUT( s, m_nEraseSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nEraseFailed )
            << CDSSTRESS_STAT_OUT( s, m_nEraseRetry )
            << CDSSTRESS_STAT_OUT( s, m_nFindFastSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nFindFastFailed )
            << CDSSTRESS_STAT_OUT( s, m_nFindSlowSuccess )
            << CDSSTRESS_STAT_OUT( s, m_nFindSlowFailed )
            << CDSSTRESS_STAT_OUT( s, m_nRenewInsertPosition )
            << CDSSTRESS_STAT_OUT( s, m_nLogicDeleteWhileInsert )
            << CDSSTRESS_STAT_OUT( s, m_nRemoveWhileInsert )
            << CDSSTRESS_STAT_OUT( s, m_nFastErase )
            << CDSSTRESS_STAT_OUT( s, m_nSlowErase )
            << CDSSTRESS_STAT_OUT( s, m_nFastExtract )
            << CDSSTRESS_STAT_OUT( s, m_nSlowExtract )
            << CDSSTRESS_STAT_OUT( s, m_nEraseWhileFind )
            << CDSSTRESS_STAT_OUT( s, m_nExtractWhileFind )
            << CDSSTRESS_STAT_OUT( s, m_nMarkFailed )
            << CDSSTRESS_STAT_OUT( s, m_nEraseContention );
    }

} // namespace cds_test

#endif // #ifndef CDSTEST_STAT_SKIPLIST_OUT_H
AmmarQaseem/CPI-Pipeline-test
scripts/ppi-benchmark/Experiments/SL/measures/Fscore.py
import sys def F1(TP, FP, FN): if (TP == 0. and (FP == 0. or FN == 0.)): F = 0. prec = 0 rec = 0 else: prec = float(TP) / float(TP+FP) rec = float(TP) / float(TP + FN) if (prec == 0 and rec == 0): F = 0. else: F = (2.*prec*rec)/(prec+rec) return F, prec, rec def readResults(outputfile, threshold): TP = 0 FP = 0 FN = 0 TN = 0 for line in outputfile: line = line.split() correct = float(line[1]) predict = float(line[2]) if predict <threshold: predict = -1.0 else: predict = 1.0 if predict == 1.0: if correct == 1.0: TP +=1 elif correct == -1.0: FP += 1 else: print "Fatal error" sys.exit(0) elif predict == -1.0: if correct == 1.0: FN += 1 elif correct == -1.0: TN += 1 else: print "Fatal error" sys.exit(0) else: print "Fatal error" sys.exit(0) F, prec, rec = F1(TP, FP, FN) return F, prec, rec, TP, FP, FN, TN if __name__=="__main__": if len(sys.argv)!=3: print "Usage: python Fscore RESULTFILE THRESHOLD" sys.exit(0) else: f = open(sys.argv[1]) threshold = float(sys.argv[2]) F, prec, rec = readResults(f, threshold) f.close() print "F: %f" %F print "Precision: %f" %prec print "Recall: %f" %rec
ozdanborne/libcalico-go
lib/api/workloadendpoint.go
// Copyright (c) 2016 Tigera, Inc. All rights reserved. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package api import ( . "github.com/tigera/libcalico-go/lib/api/unversioned" . "github.com/tigera/libcalico-go/lib/net" ) type WorkloadEndpointMetadata struct { ObjectMetadata Name string `json:"name,omitempty" validate:"omitempty,name"` WorkloadID string `json:"workloadID,omitempty" valid:"omitempty,name"` OrchestratorID string `json:"orchestratorID,omitempty" valid:"omitempty,name"` Hostname string `json:"hostname,omitempty" valid:"omitempty,name"` Labels map[string]string `json:"labels,omitempty" validate:"omitempty,labels"` } type WorkloadEndpointSpec struct { IPNetworks []IPNet `json:"ipNetworks,omitempty" validate:"omitempty"` Profiles []string `json:"profiles,omitempty" validate:"omitempty,dive,name"` InterfaceName string `json:"interfaceName,omitempty" validate:"omitempty,interface"` MAC MAC `json:"mac,omitempty" validate:"omitempty"` } type WorkloadEndpoint struct { TypeMetadata Metadata WorkloadEndpointMetadata `json:"metadata,omitempty"` Spec WorkloadEndpointSpec `json:"spec,omitempty"` } func NewWorkloadEndpoint() *WorkloadEndpoint { return &WorkloadEndpoint{TypeMetadata: TypeMetadata{Kind: "workloadEndpoint", APIVersion: "v1"}} } type WorkloadEndpointList struct { TypeMetadata Metadata ListMetadata `json:"metadata,omitempty"` Items []WorkloadEndpoint `json:"items" validate:"dive"` } func NewWorkloadEndpointList() *WorkloadEndpointList { return 
&WorkloadEndpointList{TypeMetadata: TypeMetadata{Kind: "workloadEndpointList", APIVersion: "v1"}} }
lefevre-fraser/openmeta-mms
bin/Python27/Lib/site-packages/matlab_wrapper/__main__.py
import sys import json from matlab_wrapper import MatlabWrapper from openmdao.api import FileRef import numpy if __name__ == '__main__': # print(repr(sys.argv[1:])) c = MatlabWrapper(sys.argv[1], start_engine=False) def default(obj): if isinstance(obj, FileRef): return repr(obj) if isinstance(obj, numpy.ndarray): return repr(obj) raise TypeError(repr(obj) + " is not JSON serializable") print(json.dumps({'params': c._init_params_dict, 'unknowns': c._init_unknowns_dict}, default=default))
ksemer/snap
snap-exp/test-dev/IdTest.cpp
#include "Snap.h" int main(){ TTableContext Context; // create scheme Schema CountryS; CountryS.Add(TPair<TStr,TAttrType>("Country", atStr)); CountryS.Add(TPair<TStr,TAttrType>("Continent", atStr)); CountryS.Add(TPair<TStr,TAttrType>("Size", atStr)); TIntV RelevantCols; RelevantCols.Add(0); RelevantCols.Add(1); RelevantCols.Add(2); Schema ContinentS; ContinentS.Add(TPair<TStr,TAttrType>("Continent", atStr)); ContinentS.Add(TPair<TStr,TAttrType>("Size", atStr)); RelevantCols.Clr(); RelevantCols.Add(0); RelevantCols.Add(1); PTable P = TTable::LoadSS(CountryS, "tests/country.txt", Context, RelevantCols); P->SaveSS("tests/p.txt"); P->Unique("Continent"); P->SaveSS("tests/p1.txt"); P->Defrag(); P->SaveSS("tests/p2.txt"); THash<TInt, TInt> Hash = P->GetRowIdMap(); for (THash<TInt, TInt>::TIter it = Hash.BegI(); it != Hash.EndI(); it++) { TInt id = it.GetKey(); TInt row = it.GetDat(); printf("Id: %d, Row: %d\n", id.Val, row.Val); } PTable Q = TTable::LoadSS(ContinentS, "tests/continent.txt", Context, RelevantCols); Q->SaveSS("tests/q.txt"); PTable R = P->Join("Continent", *Q, "Continent"); R->SaveSS("tests/r.txt"); return 0; }
vertigo-io/vertigo-extensions
vertigo-commons/src/main/java/io/vertigo/commons/eventbus/EventBusManager.java
<filename>vertigo-commons/src/main/java/io/vertigo/commons/eventbus/EventBusManager.java
/**
 * vertigo - application development platform
 *
 * Copyright (C) 2013-2021, Vertigo.io, <EMAIL>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.vertigo.commons.eventbus;

import java.util.function.Consumer;

import io.vertigo.core.node.component.Manager;

/**
 * Inter-component events manager.
 * Publisher / subscriber on event type, for communication between components.
 * Listeners are configured to listen within the same JVM.
 *
 * The process is synchronous:
 * subscribers execute their methods on the posting thread.
 * Errors are not caught:
 * if one of the subscribers throws, the error propagates out of post().
 *
 * The purpose of this pattern is to decouple the managers.
 * Managers that post don't need to know which components are listening.
 *
 * Example:
 * - flush a local cache when an object is updated, deleted or inserted in the store
 *
 * WARNING:
 * By default, the EventBus is not distributed.
 * A cache component should listen: a modification on one node should flush caches across the system — it's the cache component's responsibility to do this.
 * An audit component should listen in order to write the audit log.
 *
 * @author pchretien, npiedeloup
 */
public interface EventBusManager extends Manager {
	/**
	 * Posts an event to all registered subscribers, synchronously.
	 * @param event Event
	 */
	void post(Event event);

	/**
	 * Registers a dead event consumer, invoked for events no subscriber handled.
	 * @param eventConsumer the consumer
	 */
	void registerDead(final Consumer<Event> eventConsumer);
}
sunjiGit/litemall
litemall-admin-api/src/main/java/org/linlinjava/litemall/admin/web/AdminStoreInventoryController.java
<filename>litemall-admin-api/src/main/java/org/linlinjava/litemall/admin/web/AdminStoreInventoryController.java
package org.linlinjava.litemall.admin.web;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.linlinjava.litemall.admin.annotation.RequiresPermissionsDesc;
import org.linlinjava.litemall.db.enums.store.StoreInventoryOperateType;
import org.linlinjava.litemall.core.util.ResponseUtil;
import org.linlinjava.litemall.db.domain.LitemallStoreInventoryFlow;
import org.linlinjava.litemall.db.service.LitemallStoreInventoryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

import javax.validation.constraints.NotNull;

/**
 * Admin endpoints for managing per-store product inventory:
 * stock-in (make), listing, and manual adjustment (in/out).
 */
@RestController
@RequestMapping("/admin/storeInventory")
@Validated
@Api(value = "门店产品库存的后台操作")
public class AdminStoreInventoryController {
    private final Log logger = LogFactory.getLog(AdminStoreInventoryController.class);

    @Autowired
    private LitemallStoreInventoryService storeInventoryService;

    /**
     * Records a MAKE_IN flow that increases a store's stock of a product.
     * Rejects any other operate type.
     */
    @RequiresPermissions("admin:store:list")
    @RequiresPermissionsDesc(menu = {"门店管理", "门店库存管理"}, button = "输入")
    @PostMapping("/add")
    @ApiOperation(value = "增加门店库存商品数量 门店=storeId 商品=goodsId 产品=productId 操作=operateType:MAKE-IN 数量=amount 操作人=operator")
    public Object add(@RequestBody LitemallStoreInventoryFlow flow) {
        logger.info(String.format("add flow:%s", flow));

        StoreInventoryOperateType operateType = StoreInventoryOperateType.getByCode(flow.getOperateType());
        if (StoreInventoryOperateType.MAKE_IN != operateType) {
            // Only MAKE_IN is valid for this endpoint.
            logger.info("flow type is not fit. flow=" + flow);
            return ResponseUtil.badArgument();
        }

        storeInventoryService.plusFlow(flow);
        return ResponseUtil.ok();
    }

    /**
     * Lists the inventory rows for one store.
     */
    @RequiresPermissions("admin:store:list")
    @RequiresPermissionsDesc(menu = {"门店管理", "门店库存管理"}, button = "查询")
    @GetMapping("/detail")
    @ApiOperation(value = "查看门店库存 门店=storeId,返回对象有 goods_name product_id")
    public Object detail(@NotNull Integer storeId) {
        logger.info(String.format("detail id:%d", storeId));
        return ResponseUtil.okList(storeInventoryService.findByStoreId(storeId));
    }

    /**
     * Records a manual stock adjustment: ADJUST_IN increases the stock,
     * ADJUST_OUT decreases it; any other operate type is rejected.
     */
    @RequiresPermissions("admin:store:list")
    @RequiresPermissionsDesc(menu = {"门店管理", "门店库存管理"}, button = "调整")
    @PostMapping("/adjust")
    @ApiOperation(value = "调整库存 门店=storeId 商品=goodsId 产品=productId 操作=operateType:ADJUST_OUT-调整减少 ADJUST_IN-调整增加, " +
            "数量=amount 操作人=operator")
    public Object adjust(@RequestBody LitemallStoreInventoryFlow flow) {
        logger.info(String.format("adjust flow:%s", flow));

        StoreInventoryOperateType operateType = StoreInventoryOperateType.getByCode(flow.getOperateType());
        if (StoreInventoryOperateType.ADJUST_IN == operateType) {
            storeInventoryService.plusFlow(flow);
        } else if (StoreInventoryOperateType.ADJUST_OUT == operateType) {
            storeInventoryService.minusFlow(flow);
        } else {
            logger.info("flow type is not fit. flow=" + flow);
            return ResponseUtil.badArgument();
        }
        return ResponseUtil.ok();
    }
}
ekirving/alleletraj
alleletraj/ancient/dedupe.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # third party modules import luigi # local modules from alleletraj import utils from alleletraj.ancient.align import BwaSamSe from alleletraj.samtools import SAMToolsMerge class FilterUniqueSAMCons(utils.DatabaseTask): """ Remove PCR duplicates, so we don't overestimate coverage. FilterUniqueSAMCons calls consensus on bases in duplicate reads, rather than simply keeping the read with best baseq https://bioinf.eva.mpg.de/fastqProcessing/ NOTE When `accession` is not set, this deduplicates a merged BAM file containing all the sample accessions. :type species: str :type sample: str :type accession: str """ species = luigi.Parameter() sample = luigi.Parameter() accession = luigi.OptionalParameter(default=None) def requires(self): if self.accession: return BwaSamSe(self.species, self.sample, self.accession, self.accession_data['paired']) else: return SAMToolsMerge(self.species, self.sample) def output(self): if self.accession: return [luigi.LocalTarget('data/bam/{}.sort.rmdup.{}'.format(self.accession, ext)) for ext in ['bam', 'bam.bai']] else: return [luigi.LocalTarget('data/bam/{}.merged.rmdup.{}'.format(self.sample, ext)) for ext in ['bam', 'bam.bai']] def run(self): # unpack the params bam_in, _, _ = self.input() bam_out, _ = self.output() with bam_out.temporary_path() as bam_path: params = { 'bam_in': bam_in.path, 'bam_out': bam_path, 'readgroup': r'@RG\tID:{basename}\tSM:{sample}'.format(basename=self.basename, sample=self.sample) } # NOTE FilterUniqueSAMCons strips the RG tag from merged reads, so we add a new readgroup to orphaned reads cmd = "samtools view -h {bam_in} | " \ "FilterUniqueSAMCons.py | " \ "samtools addreplacerg -r '{readgroup}' -m orphan_only -O bam -o {bam_out} - ".format(**params) utils.run_cmd([cmd], shell=True) # index the BAM file utils.run_cmd(['samtools', 'index', '-b', bam_out.path]) if __name__ == '__main__': luigi.run()
vany152/FilesHash
console/src/boost_1_78_0/libs/spirit/test/karma/repeat2.cpp
//  Copyright (c) 2001-2011 <NAME>
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/spirit/include/karma_repeat.hpp>
#include <boost/spirit/include/karma_operator.hpp>
#include <boost/spirit/include/karma_char.hpp>
#include <boost/spirit/include/karma_string.hpp>
#include <boost/spirit/include/karma_numeric.hpp>
#include <boost/spirit/include/karma_directive.hpp>
#include <boost/spirit/include/karma_operator.hpp>
#include <boost/spirit/include/karma_action.hpp>
#include <boost/spirit/include/karma_nonterminal.hpp>
#include <boost/spirit/include/karma_auxiliary.hpp>
#include <boost/spirit/include/karma_directive.hpp>
#include <boost/spirit/include/karma_phoenix_attributes.hpp>
#include <boost/spirit/include/support_argument.hpp>
#include <boost/assign/std/vector.hpp>
#include <boost/phoenix/core.hpp>
#include <boost/phoenix/operator.hpp>
#include <boost/fusion/include/std_pair.hpp>

#include <string>
#include <iostream>
#include <vector>

#include "test.hpp"

using namespace spirit_test;

///////////////////////////////////////////////////////////////////////////////
// Regression tests for the karma::repeat[] directive: case directives,
// exact/min-max/min-inf repeat counts, semantic actions, and the behavior of
// strict[] vs relaxed[] when a sub-generator fails mid-sequence.
int main()
{
    using namespace boost::spirit::ascii;
    using boost::spirit::karma::repeat;
    using boost::spirit::karma::inf;
    using boost::spirit::karma::int_;
    using boost::spirit::karma::hex;
    using boost::spirit::karma::_1;

    {
        // repeat under lower[]/upper[] case directives, plain and delimited.
        std::string str("aBcdeFGH");
        BOOST_TEST(test("abcdefgh", lower[repeat(8)[char_]], str));
        BOOST_TEST(test_delimited("A B C D E F G H ",
            upper[repeat(8)[char_]], str, space));
    }

    {
        // nested repeats consuming pairs of characters; the last case has too
        // few characters for a full inner repetition and must fail.
        std::string s1 = "aaaaa";
        BOOST_TEST(test("aaaaa", char_ << repeat(2)[char_ << char_], s1));

        s1 = "aaa";
        BOOST_TEST(test("aaa", char_ << repeat(1, 2)[char_ << char_], s1));

        s1 = "aa";
        BOOST_TEST(!test("", char_ << repeat(1)[char_ << char_], s1));
    }

    { // actions
        namespace phx = boost::phoenix;

        std::vector<char> v;
        v.push_back('a');
        v.push_back('a');
        v.push_back('a');
        v.push_back('a');
        BOOST_TEST(test("aaaa", repeat(4)[char_][_1 = phx::ref(v)]));
    }

    { // more actions
        namespace phx = boost::phoenix;

        std::vector<int> v;
        v.push_back(123);
        v.push_back(456);
        v.push_back(789);
        BOOST_TEST(test_delimited("123 456 789 ", repeat(3)[int_][_1 = phx::ref(v)], space));
    }

    // failing sub-generators
    {
        using boost::spirit::karma::strict;
        using boost::spirit::karma::relaxed;
        using namespace boost::assign;
        namespace karma = boost::spirit::karma;

        typedef std::pair<char, char> data;
        std::vector<data> v2, v3;
        v2 += std::make_pair('a', 'a'),
              std::make_pair('b', 'b'),
              std::make_pair('c', 'c'),
              std::make_pair('d', 'd'),
              std::make_pair('e', 'e'),
              std::make_pair('f', 'f'),
              std::make_pair('g', 'g');
        v3 += std::make_pair('a', 'a'),
              std::make_pair('b', 'b'),
              std::make_pair('c', 'c'),
              std::make_pair('d', 'd');

        karma::rule<spirit_test::output_iterator<char>::type, data()> r;

        // &char_('d') only succeeds for the 'd' element; relaxed mode skips
        // failures, strict mode propagates them.
        r = &char_('d') << char_;
        BOOST_TEST(test("d", repeat[r], v2));
        BOOST_TEST(test("d", relaxed[repeat[r]], v2));
        BOOST_TEST(test("", strict[repeat[r]], v2));

        r = !char_('d') << char_;
        BOOST_TEST(test("abcefg", repeat(6)[r], v2));
        BOOST_TEST(!test("", repeat(5)[r], v2));
        BOOST_TEST(test("abcefg", relaxed[repeat(6)[r]], v2));
        BOOST_TEST(!test("", relaxed[repeat(5)[r]], v2));
        BOOST_TEST(!test("", strict[repeat(6)[r]], v2));
        BOOST_TEST(!test("", strict[repeat(5)[r]], v2));

        r = !char_('c') << char_;
        BOOST_TEST(test("abd", repeat(3)[r], v2));
        BOOST_TEST(test("abd", relaxed[repeat(3)[r]], v2));
        BOOST_TEST(!test("", strict[repeat(3)[r]], v2));

        // min/max and min/inf repeat counts with a failing first element.
        r = !char_('a') << char_;
        BOOST_TEST(test("bcdef", repeat(3, 5)[r], v2));
        BOOST_TEST(test("bcd", repeat(3, 5)[r], v3));
        BOOST_TEST(!test("", repeat(4, 5)[r], v3));
        BOOST_TEST(test("bcdef", relaxed[repeat(3, 5)[r]], v2));
        BOOST_TEST(test("bcd", relaxed[repeat(3, 5)[r]], v3));
        BOOST_TEST(!test("", relaxed[repeat(4, 5)[r]], v3));
        BOOST_TEST(!test("", strict[repeat(3, 5)[r]], v2));
        BOOST_TEST(!test("", strict[repeat(3, 5)[r]], v3));
        BOOST_TEST(!test("", strict[repeat(4, 5)[r]], v3));
        BOOST_TEST(test("bcd", repeat(3, inf)[r], v3));
        BOOST_TEST(test("bcdefg", repeat(3, inf)[r], v2));
        BOOST_TEST(!test("", repeat(4, inf)[r], v3));

        // failing last element: strict mode succeeds while the failure lies
        // beyond the requested repetition range.
        r = !char_('g') << char_;
        BOOST_TEST(test("abcde", repeat(3, 5)[r], v2));
        BOOST_TEST(test("abcd", repeat(3, 5)[r], v3));
        BOOST_TEST(!test("", repeat(4, 5)[r], v3));
        BOOST_TEST(test("abcde", relaxed[repeat(3, 5)[r]], v2));
        BOOST_TEST(test("abcd", relaxed[repeat(3, 5)[r]], v3));
        BOOST_TEST(!test("", relaxed[repeat(4, 5)[r]], v3));
        BOOST_TEST(test("abcde", strict[repeat(3, 5)[r]], v2));
        BOOST_TEST(test("abcd", strict[repeat(3, 5)[r]], v3));
        BOOST_TEST(!test("", strict[repeat(5)[r]], v3));
    }

    return boost::report_errors();
}
CIDARLAB/fpSelection
fpSelectionWebApp/src/main/java/org/cidarlab/fpSelection/servlets/StartFpSelectionWebApp.java
<filename>fpSelectionWebApp/src/main/java/org/cidarlab/fpSelection/servlets/StartFpSelectionWebApp.java<gh_stars>1-10 /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package org.cidarlab.fpSelection.servlets; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.webapp.WebAppContext; /** * * @author prash */ public class StartFpSelectionWebApp { public static void main(String[] args) { Server server = new Server(8080); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); WebAppContext contextWeb = new WebAppContext(); contextWeb.setDescriptor(context + "/WEB-INF/web.xml"); contextWeb.setResourceBase("../fpSelectionWebApp/src/main/webapp/"); contextWeb.setConfigurationClasses(new String[]{"org.eclipse.jetty.webapp.WebInfConfiguration", "org.eclipse.jetty.webapp.WebXmlConfiguration", "org.eclipse.jetty.webapp.MetaInfConfiguration", "org.eclipse.jetty.webapp.FragmentConfiguration", "org.eclipse.jetty.webapp.JettyWebXmlConfiguration", "org.eclipse.jetty.annotations.AnnotationConfiguration"}); contextWeb.setContextPath("/"); contextWeb.setParentLoaderPriority(true); HandlerList handlers = new HandlerList(); handlers.addHandler(contextWeb); server.setHandler(handlers); try { server.start(); server.join(); } catch (Throwable t) { t.printStackTrace(System.err); } } }
rallabandibhanusree/BillingSample
test/logoutTest_spec.js
<reponame>rallabandibhanusree/BillingSample /** * Created by makere001c on 2/16/17. */ /* var frisby = require('frisby'); var cloudmine = require('cloudmine'); frisby.globalSetup({ request:{ headers:{ 'Content-Type': 'application/json', 'x-cloudmine-apikey':'<KEY>', 'x-cloudmine-sessiontoken':'<KEY>' } } }); //GET THE USER ****Change Endpoint to reflect snippet in cloudmine - https://api.secure.cloudmine.me/v1/app/96fc95210061884d1aab3e4204ff3a1e/run/user frisby.create('GET THE USER') .get('http://127.0.0.1:4545/v1/app/96fc95210061884d1aab3e4204ff3a1e/run/user?search=cch_id="8a53a1dc-6f73-4bd9-8d8a-01cd0591c3eb"') // { headers: { // 'Content-Type': 'application/json', // 'x-cloudmine-apikey':'<KEY>', // 'x-cloudmine-sessiontoken':'' // // }},{json: true}) .expectBodyContains('<EMAIL>') .expectStatus(200) .inspectJSON() .toss(); //LOG OUT THE USER frisby.create('LOG OUT THE USER') .get('http://127.0.0.1:4545/v1/app/96fc95210061884d1aab3e4204ff3a1e/run/logout') .expectStatus(200) .expectJSON({ result: 'Logout successful' }) .inspectJSON() .afterJSON(function () { ////GET THE USER WHILE ALREADY LOGGED OUT frisby.create('GET THE USER WHILE ALREADY LOGGED OUT') .get('http://127.0.0.1:4545/v1/app/96fc95210061884d1aab3e4204ff3a1e/run/user?search=cch_id="8a53a1dc-6f73-4bd9-8d8a-01cd0591c3eb"') .expectJSON({ result: { '401': { errors: [ 'Unauthorized' ] } } }) .expectStatus(400) .inspectJSON() .toss(); }) .toss() */
pwaller/mewmew-l
ir/function.go
package ir import ( "fmt" "strconv" "strings" "github.com/kr/pretty" "github.com/mewmew/l/internal/enc" "github.com/mewmew/l/ir/metadata" "github.com/mewmew/l/ir/types" "github.com/mewmew/l/ir/value" ) // ~~~ [ Function Declaration or Definition ] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Note, Sig is nil when Function is in AST form; irx.Translate instantiates Sig // based on the RetType, Params and Variadic fields of the function header. // Note, Typ is nil when Function is in AST form; irx.Translate instantiates Typ // to a pointer to Sig. // A Function is an LLVM IR function. type Function struct { Linkage Linkage // Function header Preemption Preemption // zero value if not present Visibility Visibility // zero value if not present DLLStorageClass DLLStorageClass // zero value if not present CallingConv CallingConv // zero value if not present ReturnAttrs []ReturnAttribute RetType types.Type Name string // *GlobalIdent Params []*Param Variadic bool UnnamedAddr UnnamedAddr FuncAttrs []FuncAttribute Section *Section // nil if not present Comdat *Comdat // nil if not present GC string // empty if not present Prefix Constant // *TypeConst; nil if not present Prologue Constant // *TypeConst; nil if not present Personality Constant // *TypeConst; nil if not present // Function body Blocks []*BasicBlock // nil if declaration UseListOrders []*UseListOrder // Function signature. Sig *types.FuncType Typ *types.PointerType // pointer to Sig. Metadata []*metadata.MetadataAttachment } // String returns the string representation of the function as a type-value // pair. func (f *Function) String() string { return fmt.Sprintf("%v %v", f.Type(), f.Ident()) } // Type returns the type of the function. func (f *Function) Type() types.Type { return f.Typ } // Ident returns the identifier associated with the function. func (f *Function) Ident() string { return enc.Global(f.Name) } // Def returns the LLVM syntax representation of the global variable definition. 
func (f *Function) Def() string { // "declare" MetadataAttachments OptExternLinkage FunctionHeader // "define" OptLinkage FunctionHeader MetadataAttachments FunctionBody buf := &strings.Builder{} if len(f.Blocks) == 0 { // Function declaration. // // "declare" MetadataAttachments OptExternLinkage FunctionHeader buf.WriteString("declare") for _, md := range f.Metadata { fmt.Fprintf(buf, " %v", md) } if f.Linkage != LinkageNone { fmt.Fprintf(buf, " %v", f.Linkage) } buf.WriteString(f.headerString()) return buf.String() } // Function definition. // // "define" OptLinkage FunctionHeader MetadataAttachments FunctionBody buf.WriteString("define") if f.Linkage != LinkageNone { fmt.Fprintf(buf, " %v", f.Linkage) } buf.WriteString(f.headerString()) for _, md := range f.Metadata { fmt.Fprintf(buf, " %v", md) } fmt.Fprintf(buf, " %v", f.bodyString()) return buf.String() } // AssignLocalIDs assigns IDs to unnamed local variables. func (f *Function) AssignLocalIDs() { if len(f.Blocks) == 0 { return } id := 0 names := make(map[string]value.Value) setName := func(n value.Named) { name := n.GetName() switch { case isUnnamed(name): name := strconv.Itoa(id) n.SetName(name) names[name] = n id++ case isLocalID(name): want := strconv.Itoa(id) if want != name { panic(fmt.Errorf("invalid local ID in function %v, expected %v, got %v", f.Name, want, name)) } id++ default: // Valid is named; nothing to do. } } for _, param := range f.Params { // Assign local IDs to unnamed parameters of function definitions. setName(param) } for _, block := range f.Blocks { // Assign local IDs to unnamed basic blocks. setName(block) for _, inst := range block.Insts { n, ok := inst.(value.Named) if !ok { continue } nt := n.Type() if nt == nil { pretty.Println("n:", n) } if n.Type().Equal(types.Void) { continue } // Assign local IDs to unnamed local variables. setName(n) } } } // isUnnamed reports whether the given identifier is unnamed. 
func isUnnamed(name string) bool {
	return len(name) == 0
}

// isLocalID reports whether the given identifier is a local ID (e.g. "%42");
// i.e. a non-empty string consisting solely of decimal digits.
func isLocalID(name string) bool {
	for _, r := range name {
		if strings.IndexRune("0123456789", r) == -1 {
			return false
		}
	}
	return len(name) > 0
}

// headerString returns the string representation of the function header.
// Optional fields are emitted only when set; each is prefixed with a space.
func (hdr *Function) headerString() string {
	// OptPreemptionSpecifier OptVisibility OptDLLStorageClass OptCallingConv
	// ReturnAttrs Type GlobalIdent "(" Params ")" OptUnnamedAddr FuncAttrs
	// OptSection OptComdat OptGC OptPrefix OptPrologue OptPersonality
	buf := &strings.Builder{}
	if hdr.Preemption != PreemptionNone {
		fmt.Fprintf(buf, " %v", hdr.Preemption)
	}
	if hdr.Visibility != VisibilityNone {
		fmt.Fprintf(buf, " %v", hdr.Visibility)
	}
	if hdr.DLLStorageClass != DLLStorageClassNone {
		fmt.Fprintf(buf, " %v", hdr.DLLStorageClass)
	}
	if hdr.CallingConv != CallingConvNone {
		fmt.Fprintf(buf, " %v", hdr.CallingConv)
	}
	for _, attr := range hdr.ReturnAttrs {
		fmt.Fprintf(buf, " %v", attr)
	}
	fmt.Fprintf(buf, " %v", hdr.RetType)
	fmt.Fprintf(buf, " %v(", enc.Global(hdr.Name))
	for i, param := range hdr.Params {
		if i != 0 {
			buf.WriteString(", ")
		}
		buf.WriteString(param.Def())
	}
	if hdr.Variadic {
		// "..." marks a variadic parameter list; separated from any named
		// parameters by a comma.
		if len(hdr.Params) > 0 {
			buf.WriteString(", ")
		}
		buf.WriteString("...")
	}
	buf.WriteString(")")
	if hdr.UnnamedAddr != UnnamedAddrNone {
		fmt.Fprintf(buf, " %v", hdr.UnnamedAddr)
	}
	for _, attr := range hdr.FuncAttrs {
		fmt.Fprintf(buf, " %v", attr)
	}
	if hdr.Section != nil {
		fmt.Fprintf(buf, " %v", hdr.Section)
	}
	if hdr.Comdat != nil {
		fmt.Fprintf(buf, " %v", hdr.Comdat)
	}
	if len(hdr.GC) > 0 {
		fmt.Fprintf(buf, " gc %v", enc.Quote(hdr.GC))
	}
	if hdr.Prefix != nil {
		fmt.Fprintf(buf, " prefix %v", hdr.Prefix)
	}
	if hdr.Prologue != nil {
		fmt.Fprintf(buf, " prologue %v", hdr.Prologue)
	}
	if hdr.Personality != nil {
		fmt.Fprintf(buf, " personality %v", hdr.Personality)
	}
	return buf.String()
}

// bodyString returns the string representation of the function body: the
// basic blocks followed by any use-list orders, wrapped in braces.
func (body *Function) bodyString() string {
	// "{" BasicBlockList UseListOrders "}"
	buf := &strings.Builder{}
	buf.WriteString("{\n")
	for _, block := range body.Blocks {
		fmt.Fprintf(buf, "%v\n", block.Def())
	}
	for _, useList := range body.UseListOrders {
		fmt.Fprintf(buf, "%v\n", useList)
	}
	buf.WriteString("}")
	return buf.String()
}
hmcts/nfdiv-common-lib
src/main/java/uk/gov/hmcts/reform/divorce/mapper/strategy/reasonfordivorce/DesertionStrategy.java
package uk.gov.hmcts.reform.divorce.mapper.strategy.reasonfordivorce; import org.springframework.stereotype.Component; import uk.gov.hmcts.reform.divorce.model.ccd.CoreCaseData; import uk.gov.hmcts.reform.divorce.model.usersession.DivorceSession; import uk.gov.hmcts.reform.divorce.utils.DateUtils; import static org.apache.commons.lang3.StringUtils.join; import static uk.gov.hmcts.reform.divorce.mapper.MappingCommons.toYesNoPascalCase; import static uk.gov.hmcts.reform.divorce.mapper.MappingCommons.toYesNoUpperCase; @Component public class DesertionStrategy implements ReasonForDivorceStrategy { private static final String DESERTION = "desertion"; private static final String LINE_SEPARATOR = "\n"; private static final String DESERTION_STRING = "I have been deserted by my %s on the %s."; @Override public String deriveStatementOfCase(DivorceSession divorceSession) { String prettyDesertionDate = DateUtils.format( divorceSession.getReasonForDivorceDesertionDate(), "dd MMMM yyyy" ); String derivedStatementOfCase = String.format(DESERTION_STRING, divorceSession.getDivorceWho(), prettyDesertionDate); return join(derivedStatementOfCase, LINE_SEPARATOR, divorceSession.getReasonForDivorceDesertionDetails()); } @Override public boolean accepts(String reasonForDivorce) { return DESERTION.equalsIgnoreCase(reasonForDivorce); } @Override public void setLivedApartFieldsFromDivorceSession(DivorceSession divorceSession, CoreCaseData coreCaseData) { coreCaseData.setDesertionLivedApartEntireTime( toYesNoUpperCase(divorceSession.getLivedApartEntireTime()) ); coreCaseData.setDesertionLivedTogetherMoreTimeThanPermitted( toYesNoUpperCase(divorceSession.getLivedTogetherMoreTimeThanPermitted()) ); coreCaseData.setDesertionTimeTogetherPermitted( divorceSession.getTimeLivedTogetherPermitted() ); } @Override public void setLivedApartFieldsFromCoreCaseData(CoreCaseData coreCaseData, DivorceSession divorceSession) { divorceSession.setLivedTogetherMoreTimeThanPermitted( 
toYesNoPascalCase(coreCaseData.getDesertionLivedTogetherMoreTimeThanPermitted()) ); divorceSession.setLivedApartEntireTime( toYesNoPascalCase(coreCaseData.getDesertionLivedApartEntireTime()) ); divorceSession.setTimeLivedTogetherPermitted( coreCaseData.getDesertionTimeTogetherPermitted() ); } }
FarouDev/Projects-for-beginner-with-Cpp
BCC102/include/windows/sdk/filterpipelineutil.h
//+-------------------------------------------------------------------------
//
//  Microsoft Windows
//  Copyright (c) Microsoft Corporation. All rights reserved.
//
//--------------------------------------------------------------------------

// Vendored Windows SDK print-pipeline helper header (Borland-adapted: note
// the "#pragma option push/pop" wrappers). Keep changes minimal so it can be
// diffed against upstream.
#ifndef _PRINT_FILTER_UTIL_813b22ee_62f7_4200_
#pragma option push -b -a8 -pc -A- -w-pun /*P_O_Push*/
#define _PRINT_FILTER_UTIL_813b22ee_62f7_4200_

#include <winapifamily.h>

#pragma region Desktop Family
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)

#if defined(__cplusplus)

//
// print filter pipeline
//
namespace pfp
{
    //
    // Helpful when you want to use a print read interface with XML SAX
    // which needs an ISequentialStream
    //
    // Reference-counted COM adapter: wraps an IPrintReadStream and exposes it
    // as a read-only ISequentialStream. Write() is not implemented.
    //
    class PrintReadStreamToSeqStream : public ISequentialStream
    {
    public:

        // NOTE(review): the initializer list order below does not match the
        // member declaration order (m_bEof, m_cRef, m_pStream); members are
        // initialized in declaration order regardless, which is harmless here
        // since the initializers are independent, but it triggers -Wreorder
        // style warnings — confirm against upstream before reordering.
        PrintReadStreamToSeqStream(
            _In_ IPrintReadStream *pReadStream
            ) :
            m_cRef(1),
            m_pStream(pReadStream),
            m_bEof(FALSE)
        {
            m_pStream->AddRef();
        }

        ~PrintReadStreamToSeqStream()
        {
            m_pStream->Release();
        }

        STDMETHODIMP_(ULONG)
        AddRef(
            VOID
            )
        {
            return InterlockedIncrement(&m_cRef);
        }

        STDMETHODIMP_(ULONG)
        Release(
            VOID
            )
        {
            ULONG cRefCount = InterlockedDecrement(&m_cRef);

            if (cRefCount)
            {
                return cRefCount;
            }

            // Last reference released; self-destruct per COM convention.
            delete this;

            return 0;
        }

        STDMETHODIMP
        QueryInterface(
            _In_ REFIID riid,
            _COM_Outptr_ VOID **ppv
            )
        {
            HRESULT hRes = E_POINTER;

            if (ppv)
            {
                hRes = E_NOINTERFACE;
                *ppv = NULL;

                if (riid == IID_ISequentialStream)
                {
                    *ppv = static_cast<ISequentialStream *>(this);
                }
                else if (riid == IID_IUnknown)
                {
                    *ppv = static_cast<IUnknown *>(this);
                }

                if (*ppv)
                {
                    AddRef();
                    hRes = S_OK;
                }
            }

            return hRes;
        }

        STDMETHODIMP
        Read(
            _Out_writes_bytes_to_(cb, *pcbRead) void* pv,
            _In_ ULONG cb,
            _Out_opt_ ULONG *pcbRead
            )
        {
            if (pcbRead == NULL)
            {
                return E_INVALIDARG;
            }

            *pcbRead = 0;

            HRESULT hr = S_OK;

            //
            // ISequentialStream::Read is expected to block until any of the following occurs:
            //  - the requested number of bytes have been read
            //  - the end of the stream is reached
            //  - a failure occurs
            //
            // Since IPrintReadStream::ReadBytes is non-blocking, we must call ReadBytes until one of the
            // conditions is met.
            //
            while (SUCCEEDED(hr) && !m_bEof && *pcbRead < cb)
            {
                DWORD cbRead = 0;
                BOOL bEof = FALSE;

                // Read into the remaining tail of the caller's buffer.
                hr = m_pStream->ReadBytes(reinterpret_cast<PVOID>(reinterpret_cast<PBYTE>(pv) + *pcbRead), cb - *pcbRead, &cbRead, &bEof);

                if (SUCCEEDED(hr))
                {
                    if (bEof)
                    {
                        // EOF is sticky: subsequent Read calls return 0 bytes.
                        m_bEof = TRUE;
                    }

                    *pcbRead += cbRead;
                }
            }

            if (SUCCEEDED(hr) && *pcbRead < cb)
            {
                //
                // ISequentialStream::Read returns S_FALSE when the number of bytes returned is fewer than
                // the number of bytes requested. (i.e. at the end of the stream)
                //
                hr = S_FALSE;
            }

            return hr;
        }

        STDMETHODIMP
        Write(
            _In_reads_bytes_(cb) void const* pv,
            _In_ ULONG cb,
            _Out_opt_ ULONG *pcbWritten
            )
        {
            UNREFERENCED_PARAMETER(pv);
            UNREFERENCED_PARAMETER(cb);
            UNREFERENCED_PARAMETER(pcbWritten);

            return E_NOTIMPL;
        }

    private:

        BOOL m_bEof;
        LONG m_cRef;
        IPrintReadStream *m_pStream;
    };

    //
    // Helpful when you want to use a print write interface with XML SAX
    // which needs an ISequentialStream
    //
    // Reference-counted COM adapter: wraps an IPrintWriteStream and exposes
    // it as a write-only ISequentialStream. Read() is not implemented. The
    // destructor closes the wrapped stream before releasing it.
    //
    class PrintWriteStreamToSeqStream : public ISequentialStream
    {
    public:

        PrintWriteStreamToSeqStream(
            _In_ IPrintWriteStream *pWriteStream
            ) :
            m_cRef(1),
            m_pStream(pWriteStream)
        {
            m_pStream->AddRef();
        }

        ~PrintWriteStreamToSeqStream()
        {
            m_pStream->Close();
            m_pStream->Release();
        }

        STDMETHODIMP_(ULONG)
        AddRef(
            VOID
            )
        {
            return InterlockedIncrement(&m_cRef);
        }

        STDMETHODIMP_(ULONG)
        Release(
            VOID
            )
        {
            ULONG cRefCount = InterlockedDecrement(&m_cRef);

            if (cRefCount)
            {
                return cRefCount;
            }

            delete this;

            return 0;
        }

        STDMETHODIMP
        QueryInterface(
            _In_ REFIID riid,
            _Out_ VOID **ppv
            )
        {
            HRESULT hRes = E_POINTER;

            if (ppv)
            {
                hRes = E_NOINTERFACE;
                *ppv = NULL;

                if (riid == IID_ISequentialStream)
                {
                    *ppv = static_cast<ISequentialStream *>(this);
                }
                else if (riid == IID_IUnknown)
                {
                    *ppv = static_cast<IUnknown *>(this);
                }

                if (*ppv)
                {
                    AddRef();
                    hRes = S_OK;
                }
            }

            return hRes;
        }

        STDMETHODIMP
        Read(
            _Out_writes_bytes_(cb) void* pv,
            _In_ ULONG cb,
            _Out_ ULONG *pcbRead
            )
        {
            UNREFERENCED_PARAMETER(pv);
            UNREFERENCED_PARAMETER(cb);
            UNREFERENCED_PARAMETER(pcbRead);

            return E_NOTIMPL;
        }

        STDMETHODIMP
        Write(
            _In_reads_bytes_(cb) void const* pv,
            _In_ ULONG cb,
            _Out_ ULONG *pcbWritten
            )
        {
            return m_pStream->WriteBytes(pv, cb, pcbWritten);
        }

    private:

        LONG m_cRef;
        IPrintWriteStream *m_pStream;
    };

}; // namespace pfp

#endif // if defined(__cplusplus)

#endif /* WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) */
#pragma endregion

#pragma option pop /*P_O_Pop*/

#endif // #ifndef _PRINT_FILTER_UTIL_813b22ee_62f7_4200_
codefollower/Open-Source-Research
Javac2007/流程/comp/Flow/markDead.java
<gh_stars>100-1000
/** Record that statement is unreachable. */
void markDead() {
	DEBUG.P(this,"markDead()");
	DEBUG.P("firstadr="+firstadr+" nextadr="+nextadr);
	DEBUG.P("inits 前="+inits);
	DEBUG.P("uninits前="+uninits);

	// Mark the whole live address range [firstadr, nextadr) in both bit
	// sets; with every variable simultaneously "initialized" and
	// "uninitialized", subsequent flow checks on this path are suppressed.
	inits.inclRange(firstadr, nextadr);
	uninits.inclRange(firstadr, nextadr);

	DEBUG.P("inits 后="+inits);
	DEBUG.P("uninits后="+uninits);

	// Liveness flag: downstream analysis treats this path as dead code.
	alive = false;

	DEBUG.P("alive="+alive);
	DEBUG.P(0,this,"markDead()");
}
vinibiavatti1/PythonFlaskCms
project/properties/pages/calendar_properties.py
""" Calendar properties configuration. """ from project.models.property_model import PropertyModel from project.models.header_model import HeaderModel from project.enums import property_types_enum as prop_type from typing import Union from project.properties.bases.page_properties import page_properties # Properties calendar_properties: list[Union[PropertyModel, HeaderModel]] = [ ] # Extensions calendar_properties.extend(page_properties)
cblecker/managed-upgrade-operator
pkg/controller/upgradeconfig/statuschanged_predicate_test.go
package upgradeconfig

import (
	upgradev1alpha1 "github.com/openshift/managed-upgrade-operator/pkg/apis/upgrade/v1alpha1"
	testStructs "github.com/openshift/managed-upgrade-operator/util/mocks/structs"
	"k8s.io/apimachinery/pkg/types"
	"sigs.k8s.io/controller-runtime/pkg/event"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// Exercises StatusChangedPredicate.UpdateFunc: the predicate must reject
// update events with missing metadata/objects, and otherwise compare the old
// and new UpgradeConfig statuses.
var _ = Describe("UpgradeConfigController", func() {
	var (
		upgradeConfigName types.NamespacedName
		upgradeConfig     *upgradev1alpha1.UpgradeConfig
	)

	BeforeEach(func() {
		// Fresh fixture per spec to avoid cross-test mutation.
		upgradeConfigName = types.NamespacedName{
			Name:      "test-upgradeconfig",
			Namespace: "test-namespace",
		}
		upgradeConfig = testStructs.NewUpgradeConfigBuilder().WithNamespacedName(upgradeConfigName).GetUpgradeConfig()
	})

	Context("Update", func() {
		// Each of the four nil cases below must short-circuit to false.
		Context("When the old object meta doesn't exist", func() {
			It("will not return true", func() {
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: nil, ObjectOld: upgradeConfig, MetaNew: upgradeConfig.GetObjectMeta(), ObjectNew: upgradeConfig})
				Expect(result).To(BeFalse())
			})
		})
		Context("When the old object doesn't exist", func() {
			It("will not return true", func() {
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: upgradeConfig.GetObjectMeta(), ObjectOld: nil, MetaNew: upgradeConfig.GetObjectMeta(), ObjectNew: upgradeConfig})
				Expect(result).To(BeFalse())
			})
		})
		Context("When the new object meta doesn't exist", func() {
			It("will not return true", func() {
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: upgradeConfig.GetObjectMeta(), ObjectOld: upgradeConfig, MetaNew: nil, ObjectNew: upgradeConfig})
				Expect(result).To(BeFalse())
			})
		})
		Context("When the new object doesn't exist", func() {
			It("will not return true", func() {
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: upgradeConfig.GetObjectMeta(), ObjectOld: upgradeConfig, MetaNew: upgradeConfig.GetObjectMeta(), ObjectNew: nil})
				Expect(result).To(BeFalse())
			})
		})
		// NOTE(review): per these specs the predicate returns true when the
		// two statuses are EQUAL and false when they differ — confirm this
		// matches the intent in statuschanged_predicate.go.
		Context("When the old and new events match", func() {
			It("will return true", func() {
				uc1 := testStructs.NewUpgradeConfigBuilder().WithNamespacedName(upgradeConfigName).GetUpgradeConfig()
				uc2 := testStructs.NewUpgradeConfigBuilder().WithNamespacedName(upgradeConfigName).GetUpgradeConfig()
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: uc1.GetObjectMeta(), ObjectOld: uc1, MetaNew: uc2.GetObjectMeta(), ObjectNew: uc2})
				Expect(result).To(BeTrue())
			})
		})
		Context("When the old and new events do not match", func() {
			It("will not return true", func() {
				uc1 := testStructs.NewUpgradeConfigBuilder().WithNamespacedName(upgradeConfigName).GetUpgradeConfig()
				uc2 := testStructs.NewUpgradeConfigBuilder().WithNamespacedName(upgradeConfigName).GetUpgradeConfig()
				// Diverge only the status history so any difference is
				// attributable to status comparison alone.
				uc2.Status.History = []upgradev1alpha1.UpgradeHistory{{Version: "something else"}}
				result := StatusChangedPredicate.UpdateFunc(event.UpdateEvent{MetaOld: uc1.GetObjectMeta(), ObjectOld: uc1, MetaNew: uc2.GetObjectMeta(), ObjectNew: uc2})
				Expect(result).To(BeFalse())
			})
		})
	})
})
lcnem/eurx
x/estmdist/types/events.go
package types // Event types for estmdist module const ( EventTypeEstmdist = ModuleName AttributeKeyInflation = "estm_dist_inflation" AttributeKeyStatus = "estm_dist_status" AttributeValueInactive = "inactive" )
bubenheimer/androidx
inspection/inspection/src/main/java/androidx/inspection/Inspector.java
/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.inspection;

import android.annotation.SuppressLint;

import androidx.annotation.NonNull;

import java.util.concurrent.Executor;

/**
 * Implementations of this class are responsible for handling commands from the frontend and
 * sending back events.
 * <p>
 * Inspector's methods are called on single threaded {@link InspectorExecutors#primary()} executor.
 */
public abstract class Inspector {

    // Assigned once in the constructor and never reassigned; final makes the
    // immutability explicit and safe for publication.
    @NonNull
    private final Connection mConnection;

    /**
     * @param connection a connection object that allows to send events to studio
     */
    public Inspector(@NonNull Connection connection) {
        mConnection = connection;
    }

    /**
     * Called when this inspector is no longer needed.
     * <p>
     * Agent should use this callback to unsubscribe from any events that it is listening to.
     */
    public void onDispose() {
    }

    /**
     * An inspector can implement this to handle incoming commands.
     * <p>
     * Every command should be replied with a {@link CommandCallback#reply(byte[])} call on the
     * given {@code callback} object.
     *
     * @param data a raw byte array of the command sent by studio.
     * @param callback a callback to reply on the given command.
     */
    public abstract void onReceiveCommand(@NonNull byte[] data, @NonNull CommandCallback callback);

    /**
     * Returns a connection that allows to send events to Studio.
     */
    @NonNull
    protected final Connection getConnection() {
        return mConnection;
    }

    /**
     * Callback to reply on an command from the studio
     */
    public interface CommandCallback {
        /**
         * Sends a response on the previously handled command.
         *
         * @param response a raw byte array of the response to studio command.
         */
        // Users don't implement this callback, but call methods on it themselves
        @SuppressLint("CallbackMethodName")
        void reply(@NonNull byte[] response);

        /**
         * Handles a signal sent from Studio that this command should be cancelled, if possible.
         *
         * @param executor There is no guarantee on which thread the listener will be triggered on,
         *                 and as an inspector developer, you should be aware of this explicitly.
         *                 As a result, we require you to pass in a custom executor to provide the
         *                 flexibility to allow running the cancellation behavior on a different
         *                 thread. If you don't care about this, use a direct executor instead.
         * @param runnable the listener to run when command is cancelled.
         */
        // Users don't implement this callback, but call methods on it themselves
        @SuppressLint({"PairedRegistration", "CallbackMethodName"})
        void addCancellationListener(@NonNull Executor executor, @NonNull Runnable runnable);
    }
}
marcua/qurk_experiments
qurkexp/estimation/django_includes.py
"""Bootstrap helper: put the repository root on sys.path and point Django at
the qurkexp settings module before importing Django machinery."""
import os
import sys

# The repository root lives two directories above this file.
ROOT = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
sys.path.append(ROOT)

os.environ['DJANGO_SETTINGS_MODULE'] = 'qurkexp.settings'
from django.core.management import setup_environ
from django.conf import settings
olivier-maury/unity
engine/src/main/java/pl/edu/icm/unity/engine/translation/in/SystemInputTranslationProfileProvider.java
/*
 * Copyright (c) 2017 Bixbit - <NAME> All rights reserved.
 * See LICENCE.txt file for licensing information.
 */
package pl.edu.icm.unity.engine.translation.in;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;

import pl.edu.icm.unity.engine.translation.SystemTranslationProfileProviderBase;
import pl.edu.icm.unity.types.translation.ProfileType;

/**
 * Provides system input translation profiles
 * @author P.Piernik
 *
 */
@Component
public class SystemInputTranslationProfileProvider extends SystemTranslationProfileProviderBase
{
	@Autowired
	public SystemInputTranslationProfileProvider(ApplicationContext applicationContext)
	{
		super(applicationContext);
	}

	/** Narrows the base provider to INPUT profiles only. */
	@Override
	protected ProfileType getType()
	{
		return ProfileType.INPUT;
	}
}
abique/mimosa
mimosa/http/mime-db.hh
#pragma once # include <string> # include <unordered_map> # include "../non-copyable.hh" namespace mimosa { namespace http { /** * @ingroup Http */ class MimeDb : private NonCopyable { public: static MimeDb & instance(); std::string mimeType(const std::string & filename); private: MimeDb(); void load(); std::unordered_map<std::string, std::string> mime_types_; }; } }
FernandoBLima/data-structures
src/DataStructures/HashTable/dump_hashTable.js
const HashTable = require('./hashTable');

/**
 * Simplistic hash table built on the base HashTable's `_hash` function.
 * Stores values directly at their hashed position (no collision handling),
 * so two keys hashing to the same position overwrite each other.
 */
class DumpHashTable extends HashTable {
    constructor() {
        super();
        // Backing store keyed by hash position.
        this.list = {};
        // Number of occupied positions.
        this.length = 0;
    }

    /**
     * Insert (or overwrite) the value stored for a key.
     * @param {any} key
     * @param {any} value
     */
    insert(key, value) {
        const position = this._hash(key);
        // Count the slot only on first occupancy; overwriting an existing
        // position must not inflate length.
        if (!(position in this.list)) {
            this.length++;
        }
        this.list[position] = value;
        return;
    }

    /**
     * Return the stored value for a key, or `undefined` if absent.
     * Returns `false` when the table is empty or the key is null/undefined.
     * @param {any} key
     * @return {any}
     */
    get(key) {
        // `key != null` (loose) rejects only null/undefined, so falsy keys
        // such as 0 or '' are still looked up.
        if (this.length > 0 && key != null) {
            return this.list[this._hash(key)];
        }
        return false;
    }

    /**
     * Remove the entry for a key, if present.
     * @param {any} key
     */
    remove(key) {
        if (this.length > 0 && key != null) {
            const position = this._hash(key);
            // Only decrement when the slot actually held an entry; removing
            // a missing key must not corrupt length. Delete the property
            // (rather than assigning null) so the slot reads as unoccupied.
            if (position in this.list) {
                delete this.list[position];
                this.length--;
            }
        }
        return;
    }
}

module.exports = DumpHashTable;
alod83/versatile-data-kit
projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java
<filename>projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java
/*
 * Copyright 2021 VMware, Inc.
 * SPDX-License-Identifier: Apache-2.0
 */

package com.vmware.taurus.service.graphql.strategy.datajob;

import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import com.vmware.taurus.service.graphql.model.Criteria;
import com.vmware.taurus.service.graphql.model.V2DataJob;
import com.vmware.taurus.service.graphql.model.V2DataJobConfig;
import com.vmware.taurus.service.graphql.model.V2DataJobSchedule;
import com.vmware.taurus.service.graphql.model.Filter;
import org.junit.jupiter.api.Test;
import org.springframework.data.domain.Sort;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.util.*;
import java.util.function.Predicate;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for JobFieldStrategyByNextRun: derives a job's next-run epoch from
 * its cron schedule and builds filter/search criteria over that field.
 */
class JobFieldStrategyByNextRunTest {

   private final JobFieldStrategyByNextRun strategyByNextRun = new JobFieldStrategyByNextRun();

   @Test
   void testJobNextRunStrategy_whenGettingStrategyName_shouldBeSpecific() {
      assertThat(strategyByNextRun.getStrategyName()).isEqualTo(JobFieldStrategyBy.NEXT_RUN_EPOCH_SECS);
   }

   @Test
   void testJobNextRunStrategy_whenAlteringFieldData_shouldModifyState() {
      String scheduleCron = "12 5 2 3 *";
      // Compute the expected next execution independently via cron-utils so
      // the assertion does not depend on the strategy's own implementation.
      var executionTime = ExecutionTime.forCron(
            new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX)).parse(scheduleCron));
      Optional<ZonedDateTime> utc = executionTime.nextExecution(ZonedDateTime.now(ZoneId.of("UTC")));
      int baseTime = Math.toIntExact(utc.map(ChronoZonedDateTime::toEpochSecond).get());
      V2DataJob dataJob = new V2DataJob();
      V2DataJobConfig dataJobConfig = new V2DataJobConfig();
      V2DataJobSchedule dataJobSchedule = new V2DataJobSchedule();
      dataJobConfig.setSchedule(dataJobSchedule);
      dataJobSchedule.setScheduleCron(scheduleCron);
      dataJob.setConfig(dataJobConfig);

      assertThat(dataJob.getConfig().getSchedule().getNextRunEpochSeconds()).isZero();

      strategyByNextRun.alterFieldData(dataJob);

      int nextRunEpochSeconds = dataJob.getConfig().getSchedule().getNextRunEpochSeconds();
      assertThat(nextRunEpochSeconds).isNotZero().isEqualTo(baseTime);
   }

   @Test
   void testJobNextRunStrategy_whenAlteringFieldDataWithNullConfig_shouldNotModifyState() {
      V2DataJob dataJob = createDummyJob("12 5 2 3 *");
      dataJob.setConfig(null);

      strategyByNextRun.alterFieldData(dataJob);

      assertThat(dataJob.getConfig()).isNull();
   }

   @Test
   void testJobNextRunStrategy_whenAlteringFieldDataWithNullSchedule_shouldNotModifyState() {
      V2DataJob dataJob = createDummyJob(null);
      dataJob.setConfig(new V2DataJobConfig());

      strategyByNextRun.alterFieldData(dataJob);

      assertThat(dataJob.getConfig().getSchedule()).isNull();
   }

   @Test
   void testJobNextRunStrategy_whenAlteringFieldDataWithEmptySchedule_shouldReturnInvalidSchedule() {
      // Blank and malformed cron expressions are signalled with -1.
      V2DataJob dataJob = createDummyJob(null);
      V2DataJobSchedule dataJobSchedule = new V2DataJobSchedule();
      dataJobSchedule.setScheduleCron("  ");
      V2DataJobConfig dataJobConfig = new V2DataJobConfig();
      dataJobConfig.setSchedule(dataJobSchedule);
      dataJob.setConfig(dataJobConfig);

      strategyByNextRun.alterFieldData(dataJob);

      assertThat(dataJob.getConfig().getSchedule().getNextRunEpochSeconds()).isEqualTo(-1);
   }

   @Test
   void testJobNextRunStrategy_whenAlteringFieldDataWithInvalidSchedule_shouldReturnInvalidSchedule() {
      V2DataJob dataJob = createDummyJob(null);
      V2DataJobSchedule dataJobSchedule = new V2DataJobSchedule();
      dataJobSchedule.setScheduleCron("* * ");
      V2DataJobConfig dataJobConfig = new V2DataJobConfig();
      dataJobConfig.setSchedule(dataJobSchedule);
      dataJob.setConfig(dataJobConfig);

      strategyByNextRun.alterFieldData(dataJob);

      assertThat(dataJob.getConfig().getSchedule().getNextRunEpochSeconds()).isEqualTo(-1);
   }

   @Test
   void testJobNextRunStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
      Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
      V2DataJob a = createDummyJob("5 4 * * *");
      V2DataJob b = createDummyJob("5 6 * * *"); // later than previous

      Criteria<V2DataJob> v2DataJobCriteria = strategyByNextRun.computeFilterCriteria(baseCriteria, baseFilter);

      assertThat(v2DataJobCriteria.getPredicate().test(a)).isTrue();
      assertThat(v2DataJobCriteria.getComparator().compare(a, b)).isPositive();
   }

   /**
    * This test should create two data jobs executed:
    * a - “At 04:05 on day-of-month 1 and on Monday in January.”
    * b 0 “At 04:05 every day”
    *
    * by this info we create a two dates to make a range:
    * Next week and February 1st next year. This will makes a range which should include data job "a", but excludes "b"
    * so that test does not fail each year on specific time
    */
   @Test
   void testJobNextRunStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValidCriteria() {
      Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
      Filter baseFilter = new Filter("config.schedule.nextRunEpochSeconds",
            String.format("%d-%d",getNextWeek(), getSecondMonthOfNextYear()), Sort.Direction.ASC);
      V2DataJob a = createDummyJob("5 4 1 1 1");
      V2DataJob b = createDummyJob("5 6 * * *"); // later than previous

      Criteria<V2DataJob> v2DataJobCriteria = strategyByNextRun.computeFilterCriteria(baseCriteria, baseFilter);

      //assertThat(v2DataJobCriteria.getPredicate().test(a)).isTrue(); // TODO
      assertThat(v2DataJobCriteria.getPredicate().test(b)).isFalse();
      assertThat(v2DataJobCriteria.getComparator().compare(a, b)).isPositive();
   }

   @Test
   void testJobNextRunStrategy_whenComputingInvalidSearchProvided_shouldReturnValidPredicate() {
      // Free-text search is not supported for this numeric field, so any
      // search string yields a predicate matching nothing.
      Predicate<V2DataJob> predicate = strategyByNextRun.computeSearchCriteria("A");

      V2DataJob a = createDummyJob("5 6 * * *");

      assertThat(predicate.test(a)).isFalse();
   }

   // Builds a job with the given cron (may be null) and pre-populates its
   // next-run field via the strategy under test.
   private V2DataJob createDummyJob(String schedule) {
      V2DataJob job = new V2DataJob();
      V2DataJobConfig config = new V2DataJobConfig();
      V2DataJobSchedule dataJobSchedule = new V2DataJobSchedule();
      dataJobSchedule.setScheduleCron(schedule);
      config.setSchedule(dataJobSchedule);
      job.setConfig(config);
      strategyByNextRun.alterFieldData(job);
      return job;
   }

   // Epoch seconds of Feb 1st next year (upper bound of the filter range).
   private long getSecondMonthOfNextYear() {
      final Calendar calendar = Calendar.getInstance();
      calendar.setTime(new Date());
      calendar.set(Calendar.DAY_OF_MONTH, 1);
      calendar.set(Calendar.MONTH, 1);
      calendar.add(Calendar.YEAR, 1);
      return calendar.getTimeInMillis() / 1000;
   }

   // Epoch seconds one week from now (lower bound of the filter range).
   private long getNextWeek() {
      final Calendar calendar = Calendar.getInstance();
      calendar.setTime(new Date());
      calendar.add(Calendar.WEEK_OF_YEAR, 1);
      return calendar.getTimeInMillis() / 1000;
   }
}
bmf-san/Gobel
app/usecase/admin_repository.go
package usecase

import "github.com/bmf-san/gobel-api/app/domain"

// An AdminRepository is a repository interface for an authentication.
type AdminRepository interface {
	// FindByID returns the admin with the given ID.
	FindByID(id int) (domain.Admin, error)
	// FindByCredentials returns the admin matching the supplied credentials.
	FindByCredentials(req RequestCredential) (domain.Admin, error)
}
reven86/dava.engine
Programs/QuickEd/Classes/Model/PackageHierarchy/PackageControlsNode.cpp
#include "PackageControlsNode.h" #include "ControlNode.h" #include "PackageVisitor.h" #include "PackageNode.h" #include "UI/UIPackage.h" #include "UI/UIControl.h" #include "Utils/Utils.h" using namespace DAVA; PackageControlsNode::PackageControlsNode(PackageNode* parent_, const String& name_) : ControlsContainerNode(parent_) , name(name_) { } PackageControlsNode::~PackageControlsNode() { for (ControlNode* node : nodes) node->Release(); nodes.clear(); } void PackageControlsNode::Add(ControlNode* node) { DVASSERT(node->GetParent() == nullptr); DVASSERT(node->GetPackageContext() == nullptr); node->SetParent(this); node->SetPackageContext(GetPackage()->GetContext()); nodes.push_back(SafeRetain(node)); } void PackageControlsNode::InsertAtIndex(int index, ControlNode* node) { DVASSERT(node->GetParent() == nullptr); DVASSERT(node->GetPackageContext() == nullptr); node->SetParent(this); node->SetPackageContext(GetPackage()->GetContext()); nodes.insert(nodes.begin() + index, SafeRetain(node)); } void PackageControlsNode::Remove(ControlNode* node) { auto it = find(nodes.begin(), nodes.end(), node); if (it != nodes.end()) { DVASSERT(node->GetParent() == this); node->SetParent(nullptr); DVASSERT(node->GetPackageContext() == GetPackage()->GetContext()); node->SetPackageContext(nullptr); nodes.erase(it); SafeRelease(node); } else { DVASSERT(false); } } int PackageControlsNode::GetCount() const { return static_cast<int>(nodes.size()); } ControlNode* PackageControlsNode::Get(int index) const { return nodes[index]; } void PackageControlsNode::Accept(PackageVisitor* visitor) { visitor->VisitControls(this); } String PackageControlsNode::GetName() const { return name; } bool PackageControlsNode::IsEditingSupported() const { return false; } bool PackageControlsNode::IsInsertingControlsSupported() const { return !IsReadOnly(); } bool PackageControlsNode::CanInsertControl(const ControlNode* node, DAVA::int32 pos) const { return !IsReadOnly(); } bool PackageControlsNode::CanRemove() const 
{ return false; } bool PackageControlsNode::CanCopy() const { return false; } void PackageControlsNode::RefreshControlProperties() { for (ControlNode* node : nodes) node->RefreshProperties(); } ControlNode* PackageControlsNode::FindControlNodeByName(const String& name) const { FastName fName(name); for (auto it = nodes.begin(); it != nodes.end(); ++it) { if ((*it)->GetControl()->GetName() == fName) return *it; } return NULL; } ControlNode* PackageControlsNode::FindControlNodeByPath(const String& path) const { Vector<String> strPath; Split(path, "/", strPath, false, true); if (!strPath.empty()) { ControlNode* res = FindControlNodeByName(strPath[0]); for (size_t i = 1; res != nullptr && i < strPath.size(); i++) { res = res->FindByName(strPath[i]); } return res; } return nullptr; } DAVA::Vector<ControlNode*>::const_iterator PackageControlsNode::begin() const { return nodes.begin(); } DAVA::Vector<ControlNode*>::const_iterator PackageControlsNode::end() const { return nodes.end(); } DAVA::Vector<ControlNode*>::iterator PackageControlsNode::begin() { return nodes.begin(); } DAVA::Vector<ControlNode*>::iterator PackageControlsNode::end() { return nodes.end(); }
tasseron/TOMP-REF
tomp_ref/src/main/java/org/tomp/api/booking/BookingProvider.java
package org.tomp.api.booking; import javax.validation.Valid; import io.swagger.model.Booking; import io.swagger.model.BookingOperation; import io.swagger.model.BookingOption; public interface BookingProvider { Booking addNewBooking(@Valid BookingOption body, String acceptLanguage); Booking addNewBookingEvent(BookingOperation body, String acceptLanguage, String id); }
sarang-apps/darshan_browser
ui/events/keycodes/keyboard_code_conversion.cc
<filename>ui/events/keycodes/keyboard_code_conversion.cc
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ui/events/keycodes/keyboard_code_conversion.h"

#include <algorithm>

#include "base/stl_util.h"
#include "ui/events/event_constants.h"
#include "ui/events/keycodes/dom/dom_code.h"
#include "ui/events/keycodes/dom/dom_key.h"
#include "ui/events/keycodes/dom_us_layout_data.h"
#include "ui/events/types/event_type.h"

namespace ui {

namespace {

// True for the right-hand variants of the four located modifier codes.
bool IsRightSideDomCode(DomCode code) {
  return (code == DomCode::SHIFT_RIGHT) || (code == DomCode::CONTROL_RIGHT) ||
         (code == DomCode::ALT_RIGHT) || (code == DomCode::META_RIGHT);
}

}  // anonymous namespace

// Returns the character that |dom_code| produces on the US layout under
// |flags| (shift / caps-lock), or 0 if the key is not a character key.
base::char16 DomCodeToUsLayoutCharacter(DomCode dom_code, int flags) {
  DomKey dom_key;
  KeyboardCode key_code;
  if (DomCodeToUsLayoutDomKey(dom_code, flags, &dom_key, &key_code) &&
      dom_key.IsCharacter()) {
    return dom_key.ToCharacter();
  }
  return 0;
}

// Linear scan of the non-printable code table; on a hit, fills the outputs
// and returns true. Returns false for codes not in the table.
bool DomCodeToNonPrintableDomKey(DomCode dom_code,
                                 DomKey* out_dom_key,
                                 KeyboardCode* out_key_code) {
  for (const auto& it : kNonPrintableCodeMap) {
    if (it.dom_code == dom_code) {
      *out_dom_key = it.dom_key;
      *out_key_code = NonPrintableDomKeyToKeyboardCode(it.dom_key);
      return true;
    }
  }
  return false;
}

// Maps |dom_code| + modifier |flags| to the US-layout DomKey and the
// corresponding non-located Windows VKEY. Printable keys are resolved via
// kPrintableCodeMap; everything else falls through to the non-printable table.
bool DomCodeToUsLayoutDomKey(DomCode dom_code,
                             int flags,
                             DomKey* out_dom_key,
                             KeyboardCode* out_key_code) {
  for (const auto& it : kPrintableCodeMap) {
    if (it.dom_code == dom_code) {
      // |state| selects the unshifted (0) or shifted (1) character.
      int state = ((flags & EF_SHIFT_DOWN) == EF_SHIFT_DOWN);
      base::char16 ch = it.character[state];
      if ((flags & EF_CAPS_LOCK_ON) == EF_CAPS_LOCK_ON) {
        // Lowercase the candidate; if the result is an ASCII letter, the key
        // is alphabetic, so caps lock inverts the shift state for it.
        ch |= 0x20;
        if ((ch >= 'a') && (ch <= 'z'))
          ch = it.character[state ^ 1];
      }
      *out_dom_key = DomKey::FromCharacter(ch);
      *out_key_code = DomCodeToUsLayoutNonLocatedKeyboardCode(dom_code);
      return true;
    }
  }
  return DomCodeToNonPrintableDomKey(dom_code, out_dom_key, out_key_code);
}

// When Control is held, maps |dom_code| to the ASCII control character it
// generates (e.g. Ctrl-A -> 0x01) plus a matching VKEY. Returns false if
// Control is not down or the key produces no control character.
bool DomCodeToControlCharacter(DomCode dom_code,
                               int flags,
                               DomKey* dom_key,
                               KeyboardCode* key_code) {
  if ((flags & EF_CONTROL_DOWN) == 0)
    return false;

  int code = static_cast<int>(dom_code);
  const int kKeyA = static_cast<int>(DomCode::US_A);
  // Control-A - Control-Z map to 0x01 - 0x1A.
  if (code >= kKeyA && code <= static_cast<int>(DomCode::US_Z)) {
    *dom_key = DomKey::FromCharacter(code - kKeyA + 1);
    *key_code = static_cast<KeyboardCode>(code - kKeyA + VKEY_A);
    // A few control characters coincide with dedicated keys; report those
    // VKEYs instead of the letter's VKEY.
    switch (dom_code) {
      case DomCode::US_H:
        *key_code = VKEY_BACK;
        break;
      case DomCode::US_I:
        *key_code = VKEY_TAB;
        break;
      case DomCode::US_M:
        *key_code = VKEY_RETURN;
        break;
      default:
        break;
    }
    return true;
  }
  if (flags & EF_SHIFT_DOWN) {
    // Shifted punctuation that yields control characters on US layouts.
    switch (dom_code) {
      case DomCode::DIGIT2:
        // NUL
        *dom_key = DomKey::FromCharacter(0);
        *key_code = VKEY_2;
        return true;
      case DomCode::DIGIT6:
        // RS
        *dom_key = DomKey::FromCharacter(0x1E);
        *key_code = VKEY_6;
        return true;
      case DomCode::MINUS:
        // US
        *dom_key = DomKey::FromCharacter(0x1F);
        *key_code = VKEY_OEM_MINUS;
        return true;
      default:
        return false;
    }
  }
  // Unshifted keys that yield control characters.
  switch (dom_code) {
    case DomCode::ENTER:
      // NL
      *dom_key = DomKey::FromCharacter(0x0A);
      *key_code = VKEY_RETURN;
      return true;
    case DomCode::BRACKET_LEFT:
      // ESC
      *dom_key = DomKey::FromCharacter(0x1B);
      *key_code = VKEY_OEM_4;
      return true;
    case DomCode::BACKSLASH:
      // FS
      *dom_key = DomKey::FromCharacter(0x1C);
      *key_code = VKEY_OEM_5;
      return true;
    case DomCode::BRACKET_RIGHT:
      // GS
      *dom_key = DomKey::FromCharacter(0x1D);
      *key_code = VKEY_OEM_6;
      return true;
    default:
      return false;
  }
}

// Returns a Windows-based VKEY for a non-printable DOM Level 3 |key|.
// The returned VKEY is non-positional (e.g. VKEY_SHIFT).
KeyboardCode NonPrintableDomKeyToKeyboardCode(DomKey dom_key) {
  for (const auto& it : kDomKeyToKeyboardCodeMap) {
    if (it.dom_key == dom_key)
      return it.key_code;
  }
  return VKEY_UNKNOWN;
}

// Determine the non-located VKEY corresponding to a located VKEY.
// Collapses located VKEYs (left/right modifiers, numpad digits) onto their
// non-located equivalents; every other VKEY passes through unchanged.
KeyboardCode LocatedToNonLocatedKeyboardCode(KeyboardCode key_code) {
  switch (key_code) {
    case VKEY_RWIN:
      return VKEY_LWIN;
    case VKEY_LSHIFT:
    case VKEY_RSHIFT:
      return VKEY_SHIFT;
    case VKEY_LCONTROL:
    case VKEY_RCONTROL:
      return VKEY_CONTROL;
    case VKEY_LMENU:
    case VKEY_RMENU:
      return VKEY_MENU;
    case VKEY_NUMPAD0:
      return VKEY_0;
    case VKEY_NUMPAD1:
      return VKEY_1;
    case VKEY_NUMPAD2:
      return VKEY_2;
    case VKEY_NUMPAD3:
      return VKEY_3;
    case VKEY_NUMPAD4:
      return VKEY_4;
    case VKEY_NUMPAD5:
      return VKEY_5;
    case VKEY_NUMPAD6:
      return VKEY_6;
    case VKEY_NUMPAD7:
      return VKEY_7;
    case VKEY_NUMPAD8:
      return VKEY_8;
    case VKEY_NUMPAD9:
      return VKEY_9;
    default:
      return key_code;
  }
}

// Determine the located VKEY corresponding to a non-located VKEY.
// |dom_code| disambiguates the physical location (right-side modifier or
// numpad digit); unrelated VKEYs pass through unchanged.
KeyboardCode NonLocatedToLocatedKeyboardCode(KeyboardCode key_code,
                                             DomCode dom_code) {
  switch (key_code) {
    case VKEY_SHIFT:
      return IsRightSideDomCode(dom_code) ? VKEY_RSHIFT : VKEY_LSHIFT;
    case VKEY_CONTROL:
      return IsRightSideDomCode(dom_code) ? VKEY_RCONTROL : VKEY_LCONTROL;
    case VKEY_MENU:
      return IsRightSideDomCode(dom_code) ? VKEY_RMENU : VKEY_LMENU;
    case VKEY_LWIN:
      return IsRightSideDomCode(dom_code) ? VKEY_RWIN : VKEY_LWIN;
    case VKEY_0:
      return (dom_code == DomCode::NUMPAD0) ? VKEY_NUMPAD0 : VKEY_0;
    case VKEY_1:
      return (dom_code == DomCode::NUMPAD1) ? VKEY_NUMPAD1 : VKEY_1;
    case VKEY_2:
      return (dom_code == DomCode::NUMPAD2) ? VKEY_NUMPAD2 : VKEY_2;
    case VKEY_3:
      return (dom_code == DomCode::NUMPAD3) ? VKEY_NUMPAD3 : VKEY_3;
    case VKEY_4:
      return (dom_code == DomCode::NUMPAD4) ? VKEY_NUMPAD4 : VKEY_4;
    case VKEY_5:
      return (dom_code == DomCode::NUMPAD5) ? VKEY_NUMPAD5 : VKEY_5;
    case VKEY_6:
      return (dom_code == DomCode::NUMPAD6) ? VKEY_NUMPAD6 : VKEY_6;
    case VKEY_7:
      return (dom_code == DomCode::NUMPAD7) ? VKEY_NUMPAD7 : VKEY_7;
    case VKEY_8:
      return (dom_code == DomCode::NUMPAD8) ? VKEY_NUMPAD8 : VKEY_8;
    case VKEY_9:
      return (dom_code == DomCode::NUMPAD9) ? VKEY_NUMPAD9 : VKEY_9;
    default:
      return key_code;
  }
}

// Reverse lookup: VKEY -> DomCode on the US layout. The primary map is tried
// first, then a fallback table; returns DomCode::NONE if neither matches.
DomCode UsLayoutKeyboardCodeToDomCode(KeyboardCode key_code) {
  key_code = NonLocatedToLocatedKeyboardCode(key_code, DomCode::NONE);
  for (const auto& it : kDomCodeToKeyboardCodeMap) {
    if (it.key_code == key_code)
      return it.dom_code;
  }
  for (const auto& it : kFallbackKeyboardCodeToDomCodeMap) {
    if (it.key_code == key_code)
      return it.dom_code;
  }
  return DomCode::NONE;
}

// Forward lookup: DomCode -> VKEY on the US layout, via binary search over
// kDomCodeToKeyboardCodeMap (which is sorted by dom_code).
KeyboardCode DomCodeToUsLayoutKeyboardCode(DomCode dom_code) {
  const DomCodeToKeyboardCodeEntry* end =
      kDomCodeToKeyboardCodeMap + base::size(kDomCodeToKeyboardCodeMap);
  const DomCodeToKeyboardCodeEntry* found =
      std::lower_bound(kDomCodeToKeyboardCodeMap, end, dom_code,
                       [](const DomCodeToKeyboardCodeEntry& a, DomCode b) {
                         return static_cast<int>(a.dom_code) <
                                static_cast<int>(b);
                       });
  if ((found != end) && (found->dom_code == dom_code))
    return found->key_code;
  return VKEY_UNKNOWN;
}

// Convenience: US-layout VKEY for |dom_code|, collapsed to non-located form.
KeyboardCode DomCodeToUsLayoutNonLocatedKeyboardCode(DomCode dom_code) {
  return LocatedToNonLocatedKeyboardCode(
      DomCodeToUsLayoutKeyboardCode(dom_code));
}

// Maps a modifier DomKey to its event-flag bit, or EF_NONE for keys that
// have no flag representation (listed below).
int ModifierDomKeyToEventFlag(DomKey key) {
  switch (key) {
    case DomKey::ALT:
      return EF_ALT_DOWN;
    case DomKey::ALT_GRAPH:
      return EF_ALTGR_DOWN;
    case DomKey::CAPS_LOCK:
      return EF_CAPS_LOCK_ON;
    case DomKey::CONTROL:
      return EF_CONTROL_DOWN;
    case DomKey::META:
      return EF_COMMAND_DOWN;
    case DomKey::SHIFT:
      return EF_SHIFT_DOWN;
    case DomKey::SHIFT_LEVEL5:
      return EF_MOD3_DOWN;
    default:
      return EF_NONE;
  }
  // Not represented:
  //   DomKey::ACCEL
  //   DomKey::FN
  //   DomKey::FN_LOCK
  //   DomKey::HYPER
  //   DomKey::NUM_LOCK
  //   DomKey::SCROLL_LOCK
  //   DomKey::SUPER
  //   DomKey::SYMBOL_LOCK
}

}  // namespace ui
jasonadamyoung/busterleague
app/models/game.rb
# === COPYRIGHT:
# Copyright (c) <NAME>
# === LICENSE:
# see LICENSE file

# A single played game between two teams. Also carries the import pipeline
# that scrapes the league's published HTML game-results pages and builds the
# Game rows plus their associated Boxscore and per-team TeamGame records.
class Game < ApplicationRecord
  include CleanupTools

  has_one :boxscore
  has_many :team_games
  belongs_to :home_team, :class_name => 'Team'
  belongs_to :away_team, :class_name => 'Team'

  scope :for_season, lambda {|season| where(season: season)}
  # Games where either side was held hitless.
  scope :no_hitters, -> { where("home_hits = 0 OR away_hits = 0") }
  # No-hitters in which the fielding side also committed no errors.
  scope :perfects, -> { where("(home_hits = 0 AND away_errs = 0) OR (away_hits = 0 and home_errs = 0)") }

  after_create :create_or_update_boxscore, :create_or_update_team_games

  # Dumps this model and every dependent stats model (used by CleanupTools).
  def self.dump_all_data
    self.dump_data
    TeamGame.dump_data
    Boxscore.dump_data
    Inning.dump_data
    GameBattingStat.dump_data
    GamePitchingStat.dump_data
    Record.dump_data
    DailyRecord.dump_data
  end

  # The "current" season lags the calendar year by one.
  def self.current_season
    Date.today.year - 1
  end

  def self.available_seasons
    self.distinct.pluck(:season).sort
  end

  def self.latest_season
    self.maximum(:season)
  end

  # Games up to +date+, optionally restricted to one season.
  # Season 'all' (or 0) means every season.
  def self.through_season_date(season, date)
    if season == 'all' || season == 0
      through_date(date)
    else
      for_season(season).through_date(date)
    end
  end

  # First game date for the season ('all'/0 => across all seasons).
  def self.earliest_date(season)
    if season == 'all' || season == 0
      self.minimum(:date)
    else
      self.where(season: season).minimum(:date)
    end
  end

  # Last game date for the season ('all'/0 => across all seasons).
  def self.latest_date(season)
    if season == 'all' || season == 0
      self.maximum(:date)
    else
      self.where(season: season).maximum(:date)
    end
  end

  # Builds/refreshes the boxscore for this game (after_create callback) and
  # copies its hit/error totals onto the game row. Always returns true so the
  # callback chain is never halted.
  def create_or_update_boxscore
    if !self.boxscore_name.nil?
      boxscore = Boxscore.create_or_update_from_game(self)
      if boxscore
        self.home_hits = boxscore.home_team_stats["hits"]
        self.away_hits = boxscore.away_team_stats["hits"]
        self.home_errs = boxscore.home_team_stats["errors"]
        self.away_errs = boxscore.away_team_stats["errors"]
        self.save!
      end
    end
    true
  end

  # Creates or updates the two TeamGame rows (home and away perspectives)
  # for this game (after_create callback). Boxscore stats are folded in when
  # a boxscore exists. Always returns true.
  def create_or_update_team_games
    boxscore = self.boxscore

    # home team's game
    if !(home_game = TeamGame.where(game_id: self.id).where(home: true).first)
      home_game = TeamGame.new(:game_id => self.id, :date => self.date, :season => self.season)
    end
    home_game.team_id = self.home_team_id
    home_game.home = true
    home_game.opponent_id = self.away_team_id
    home_game.win = self.home_runs > self.away_runs
    home_game.runs = self.home_runs
    home_game.opponent_runs = self.away_runs
    home_game.total_innings = self.total_innings
    if boxscore
      home_game.opponent_runs = boxscore.away_runs
      home_game.hits = boxscore.home_team_stats["hits"]
      home_game.opponent_hits = boxscore.away_team_stats["hits"]
      home_game.errs = boxscore.home_team_stats["errors"]
      home_game.opponent_errs = boxscore.away_team_stats["errors"]
    end
    home_game.save!

    # away team's game
    if !(away_game = TeamGame.where(game_id: self.id).where(home: false).first)
      away_game = TeamGame.new(:game_id => self.id, :date => self.date, :season => self.season)
    end
    away_game.team_id = self.away_team_id
    away_game.home = false
    away_game.opponent_id = self.home_team_id
    away_game.win = self.away_runs > self.home_runs
    away_game.runs = self.away_runs
    away_game.opponent_runs = self.home_runs
    away_game.total_innings = self.total_innings
    if boxscore
      away_game.opponent_runs = boxscore.home_runs
      away_game.hits = boxscore.away_team_stats["hits"]
      away_game.opponent_hits = boxscore.home_team_stats["hits"]
      away_game.errs = boxscore.away_team_stats["errors"]
      away_game.opponent_errs = boxscore.home_team_stats["errors"]
    end
    away_game.save!
    true
  end

  # URL of the published game-results report for +season+.
  # 1999 used a different report filename.
  def self.game_results_url(season)
    base_url = "#{Settings.web_reports_base_url}/#{season}"
    if season != 1999
      "#{base_url}/orggr.htm"
    else
      "#{base_url}/org1_orggr_1999.htm"
    end
  end

  # Fetches the raw game-results HTML for +season+, or nil on a non-200
  # response.
  def self.get_game_results_html(season)
    response = RestClient.get(self.game_results_url(season))
    # BUGFIX: was `if(!response.code == 200)`, which parses as
    # `(!response.code) == 200` and is always false, so failures were
    # silently passed through to the parser.
    return nil if response.code != 200
    response.to_str
  end

  def self.parsed_gameresults_for_season(season)
    GameResultsParser.new(self.get_game_results_html(season))
  end

  # Resolves the parsed team abbreviations to Team ids.
  def self.get_teams(hash_data)
    return_data = {}
    return_data['home_team_id'] = Team.id_for_abbreviation(hash_data['home_team_string'])
    return_data['away_team_id'] = Team.id_for_abbreviation(hash_data['away_team_string'])
    return_data
  end

  # Imports every parsed game result for +season+, creating new Game rows or
  # updating existing ones (matched on date + season + both team ids).
  def self.create_or_update_for_season(season)
    gr_data = self.parsed_gameresults_for_season(season).game_results_data
    gr_data.each do |hashid, hash_data|
      # The 2000/2001 reports carry dates one year ahead of the real season.
      hash_data['date'] = hash_data['date'] - 1.year if [2000, 2001].include?(season)
      hash_data.merge!(self.get_teams(hash_data))

      if (game = self.where(date: hash_data['date'])
                     .where(season: season)
                     .where(home_team_id: hash_data['home_team_id'])
                     .where(away_team_id: hash_data['away_team_id'])
                     .first)
        game.assign_attributes(hash_data)
        game.save!
      else
        game = self.new(season: season)
        game.assign_attributes(hash_data)
        game.save!
      end
    end
    true
  end
end
dolittle-platform/monitor.ingress
Source/Pinger/src/main/java/io/dolittle/moose/pinger/config/PingConfig.java
<filename>Source/Pinger/src/main/java/io/dolittle/moose/pinger/config/PingConfig.java // Copyright (c) Dolittle. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. package io.dolittle.moose.pinger.config; import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler; import org.springframework.context.annotation.*; import org.springframework.scheduling.annotation.AsyncConfigurer; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import java.util.concurrent.Executor; @Profile("pinger") @Configuration @ComponentScan(basePackages = {"io.dolittle.moose.pinger", "io.dolittle.moose.kubernetes.config"}) @Import(SecurityConfig.class) @EnableAsync @EnableScheduling @PropertySource(value = {"classpath:pinger.properties"}) public class PingConfig implements AsyncConfigurer { @Override public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() { return (throwable, method, objects)-> throwable.printStackTrace(); } @Override public Executor getAsyncExecutor() { ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); executor.setCorePoolSize(20); executor.setMaxPoolSize(20); executor.setQueueCapacity(100); executor.setThreadNamePrefix("asyncExecutor-"); executor.initialize(); return executor; } }
Pentacode-IAFA/Quad-Remeshing
libs/quadwild/libs/vcglib/wrap/bmt/strip_mesh.h
/**************************************************************************** * VCGLib o o * * Visual and Computer Graphics Library o o * * _ O _ * * Copyright(C) 2004-2016 \/)\/ * * Visual Computing Lab /\/| * * ISTI - Italian National Research Council | * * \ * * All rights reserved. * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License (http://www.gnu.org/licenses/gpl.txt) * * for more details. * * * ****************************************************************************/ /**************************************************************************** History $Log: not supported by cvs2svn $ ****************************************************************************/ #ifndef VCG_STRIP_MESH_H #define VCG_STRIP_MESH_H #include <vector> namespace vcg { class StripMesh { public: enum Signature { NORMAL = 1, COLOR = 2, STRIP = 4 }; StripMesh(char *s); private: unsigned short _vert_size; unsigned short _norm_size; unsigned short _color_size; unsigned short _strip_size; Point3f *_vert_start; short *_norm_start; unsigned char *_color_start; unsigned short *_strip_start; }; class StripMeshBuilder { public: std::vector<Point3f> vert; std::vector<short> norm; std::vector<unsigned char> color; std::vector<unsigned short> strip; unsigned int Signature(); ///required size; unsigned int Size(); void Write(char *buffer, unsigned int size); }; }//namespace #endif
webOS-ports/org.webosports.app.maps
source/data/webOSGeolocation.js
<filename>source/data/webOSGeolocation.js
/* webOSGeolocation.js - LuneOS/webOS geolocation API
 *
 * Wraps the palm://com.palm.location service in an Enyo component and
 * exposes W3C-Geolocation-shaped results via onSuccess/onFailure events.
 */

enyo.kind({
    name: "webOSGeolocation",
    kind: "Component",
    events: {
        // Fired with a W3C-style position object (see webOStoW3C).
        onSuccess: "",
        // Fired with {message} describing the failure.
        onFailure: ""
    },
    components: [{
        // One-shot fix.
        name: "singleLocationService",
        kind: "PalmService",
        service: "palm://com.palm.location/",
        method: "getCurrentPosition",
        subscribe: false,
        resubscribe: false,
        onResponse: "LocationSuccess",
        onError: "LocationFail"
    }, {
        // Subscribed service delivering continuous position updates.
        name: "trackingLocationService",
        kind: "PalmService",
        service: "palm://com.palm.location/",
        method: "startTracking",
        subscribe: true,
        //resubscribe: false,
        onResponse: "LocationSuccess",
        onError: "LocationFail"
    }],
    // Requests a single position fix.
    getCurrentPosition: function (inSender, inEvent) {
        this.$.singleLocationService.send({accuracy: 3, maximumAge: 5, responseTime: 1});
    },
    // Shared success handler: converts the webOS payload to W3C shape.
    LocationSuccess: function (inSender, inEvent) {
        this.doSuccess(this.webOStoW3C(inEvent.data));
    },
    // Shared error handler: maps numeric errorCode to a readable message
    // via the errorCodes table when possible.
    LocationFail: function (inSender, inEvent) {
        var msg = "errorCode: " + inEvent.data.errorCode + "errorText: " + inEvent.data.errorText;
        if (this.errorCodes[inEvent.data.errorCode]) {
            msg = this.errorCodes[inEvent.data.errorCode];
        }
        this.log(msg);
        this.doFailure({message: msg});
    },
    // Starts continuous tracking (subscribed request).
    startTracking: function() {
        this.$.trackingLocationService.send({subscribe: true});
    },
    // Cancels the tracking subscription.
    stopTracking: function() {
        this.$.trackingLocationService.cancel();
    },
    // Converts a legacy webOS location payload into a W3C Geolocation
    // position object ({coords: {...}, timestamp}).
    webOStoW3C: function(inData) {
        /* WC3 Coordinates object Specification */
        /**
        Coordinates.latitude
        Coordinates.longitude
        Coordinates.altitude
        Coordinates.accuracy
        Coordinates.altitudeAccuracy
        Coordinates.heading
        Coordinates.speed
        **/
        /* webOS legacy geolocation API specifications */
        /**
        ["altitude", "heading", "horizAccuracy", "latitude", "longitude",
        "timestamp", "velocity", "vertAccuracy"]
        **/
        return {
            "coords": {
                "latitude": inData.latitude,
                "longitude": inData.longitude,
                "altitude": inData.altitude,
                "accuracy": inData.horizAccuracy,
                "altitudeAccuracy": inData.vertAccuracy,
                "heading": inData.heading,
                "speed": inData.velocity
            },
            "timestamp": inData.timestamp
        }
    },
    // Messages indexed by the service's numeric errorCode (0 = success).
    errorCodes: ["Success", "Timeout", "Position_Unavailable", "Unknown",
        "GPS_Permanent_Error - No GPS fix but can still get the cell and Wifi fixes. A TouchPad without GPS returns this error.",
        "LocationServiceOFF - No Location source available. Both Google and GPS are off.",
        "Permission Denied - The user has not accepted the terms of use for the Google Location Service, or the Google Service is off.",
        "The application already has a pending message ",
        "The application has been temporarily blacklisted. (The user is not allowing this application to use this service.)"]
});
karinamg17/nutricionistas
nutricionistas/nutri_app/migrations/0005_datosbiomedicos.py
<gh_stars>0 # Generated by Django 3.1.13 on 2021-11-30 12:53 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('nutri_app', '0004_auto_20211128_1559'), ] operations = [ migrations.CreateModel( name='DatosBiomedicos', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('antecedentes', models.JSONField(default=dict)), ('ginecologicos', models.JSONField(default=dict)), ('medicamentos', models.JSONField(default=dict)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='nutri_paciente_datos_biomedicos', to=settings.AUTH_USER_MODEL)), ], ), ]
beatrizserrano/galaxy
test/unit/data/model/migrations/versions/db2/1bceec30363a_create_tsi_test_branch.py
<filename>test/unit/data/model/migrations/versions/db2/1bceec30363a_create_tsi_test_branch.py """create tsi test branch Revision ID: 1bceec30363a Revises: Create Date: 2021-11-05 16:28:45.450830 """ # revision identifiers, used by Alembic. revision = "1bceec30363a" down_revision = None branch_labels = ("tsi",) depends_on = None def upgrade(): pass def downgrade(): pass
Creativelair/aoe-joust
components/navBar.js
import Link from 'next/link' import Grid from '@material-ui/core/Grid' import MiniMenu from './menu/miniMenu' import utilStyles from '../styles/utils.module.scss' export default function NavBar() { return ( <Grid container justify="center"> <Grid item md={6} xs={12}> <Link href="/"> <a> <img src="/img/aoe-joust-white.png" alt="aoe-joust" className={utilStyles.miniLogo} /> </a> </Link> </Grid> <Grid item md={6} xs={12}> <MiniMenu /> </Grid> </Grid> ) }