text
stringlengths
7
1.01M
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE113_HTTP_Response_Splitting__Environment_addCookieServlet_67a.java Label Definition File: CWE113_HTTP_Response_Splitting.label.xml Template File: sources-sinks-67a.tmpl.java */
/*
 * @description
 * CWE: 113 HTTP Response Splitting
 * BadSource: Environment Read data from an environment variable
 * GoodSource: A hardcoded string
 * Sinks: addCookieServlet
 *    GoodSink: URLEncode input
 *    BadSink : querystring to addCookie()
 * Flow Variant: 67 Data flow: data passed in a class from one method to another in different source files in the same package
 *
 * NOTE: this is a deliberately-vulnerable analysis-tool testcase (Juliet suite).
 * The "POTENTIAL FLAW" paths below are intentional and must not be "fixed".
 */

import javax.servlet.http.*;

public class CWE113_HTTP_Response_Splitting__Environment_addCookieServlet_67a extends AbstractTestCaseServlet {

    // Simple carrier used to pass the tainted/untainted string between the
    // "a" (source) and "b" (sink) halves of this flow-variant-67 testcase.
    static class Container {
        public String containerOne;
    }

    /** Bad flow: tainted environment value reaches the bad sink unescaped. */
    public void bad(HttpServletRequest request, HttpServletResponse response) throws Throwable {
        String data;

        /* get environment variable ADD */
        /* POTENTIAL FLAW: Read data from an environment variable */
        data = System.getenv("ADD");

        Container dataContainer = new Container();
        dataContainer.containerOne = data;
        (new CWE113_HTTP_Response_Splitting__Environment_addCookieServlet_67b()).badSink(dataContainer , request, response );
    }

    /** Runs both "good" variants (good source / good sink). */
    public void good(HttpServletRequest request, HttpServletResponse response) throws Throwable {
        goodG2B(request, response);
        goodB2G(request, response);
    }

    /* goodG2B() - use goodsource and badsink */
    private void goodG2B(HttpServletRequest request, HttpServletResponse response) throws Throwable {
        String data;

        /* FIX: Use a hardcoded string */
        data = "foo";

        Container dataContainer = new Container();
        dataContainer.containerOne = data;
        (new CWE113_HTTP_Response_Splitting__Environment_addCookieServlet_67b()).goodG2BSink(dataContainer , request, response );
    }

    /* goodB2G() - use badsource and goodsink */
    private void goodB2G(HttpServletRequest request, HttpServletResponse response) throws Throwable {
        String data;

        /* get environment variable ADD */
        /* POTENTIAL FLAW: Read data from an environment variable */
        data = System.getenv("ADD");

        Container dataContainer = new Container();
        dataContainer.containerOne = data;
        (new CWE113_HTTP_Response_Splitting__Environment_addCookieServlet_67b()).goodB2GSink(dataContainer , request, response );
    }

    /* Below is the main(). It is only used when building this testcase on
     * its own for testing or for building a binary to use in testing binary
     * analysis tools. It is not used when compiling all the testcases as one
     * application, which is how source code analysis tools are tested.
     */
    public static void main(String[] args) throws ClassNotFoundException, InstantiationException, IllegalAccessException {
        mainFromParent(args);
    }
}
package ca.djiang.junction.core;

/**
 * Minimal word-addressable memory abstraction.
 *
 * @param <Word> numeric type representing one machine word
 */
public interface JunMemory<Word extends Number> {

    /** Reads and returns the word stored at {@code addr}. */
    Word loadWord(int addr);

    /** Stores {@code value} at {@code addr}. */
    void storeWord(int addr, Word value);
}
package br.com.zupacademy.giovannimoratto.ecommerce.purchases.gateway; import br.com.zupacademy.giovannimoratto.ecommerce.purchases.costumer.PurchaseModel; import br.com.zupacademy.giovannimoratto.ecommerce.purchases.gateway.request.TransactionStatus; import org.hibernate.annotations.CreationTimestamp; import javax.persistence.*; import java.time.LocalDateTime; /** * @Author giovanni.moratto */ @Entity @Table(name = "tb_transacao") public class TransactionModel { /* Attributes */ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @Column(name = "id") private Long id; @Enumerated(EnumType.STRING) @Column(name = "status_transacao", nullable = false) private TransactionStatus status; @Column(name = "gateway_pagamento", nullable = false) private String gateway; @CreationTimestamp @Column(name = "data_criacao", nullable = false) private LocalDateTime createdAt; @ManyToOne @JoinColumn(name = "id_compra", nullable = false) private PurchaseModel purchase; /* Constructors */ // Default - JPA public TransactionModel() { } // Set Requests (PagseguroRequest.class or PaypalRequest.class) values in TransactionModel.class public TransactionModel(TransactionStatus status, String gateway, PurchaseModel purchase) { this.status = status; this.gateway = gateway; this.purchase = purchase; } // Check if Transaction Status is OK public boolean complete() { return this.status.equals(TransactionStatus.successful); } }
package pl.pp.tiplab.securevoipclient.client.register.dto;

import java.util.List;

import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import pl.pp.tiplab.securevoipclient.client.user.VoIPUser;

/**
 * DTO returned to a client after registration. Accessors are generated by
 * Lombok ({@code @Getter}/{@code @Setter}); a no-args constructor is provided
 * for deserialization frameworks.
 */
@Getter
@Setter
@NoArgsConstructor
public class RegisterResponse {

    // Nickname the server registered for this client.
    private String nick;
    // Token the client must present on subsequent requests.
    private String userToken;
    // Currently known users; element semantics defined by VoIPUser.
    private List<VoIPUser> users;
}
/*
 * Copyright (c) 2021 DumbDogDiner <dumbdogdiner.com>. All rights reserved.
 * Licensed under the MIT license, see LICENSE for more information.
 */
package com.dumbdogdiner.stickycommands.api.player;

/**
 * The speed type to modify for a player.
 */
public enum SpeedType {
    /** Modify the player's walking speed. */
    WALK,
    /** Modify the player's flying speed. */
    FLY,
}
package mekanism.client.gui.element.scroll;

import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
import mekanism.client.gui.IGuiWrapper;
import mekanism.client.gui.element.GuiInnerScreen;
import net.minecraft.client.gui.AbstractGui;

/**
 * Scrollable list widget that renders one string per row and tracks at most
 * one selected row.
 */
public class GuiTextScrollList extends GuiScrollList {

    // Rows to display; -1 in 'selected' means no selection.
    private List<String> textEntries = new ArrayList<>();
    private int selected = -1;

    public GuiTextScrollList(IGuiWrapper gui, int x, int y, int width, int height) {
        // 10 is the per-row element height passed to the scroll-list base.
        super(gui, x, y, width, height, 10, new GuiInnerScreen(gui, x, y, width, height));
    }

    @Override
    protected int getMaxElements() {
        return textEntries.size();
    }

    @Override
    public boolean hasSelection() {
        return selected != -1;
    }

    @Override
    protected void setSelected(int index) {
        selected = index;
    }

    /** @return selected row index, or -1 if nothing is selected. */
    public int getSelection() {
        return selected;
    }

    @Override
    public void clearSelection() {
        this.selected = -1;
    }

    /**
     * Replaces the displayed entries. A null argument clears the list; a
     * selection past the end of the new list is dropped.
     */
    public void setText(@Nullable List<String> text) {
        if (text == null) {
            textEntries.clear();
        } else {
            if (selected > text.size() - 1) {
                clearSelection();
            }
            // NOTE(review): stores the caller's list by reference (no defensive
            // copy) — later caller-side mutations show up here; confirm intended.
            textEntries = text;
        }
        // Reset scroll position when all rows fit without a scroll bar.
        if (!needsScrollBars()) {
            scroll = 0;
        }
    }

    @Override
    public void renderForeground(int mouseX, int mouseY, int xAxis, int yAxis) {
        super.renderForeground(mouseX, mouseY, xAxis, yAxis);
        if (!textEntries.isEmpty()) {
            //Render the text into the entries
            int scrollIndex = getCurrentSelection();
            int focusedElements = getFocusedElements();
            int maxElements = getMaxElements();
            // Draw only the rows currently visible in the viewport.
            for (int i = 0; i < focusedElements; i++) {
                int index = scrollIndex + i;
                if (index < maxElements) {
                    // 0x00CD00 = green text; width capped at barX - x - 2.
                    renderScaledText(textEntries.get(index), relativeX + 2, relativeY + 2 + elementHeight * i, 0x00CD00, barX - x - 2);
                }
            }
        }
    }

    @Override
    public void renderElements(int mouseX, int mouseY, float partialTicks) {
        //Draw Selected
        int scrollIndex = getCurrentSelection();
        // Only blit the highlight when the selected row is inside the viewport.
        if (selected != -1 && selected >= scrollIndex && selected <= scrollIndex + getFocusedElements() - 1) {
            AbstractGui.blit(x + 1, y + 1 + (selected - scrollIndex) * elementHeight, barX - x - 2, elementHeight, 4, 2, 2, 2, TEXTURE_WIDTH, TEXTURE_HEIGHT);
        }
    }
}
/******************************************************************************* * Copyright (c) 2013-2017 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, * Version 2.0 which accompanies this distribution and is available at * http://www.apache.org/licenses/LICENSE-2.0.txt ******************************************************************************/ package mil.nga.giat.geowave.adapter.raster.stats; import org.geotools.geometry.GeneralEnvelope; import org.opengis.coverage.grid.GridCoverage; import com.vividsolutions.jts.geom.Envelope; import mil.nga.giat.geowave.adapter.raster.FitToIndexGridCoverage; import mil.nga.giat.geowave.core.geotime.store.statistics.BoundingBoxDataStatistics; import mil.nga.giat.geowave.core.index.ByteArrayId; public class RasterBoundingBoxStatistics extends BoundingBoxDataStatistics<GridCoverage> { public RasterBoundingBoxStatistics() { super(); } public RasterBoundingBoxStatistics( final ByteArrayId dataAdapterId ) { super( dataAdapterId); } @Override protected Envelope getEnvelope( final GridCoverage entry ) { final org.opengis.geometry.Envelope indexedEnvelope = entry.getEnvelope(); final org.opengis.geometry.Envelope originalEnvelope; if (entry instanceof FitToIndexGridCoverage) { originalEnvelope = ((FitToIndexGridCoverage) entry).getOriginalEnvelope(); } else { originalEnvelope = null; } // we don't want to accumulate the envelope outside of the original if // it is fit to the index, so compute the intersection with the original // envelope final org.opengis.geometry.Envelope resultingEnvelope = getIntersection( originalEnvelope, indexedEnvelope); if (resultingEnvelope != null) { return new Envelope( resultingEnvelope.getMinimum(0), resultingEnvelope.getMaximum(0), resultingEnvelope.getMinimum(1), 
resultingEnvelope.getMaximum(1)); } return null; } private static org.opengis.geometry.Envelope getIntersection( final org.opengis.geometry.Envelope originalEnvelope, final org.opengis.geometry.Envelope indexedEnvelope ) { if (originalEnvelope == null) { return indexedEnvelope; } if (indexedEnvelope == null) { return originalEnvelope; } final int dimensions = originalEnvelope.getDimension(); final double[] minDP = new double[dimensions]; final double[] maxDP = new double[dimensions]; for (int d = 0; d < dimensions; d++) { // to perform the intersection of the original envelope and the // indexed envelope, use the max of the mins per dimension and the // min of the maxes minDP[d] = Math.max( originalEnvelope.getMinimum(d), indexedEnvelope.getMinimum(d)); maxDP[d] = Math.min( originalEnvelope.getMaximum(d), indexedEnvelope.getMaximum(d)); } return new GeneralEnvelope( minDP, maxDP); } }
/*
 * Copyright 2018 JDCLOUD.COM
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http:#www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Classification
 * 敏感数据保护-数据分级分类
 *
 * OpenAPI spec version: v1
 * Contact:
 *
 * NOTE: This class is auto generated by the jdcloud code generator program.
 */
package com.jdcloud.sdk.service.dcap.model;

import java.util.List;
import java.util.ArrayList;
import com.jdcloud.sdk.service.dcap.model.CategoryDesc;
import com.jdcloud.sdk.service.JdcloudResult;

/**
 * Result holder for the sensitive-data category list (auto-generated;
 * manual edits will be overwritten by the code generator).
 */
public class DescribeCategoryListResult extends JdcloudResult implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * The category descriptions returned by the service; may be null until set.
     */
    private List<CategoryDesc> list;

    /**
     * get list
     *
     * @return the category list (may be null)
     */
    public List<CategoryDesc> getList() {
        return list;
    }

    /**
     * set list
     *
     * @param list the category list to store
     */
    public void setList(List<CategoryDesc> list) {
        this.list = list;
    }

    /**
     * set list (fluent variant)
     *
     * @param list the category list to store
     * @return this result, for chaining
     */
    public DescribeCategoryListResult list(List<CategoryDesc> list) {
        this.list = list;
        return this;
    }

    /**
     * add item to list, lazily creating the backing ArrayList on first use
     *
     * @param list a single category description to append
     */
    public void addList(CategoryDesc list) {
        if (this.list == null) {
            this.list = new ArrayList<>();
        }
        this.list.add(list);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gobblin.runtime.embedded;

import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ClassUtil;

import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.data.management.copy.CopySource;
import org.apache.gobblin.data.management.copy.RecursiveCopyableDataset;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
import org.apache.gobblin.runtime.cli.CliObjectOption;
import org.apache.gobblin.runtime.cli.NotOnCli;
import org.apache.gobblin.runtime.cli.PublicMethodsGobblinCliFactory;
import org.apache.gobblin.runtime.template.ResourceBasedJobTemplate;

/**
 * Embedded version of distcp.
 * Usage:
 *   new EmbeddedGobblinDistcp(new Path("/source"), new Path("/dest")).run();
 */
public class EmbeddedGobblinDistcp extends EmbeddedGobblin {

  /** CLI entry point: builds an EmbeddedGobblinDistcp from "<source> <target>" args. */
  @Alias(value = "distcp", description = "Distributed copy between Hadoop compatibly file systems.")
  public static class CliFactory extends PublicMethodsGobblinCliFactory {

    public CliFactory() {
      super(EmbeddedGobblinDistcp.class);
    }

    @Override
    public EmbeddedGobblin constructEmbeddedGobblin(CommandLine cli) throws JobTemplate.TemplateException, IOException {
      String[] leftoverArgs = cli.getArgs();
      // Exactly two positional arguments expected: source path and target path.
      if (leftoverArgs.length != 2) {
        throw new RuntimeException("Unexpected number of arguments.");
      }
      Path from = new Path(leftoverArgs[0]);
      Path to = new Path(leftoverArgs[1]);
      return new EmbeddedGobblinDistcp(from, to);
    }

    @Override
    public String getUsageString() {
      return "[OPTIONS] <source> <target>";
    }
  }

  /**
   * Builds a distcp job copying {@code from} into {@code to}, loading the
   * bundled distcp job template and inferring source/target fs URIs.
   */
  public EmbeddedGobblinDistcp(Path from, Path to) throws JobTemplate.TemplateException, IOException {
    super("Distcp");
    try {
      setTemplate(ResourceBasedJobTemplate.forResourcePath("templates/distcp.template"));
    } catch (URISyntaxException | SpecNotFoundException exc) {
      throw new RuntimeException("Could not instantiate an " + EmbeddedGobblinDistcp.class.getName(), exc);
    }
    this.setConfiguration("from", from.toString());
    this.setConfiguration("to", to.toString());
    // Infer source and target fs uris from the input paths
    this.setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, from.getFileSystem(new Configuration()).getUri().toString());
    this.setConfiguration(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, to.getFileSystem(new Configuration()).getUri().toString());
    // add gobblin-data-management jar to distributed jars
    this.distributeJarByClassWithPriority(CopySource.class, 0);
  }

  /**
   * Specifies that files in the target should be updated if they have changed in the source. Equivalent to -update
   * option in Hadoop distcp.
   */
  @CliObjectOption(description = "Specifies files should be updated if they're different in the source.")
  public EmbeddedGobblinDistcp update() {
    this.setConfiguration(RecursiveCopyableDataset.UPDATE_KEY, Boolean.toString(true));
    return this;
  }

  /**
   * Specifies that files in the target that don't exist in the source should be deleted. Equivalent to -delete
   * option in Hadoop distcp.
   */
  @CliObjectOption(description = "Delete files in target that don't exist on source.")
  public EmbeddedGobblinDistcp delete() {
    this.setConfiguration(RecursiveCopyableDataset.DELETE_KEY, Boolean.toString(true));
    return this;
  }

  /**
   * If {@link #delete()} is used, specifies that newly empty parent directories should also be deleted.
   */
  @CliObjectOption(description = "If deleting files on target, also delete newly empty parent directories.")
  public EmbeddedGobblinDistcp deleteEmptyParentDirectories() {
    this.setConfiguration(RecursiveCopyableDataset.DELETE_EMPTY_DIRECTORIES_KEY, Boolean.toString(true));
    return this;
  }

  /**
   * Run in simulate mode. Will log everything it would copy, but not actually copy anything.
   */
  public EmbeddedGobblinDistcp simulate() {
    this.setConfiguration(CopySource.SIMULATE, Boolean.toString(true));
    return this;
  }

  // Remove template from CLI: the template is fixed for distcp, so this
  // setter must not be exposed as a command-line option.
  @Override
  @NotOnCli
  public EmbeddedGobblin setTemplate(String templateURI)
      throws URISyntaxException, SpecNotFoundException, JobTemplate.TemplateException {
    return super.setTemplate(templateURI);
  }
}
/**
 * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.credit.creditdefaultswap.pricing.vanilla.isda;

import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyFixedRecoveryDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyForwardStartingDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyMuniDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyQuantoDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyRecoveryLockDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacySovereignDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getLegacyVanillaDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardFixedRecoveryDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardForwardStartingDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardMuniDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardQuantoDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardRecoveryLockDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardSovereignpDefinition;
import static com.opengamma.analytics.financial.credit.creditdefaultswap.CreditDefaultSwapDefinitionDataSets.getStandardVanillaDefinition;
import static org.testng.AssertJUnit.assertEquals;

import org.testng.annotations.Test;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.analytics.financial.credit.BuySellProtection;
import com.opengamma.analytics.financial.credit.ISDAYieldCurveAndHazardRateCurveProvider;
import com.opengamma.analytics.financial.credit.PriceType;
import com.opengamma.analytics.financial.credit.creditdefaultswap.definition.legacy.LegacyVanillaCreditDefaultSwapDefinition;
import com.opengamma.analytics.financial.credit.creditdefaultswap.definition.vanilla.CreditDefaultSwapDefinition;
import com.opengamma.analytics.financial.credit.creditdefaultswap.pricing.legacy.PresentValueLegacyCreditDefaultSwap;
import com.opengamma.analytics.financial.credit.hazardratecurve.HazardRateCurve;
import com.opengamma.analytics.financial.credit.isdayieldcurve.ISDADateCurve;
import com.opengamma.financial.convention.daycount.DayCount;
import com.opengamma.financial.convention.daycount.DayCountFactory;
import com.opengamma.util.time.DateUtils;

/**
 * Tests the ISDA CDS present-value calculator: argument validation, invariance
 * across CDS definition types, buy/sell antisymmetry, and regression against
 * the deprecated calculator it replaces.
 */
public class ISDACreditDefaultSwapPVCalculatorTest {
  // Deprecated implementation used as the regression baseline.
  private static final PresentValueLegacyCreditDefaultSwap DEPRECATED_CALCULATOR = new PresentValueLegacyCreditDefaultSwap();
  // Calculator under test.
  private static final ISDACreditDefaultSwapPVCalculator CALCULATOR = new ISDACreditDefaultSwapPVCalculator();
  private static final ZonedDateTime VALUATION_DATE = DateUtils.getUTCDate(2013, 1, 6);
  private static final ZonedDateTime BASE_DATE = DateUtils.getUTCDate(2013, 3, 1);
  // Hazard-rate curve nodes (dates and rates); times are derived in the static block.
  private static final ZonedDateTime[] HR_DATES = new ZonedDateTime[] {DateUtils.getUTCDate(2013, 3, 1), DateUtils.getUTCDate(2013, 6, 1), DateUtils.getUTCDate(2013, 9, 1),
      DateUtils.getUTCDate(2013, 12, 1), DateUtils.getUTCDate(2014, 3, 1), DateUtils.getUTCDate(2015, 3, 1), DateUtils.getUTCDate(2016, 3, 1), DateUtils.getUTCDate(2018, 3, 1),
      DateUtils.getUTCDate(2023, 3, 1) };
  private static final double[] HR_TIMES;
  private static final double[] HR_RATES = new double[] {0.01, 0.02, 0.04, 0.03, 0.06, 0.03, 0.05, 0.03, 0.02 };
  // Yield-curve nodes (same dates as the hazard curve, different rates).
  private static final ZonedDateTime[] YC_DATES = new ZonedDateTime[] {DateUtils.getUTCDate(2013, 3, 1), DateUtils.getUTCDate(2013, 6, 1), DateUtils.getUTCDate(2013, 9, 1),
      DateUtils.getUTCDate(2013, 12, 1), DateUtils.getUTCDate(2014, 3, 1), DateUtils.getUTCDate(2015, 3, 1), DateUtils.getUTCDate(2016, 3, 1), DateUtils.getUTCDate(2018, 3, 1),
      DateUtils.getUTCDate(2023, 3, 1) };
  private static final HazardRateCurve HAZARD_RATE_CURVE;
  private static final double[] YC_TIMES;
  private static final double[] YC_RATES = new double[] {0.005, 0.006, 0.008, 0.009, 0.01, 0.012, 0.015, 0.02, 0.03 };
  private static final DayCount DAY_COUNT = DayCountFactory.INSTANCE.getDayCount("ACT/365");
  // One-day offset, in years, applied to both curves.
  private static final double OFFSET = 1. / 365;
  private static final ISDADateCurve YIELD_CURVE;
  private static final ISDAYieldCurveAndHazardRateCurveProvider CURVE_PROVIDER;
  private static final double EPS = 1e-15;

  static {
    // Convert node dates to ACT/365 year fractions from BASE_DATE, then build
    // the hazard-rate curve, yield curve, and the combined provider.
    int n = HR_DATES.length;
    HR_TIMES = new double[n];
    for (int i = 0; i < n; i++) {
      HR_TIMES[i] = DAY_COUNT.getDayCountFraction(BASE_DATE, HR_DATES[i]);
    }
    HAZARD_RATE_CURVE = new HazardRateCurve(HR_DATES, HR_TIMES, HR_RATES, OFFSET);
    n = YC_DATES.length;
    YC_TIMES = new double[n];
    for (int i = 0; i < n; i++) {
      YC_TIMES[i] = DAY_COUNT.getDayCountFraction(BASE_DATE, YC_DATES[i]);
    }
    YIELD_CURVE = new ISDADateCurve("ISDA", BASE_DATE, YC_DATES, YC_RATES, OFFSET);
    CURVE_PROVIDER = new ISDAYieldCurveAndHazardRateCurveProvider(YIELD_CURVE, HAZARD_RATE_CURVE);
  }

  // --- Null-argument validation -------------------------------------------

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullCDS() {
    CALCULATOR.getPresentValue(null, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullData() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    CALCULATOR.getPresentValue(cds, null, VALUATION_DATE, PriceType.CLEAN);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullValuationDate() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, null, PriceType.CLEAN);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullPriceType() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, VALUATION_DATE, null);
  }

  /**
   * PV should be identical across every CDS definition flavour built from the
   * same underlying data set.
   */
  @Test
  public void testCDSTypes() {
    final CreditDefaultSwapDefinition cds = getLegacyVanillaDefinition();
    final PriceType priceType = PriceType.DIRTY;
    final double pv = CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, VALUATION_DATE, priceType);
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardVanillaDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardFixedRecoveryDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardForwardStartingDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardMuniDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardQuantoDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardRecoveryLockDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getStandardSovereignpDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacyFixedRecoveryDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacyForwardStartingDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacyMuniDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacyQuantoDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacyRecoveryLockDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
    assertEquals(pv, CALCULATOR.getPresentValue(getLegacySovereignDefinition(), CURVE_PROVIDER, VALUATION_DATE, priceType));
  }

  /** Buying protection is the exact negative of selling it. */
  @Test
  public void testBuySell() {
    CreditDefaultSwapDefinition buy = getStandardVanillaDefinition(BuySellProtection.BUY);
    CreditDefaultSwapDefinition sell = getStandardVanillaDefinition(BuySellProtection.SELL);
    assertEquals(-CALCULATOR.getPresentValue(sell, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN), CALCULATOR.getPresentValue(buy, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN));
    buy = getLegacyVanillaDefinition(BuySellProtection.BUY);
    sell = getLegacyVanillaDefinition(BuySellProtection.SELL);
    assertEquals(-CALCULATOR.getPresentValue(sell, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN), CALCULATOR.getPresentValue(buy, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN));
  }

  /** Regression vs. the deprecated calculator, clean price. */
  @Test(enabled = true)
  public void regressionTestCleanPrice() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    final double deprecatedResult = DEPRECATED_CALCULATOR.getPresentValueLegacyCreditDefaultSwap(VALUATION_DATE, cds, YIELD_CURVE, HAZARD_RATE_CURVE, PriceType.CLEAN);
    final double result = CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN);
    assertEquals(deprecatedResult, result, EPS);
  }

  /** Regression vs. the deprecated calculator, dirty price. */
  @Test(enabled = true)
  public void regressionTestDirtyPrice() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    final double deprecatedResult = DEPRECATED_CALCULATOR.getPresentValueLegacyCreditDefaultSwap(VALUATION_DATE, cds, YIELD_CURVE, HAZARD_RATE_CURVE, PriceType.DIRTY);
    final double result = CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, VALUATION_DATE, PriceType.DIRTY);
    assertEquals(deprecatedResult, result, EPS);
  }

  // Micro-benchmark of the deprecated path; disabled in normal runs.
  @Test(enabled = false)
  public void timeABDeprecated() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    final double startTime = System.currentTimeMillis();
    int j = 0;
    for (int i = 0; i < 100000; i++) {
      DEPRECATED_CALCULATOR.getPresentValueLegacyCreditDefaultSwap(VALUATION_DATE, cds, YIELD_CURVE, HAZARD_RATE_CURVE, PriceType.CLEAN);
      j++;
    }
    final double endTime = System.currentTimeMillis();
    System.out.println("Deprecated:\t" + (endTime - startTime) / j * 100);
  }

  // Micro-benchmark of the refactored path; disabled in normal runs.
  @Test(enabled = false)
  public void timeACRefactored() {
    final LegacyVanillaCreditDefaultSwapDefinition cds = getLegacyVanillaDefinition().withMaturityDate(VALUATION_DATE.plusYears(10));
    final double startTime = System.currentTimeMillis();
    int j = 0;
    for (int i = 0; i < 100000; i++) {
      CALCULATOR.getPresentValue(cds, CURVE_PROVIDER, VALUATION_DATE, PriceType.CLEAN);
      j++;
    }
    final double endTime = System.currentTimeMillis();
    System.out.println("Refactored:\t" + (endTime - startTime) / j * 100);
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor.async;

import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.JndiRegistry;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.spi.Policy;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.AsyncProcessorConverterHelper;
import org.apache.camel.util.AsyncProcessorHelper;
import org.junit.Test;

/**
 * Verifies that a {@link Policy} wrapping an entire route still works when the
 * route contains an async endpoint: the policy is invoked once, its header
 * changes survive the async hand-off, and processing resumes on a different
 * thread after the async endpoint.
 *
 * @version
 */
public class AsyncEndpointPolicyTest extends ContextTestSupport {

    // Thread names captured before and after the async endpoint, to prove
    // the exchange was continued on another thread.
    private static String beforeThreadName;
    private static String afterThreadName;

    @Override
    protected JndiRegistry createRegistry() throws Exception {
        JndiRegistry jndi = super.createRegistry();
        // Registered under "foo" so the route can reference it via .policy("foo").
        jndi.bind("foo", new MyPolicy("foo"));
        return jndi;
    }

    @Test
    public void testAsyncEndpoint() throws Exception {
        // While inside the wrapped route the header holds "was wrapped" ...
        getMockEndpoint("mock:foo").expectedMessageCount(1);
        getMockEndpoint("mock:foo").expectedHeaderReceived("foo", "was wrapped");
        getMockEndpoint("mock:bar").expectedMessageCount(1);
        getMockEndpoint("mock:bar").expectedHeaderReceived("foo", "was wrapped");
        getMockEndpoint("mock:result").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedHeaderReceived("foo", "was wrapped");
        // ... and after the policy completes it is replaced with the final value.
        getMockEndpoint("mock:response").expectedMessageCount(1);
        getMockEndpoint("mock:response").expectedHeaderReceived("foo", "policy finished execution");

        template.sendBody("direct:send", "Hello World");

        assertMockEndpointsSatisfied();

        MyPolicy foo = context.getRegistry().lookupByNameAndType("foo", MyPolicy.class);

        assertEquals("Should only be invoked 1 time", 1, foo.getInvoked());

        assertFalse("Should use different threads", beforeThreadName.equalsIgnoreCase(afterThreadName));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                context.addComponent("async", new MyAsyncComponent());

                from("direct:start")
                    // wraps the entire route in the same policy
                    .policy("foo")
                    .to("mock:foo")
                    .to("async:bye:camel")
                    .to("mock:bar")
                    .to("mock:result");

                from("direct:send")
                    .to("mock:before")
                    .to("log:before")
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            // Capture the thread that entered the async endpoint.
                            beforeThreadName = Thread.currentThread().getName();
                        }
                    })
                    .to("direct:start")
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            // Capture the thread that resumed after the async endpoint.
                            afterThreadName = Thread.currentThread().getName();
                        }
                    })
                    .to("log:after")
                    .to("mock:after")
                    .to("mock:response");
            }
        };
    }

    /**
     * Test policy: wraps the route's processor in an {@link AsyncProcessor}
     * that marks the exchange on entry and again when processing finishes,
     * handling both the synchronous and asynchronous completion paths.
     */
    public static class MyPolicy implements Policy {

        private final String name;
        private int invoked;

        public MyPolicy(String name) {
            this.name = name;
        }

        public void beforeWrap(RouteContext routeContext, ProcessorDefinition<?> definition) {
            // no need to modify the route
        }

        public Processor wrap(RouteContext routeContext, final Processor processor) {
            return new AsyncProcessor() {
                public boolean process(final Exchange exchange, final AsyncCallback callback) {
                    invoked++;
                    // let the original processor continue routing
                    exchange.getIn().setHeader(name, "was wrapped");
                    AsyncProcessor ap = AsyncProcessorConverterHelper.convert(processor);
                    boolean sync = ap.process(exchange, new AsyncCallback() {
                        public void done(boolean doneSync) {
                            // we only have to handle async completion of this policy
                            if (doneSync) {
                                return;
                            }
                            exchange.getIn().setHeader(name, "policy finished execution");
                            callback.done(false);
                        }
                    });

                    if (!sync) {
                        // continue routing async
                        return false;
                    }

                    // we are done synchronously, so do our after work and invoke the callback
                    exchange.getIn().setHeader(name, "policy finished execution");
                    callback.done(true);
                    return true;
                }

                public void process(Exchange exchange) throws Exception {
                    // Bridge the synchronous API onto the async one.
                    AsyncProcessorHelper.process(this, exchange);
                }
            };
        }

        public int getInvoked() {
            return invoked;
        }
    }
}
// Template Source: BaseEntityRequestBuilder.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------

package com.microsoft.graph.requests;

import com.microsoft.graph.http.IRequestBuilder;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.models.Security;
import com.microsoft.graph.models.HuntingQueryResults;
import com.microsoft.graph.requests.IncidentCollectionRequestBuilder;
import com.microsoft.graph.requests.IncidentRequestBuilder;
import com.microsoft.graph.requests.AttackSimulationRootRequestBuilder;
import com.microsoft.graph.requests.AlertCollectionRequestBuilder;
import com.microsoft.graph.requests.AlertRequestBuilder;
import com.microsoft.graph.requests.CloudAppSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.CloudAppSecurityProfileRequestBuilder;
import com.microsoft.graph.requests.DomainSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.DomainSecurityProfileRequestBuilder;
import com.microsoft.graph.requests.FileSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.FileSecurityProfileRequestBuilder;
import com.microsoft.graph.requests.HostSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.HostSecurityProfileRequestBuilder;
import com.microsoft.graph.requests.IpSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.IpSecurityProfileRequestBuilder;
import com.microsoft.graph.requests.ProviderTenantSettingCollectionRequestBuilder;
import com.microsoft.graph.requests.ProviderTenantSettingRequestBuilder;
import com.microsoft.graph.requests.SecureScoreControlProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.SecureScoreControlProfileRequestBuilder;
import com.microsoft.graph.requests.SecureScoreCollectionRequestBuilder;
import com.microsoft.graph.requests.SecureScoreRequestBuilder;
import com.microsoft.graph.requests.SecurityActionCollectionRequestBuilder;
import com.microsoft.graph.requests.SecurityActionRequestBuilder;
import com.microsoft.graph.requests.TiIndicatorCollectionRequestBuilder;
import com.microsoft.graph.requests.TiIndicatorRequestBuilder;
import com.microsoft.graph.requests.UserSecurityProfileCollectionRequestBuilder;
import com.microsoft.graph.requests.UserSecurityProfileRequestBuilder;
import java.util.Arrays;
import java.util.EnumSet;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
import com.microsoft.graph.core.IBaseClient;
import com.microsoft.graph.http.BaseRequestBuilder;
import com.microsoft.graph.models.SecurityRunHuntingQueryParameterSet;

// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Do not hand-edit semantics here; regeneration will discard them.

/**
 * The class for the Security Request Builder.
 *
 * <p>Each navigation-property accessor below follows the same generated pattern:
 * the no-argument overload returns a collection request builder for the whole
 * collection, and the {@code String id} overload appends {@code "/" + id} to the
 * URL segment to address a single item.
 */
public class SecurityRequestBuilder extends BaseRequestBuilder<Security> {

    /**
     * The request builder for the Security
     *
     * @param requestUrl     the request URL
     * @param client         the service client
     * @param requestOptions the options for this request
     */
    public SecurityRequestBuilder(@Nonnull final String requestUrl, @Nonnull final IBaseClient<?> client, @Nullable final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions) {
        super(requestUrl, client, requestOptions);
    }

    /**
     * Creates the request
     *
     * @param requestOptions the options for this request
     * @return the SecurityRequest instance
     */
    @Nonnull
    public SecurityRequest buildRequest(@Nullable final com.microsoft.graph.options.Option... requestOptions) {
        // Varargs overload delegates to the list-based overload below.
        return buildRequest(getOptions(requestOptions));
    }

    /**
     * Creates the request with specific requestOptions instead of the existing requestOptions
     *
     * @param requestOptions the options for this request
     * @return the SecurityRequest instance
     */
    @Nonnull
    public SecurityRequest buildRequest(@Nonnull final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions) {
        return new com.microsoft.graph.requests.SecurityRequest(getRequestUrl(), getClient(), requestOptions);
    }

    /**
     * Gets a request builder for the Incident collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public IncidentCollectionRequestBuilder incidents() {
        return new IncidentCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("incidents"), getClient(), null);
    }

    /**
     * Gets a request builder for the Incident item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public IncidentRequestBuilder incidents(@Nonnull final String id) {
        return new IncidentRequestBuilder(getRequestUrlWithAdditionalSegment("incidents") + "/" + id, getClient(), null);
    }

    /**
     * Gets the request builder for AttackSimulationRoot
     *
     * @return the AttackSimulationRootRequestBuilder instance
     */
    @Nonnull
    public AttackSimulationRootRequestBuilder attackSimulation() {
        return new AttackSimulationRootRequestBuilder(getRequestUrlWithAdditionalSegment("attackSimulation"), getClient(), null);
    }

    /**
     * Gets a request builder for the Alert collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public AlertCollectionRequestBuilder alerts() {
        return new AlertCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("alerts"), getClient(), null);
    }

    /**
     * Gets a request builder for the Alert item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public AlertRequestBuilder alerts(@Nonnull final String id) {
        return new AlertRequestBuilder(getRequestUrlWithAdditionalSegment("alerts") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the CloudAppSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public CloudAppSecurityProfileCollectionRequestBuilder cloudAppSecurityProfiles() {
        return new CloudAppSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("cloudAppSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the CloudAppSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public CloudAppSecurityProfileRequestBuilder cloudAppSecurityProfiles(@Nonnull final String id) {
        return new CloudAppSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("cloudAppSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the DomainSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public DomainSecurityProfileCollectionRequestBuilder domainSecurityProfiles() {
        return new DomainSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("domainSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the DomainSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public DomainSecurityProfileRequestBuilder domainSecurityProfiles(@Nonnull final String id) {
        return new DomainSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("domainSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the FileSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public FileSecurityProfileCollectionRequestBuilder fileSecurityProfiles() {
        return new FileSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("fileSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the FileSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public FileSecurityProfileRequestBuilder fileSecurityProfiles(@Nonnull final String id) {
        return new FileSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("fileSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the HostSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public HostSecurityProfileCollectionRequestBuilder hostSecurityProfiles() {
        return new HostSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("hostSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the HostSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public HostSecurityProfileRequestBuilder hostSecurityProfiles(@Nonnull final String id) {
        return new HostSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("hostSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the IpSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public IpSecurityProfileCollectionRequestBuilder ipSecurityProfiles() {
        return new IpSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("ipSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the IpSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public IpSecurityProfileRequestBuilder ipSecurityProfiles(@Nonnull final String id) {
        return new IpSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("ipSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the ProviderTenantSetting collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public ProviderTenantSettingCollectionRequestBuilder providerTenantSettings() {
        return new ProviderTenantSettingCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("providerTenantSettings"), getClient(), null);
    }

    /**
     * Gets a request builder for the ProviderTenantSetting item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public ProviderTenantSettingRequestBuilder providerTenantSettings(@Nonnull final String id) {
        return new ProviderTenantSettingRequestBuilder(getRequestUrlWithAdditionalSegment("providerTenantSettings") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the SecureScoreControlProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public SecureScoreControlProfileCollectionRequestBuilder secureScoreControlProfiles() {
        return new SecureScoreControlProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("secureScoreControlProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the SecureScoreControlProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public SecureScoreControlProfileRequestBuilder secureScoreControlProfiles(@Nonnull final String id) {
        return new SecureScoreControlProfileRequestBuilder(getRequestUrlWithAdditionalSegment("secureScoreControlProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the SecureScore collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public SecureScoreCollectionRequestBuilder secureScores() {
        return new SecureScoreCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("secureScores"), getClient(), null);
    }

    /**
     * Gets a request builder for the SecureScore item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public SecureScoreRequestBuilder secureScores(@Nonnull final String id) {
        return new SecureScoreRequestBuilder(getRequestUrlWithAdditionalSegment("secureScores") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the SecurityAction collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public SecurityActionCollectionRequestBuilder securityActions() {
        return new SecurityActionCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("securityActions"), getClient(), null);
    }

    /**
     * Gets a request builder for the SecurityAction item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public SecurityActionRequestBuilder securityActions(@Nonnull final String id) {
        return new SecurityActionRequestBuilder(getRequestUrlWithAdditionalSegment("securityActions") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the TiIndicator collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public TiIndicatorCollectionRequestBuilder tiIndicators() {
        return new TiIndicatorCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("tiIndicators"), getClient(), null);
    }

    /**
     * Gets a request builder for the TiIndicator item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public TiIndicatorRequestBuilder tiIndicators(@Nonnull final String id) {
        return new TiIndicatorRequestBuilder(getRequestUrlWithAdditionalSegment("tiIndicators") + "/" + id, getClient(), null);
    }

    /**
     * Gets a request builder for the UserSecurityProfile collection
     *
     * @return the collection request builder
     */
    @Nonnull
    public UserSecurityProfileCollectionRequestBuilder userSecurityProfiles() {
        return new UserSecurityProfileCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("userSecurityProfiles"), getClient(), null);
    }

    /**
     * Gets a request builder for the UserSecurityProfile item
     *
     * @return the request builder
     * @param id the item identifier
     */
    @Nonnull
    public UserSecurityProfileRequestBuilder userSecurityProfiles(@Nonnull final String id) {
        return new UserSecurityProfileRequestBuilder(getRequestUrlWithAdditionalSegment("userSecurityProfiles") + "/" + id, getClient(), null);
    }

    /**
     * Gets a builder to execute the method
     *
     * @return the request builder
     * @param parameters the parameters for the service method
     */
    @Nonnull
    public SecurityRunHuntingQueryRequestBuilder runHuntingQuery(@Nonnull final SecurityRunHuntingQueryParameterSet parameters) {
        // Action (OData function/action) builder: carries the parameter set alongside the URL.
        return new SecurityRunHuntingQueryRequestBuilder(getRequestUrlWithAdditionalSegment("microsoft.graph.runHuntingQuery"), getClient(), null, parameters);
    }
}
package kz.edu.nu.cs.exercise;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Persistence model for the {@code cvbuilder_logs} table.
 *
 * <p>All SQL access is delegated to the generic {@link Model} base class;
 * this class only names the table and provides a typed convenience method
 * for inserting a log row.
 */
public class LogModel extends Model {

    /**
     * Binds this model to the {@code cvbuilder_logs} table.
     *
     * @throws SQLException if the {@link Model} superclass fails to initialize
     */
    public LogModel() throws SQLException {
        super("cvbuilder_logs");
    }

    /**
     * Inserts a single log entry.
     *
     * @param type    value stored in the {@code type} column
     * @param content value stored in the {@code content} column
     * @throws Exception propagated from {@code Model.insert}
     */
    public void add(String type, String content) throws Exception {
        // Column names and values are parallel lists, as Model.insert expects.
        // Copies are taken so the lists remain independently mutable.
        List<String> fields = new ArrayList<String>(Arrays.asList("type", "content"));
        List<String> values = new ArrayList<String>(Arrays.asList(type, content));
        this.insert(fields, values);
    }
}
// Copyright 2018-2022 Polyaxon, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* * Polyaxon SDKs and REST API specification. * Polyaxon SDKs and REST API specification. * * The version of the OpenAPI document: 1.18.0 * Contact: contact@polyaxon.com * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package org.openapitools.client.model; import com.google.gson.TypeAdapter; import com.google.gson.annotations.JsonAdapter; import com.google.gson.annotations.SerializedName; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.io.IOException; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; /** * Model tests for V1AverageStoppingPolicy */ public class V1AverageStoppingPolicyTest { private final V1AverageStoppingPolicy model = new V1AverageStoppingPolicy(); /** * Model tests for V1AverageStoppingPolicy */ @Test public void testV1AverageStoppingPolicy() { // TODO: test V1AverageStoppingPolicy } /** * Test the property 'kind' */ @Test public void kindTest() { // TODO: test kind } }
/*
 *
 * Copyright 2010 Leeds Metropolitan University
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 */

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package org.qyouti.qti1.element;

import org.qyouti.qti1.QTIConditionUnsupported;

/**
 * Marker element class with no members of its own; all behavior comes from
 * {@link QTIConditionUnsupported}, i.e. this condition is not implemented by
 * the engine. Presumably maps to the QTI 1.x {@code varlte} condition element
 * (variable less-than-or-equal) — TODO confirm against the element registry.
 *
 * @author jon
 */
public class QTIElementVarlte
        extends QTIConditionUnsupported
{

}
/** * Copyright 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.appengine.remote; import java.io.IOException; import java.io.PrintWriter; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; // [START example] @SuppressWarnings("serial") @WebServlet(name = "remote", description = "Remote: yet another HelloWorld.", urlPatterns = "/remote") public class RemoteServlet extends HttpServlet { @Override public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { PrintWriter out = resp.getWriter(); out.println("Hello, world"); } } // [END example]
/*
 * Copyright 2012-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package lombok.core;

import lombok.core.util.MethodDescriptor;

import static lombok.core.util.MethodDescriptor.*;

/**
 * Constants describing the Griffon MVC-aware API surface: fully-qualified type
 * names used by the code generators, the MVC method names, and the complete
 * table of method signatures ({@link #METHODS}) that an MVC-aware artifact is
 * expected to expose. The exact contents and order of {@code METHODS} form the
 * generated contract — edit with care.
 *
 * @author Andres Almiray
 */
public interface MVCAwareConstants extends BaseConstants {
    // Fully-qualified Griffon type names referenced by the descriptors below.
    String MVC_CLOSURE_TYPE = "griffon.core.MVCClosure";
    String GRIFFON_MODEL_TYPE = "griffon.core.GriffonModel";
    String GRIFFON_VIEW_TYPE = "griffon.core.GriffonView";
    String GRIFFON_CONTROLLER_TYPE = "griffon.core.GriffonController";
    String MVC_GROUP = "griffon.core.MVCGroup";
    String MVC_HANDLER_TYPE = "griffon.core.MVCHandler";
    String GRIFFON_MVC_ARTIFACT_TYPE = "griffon.core.GriffonMvcArtifact";
    // Names of the MVC lifecycle methods whose overloads are enumerated below.
    String METHOD_CREATE_MVC_GROUP = "createMVCGroup";
    String METHOD_BUILD_MVC_GROUP = "buildMVCGroup";
    String METHOD_WITH_MVC_GROUP = "withMVCGroup";
    String METHOD_DESTROY_MVC_GROUP = "destroyMVCGroup";
    // Type-parameter names for the generic withMVCGroup overloads (Model/View/Controller).
    String M = "M";
    String V = "V";
    String C = "C";

    MethodDescriptor[] METHODS = new MethodDescriptor[]{
        // destroyMVCGroup(String mvcName)
        MethodDescriptor.method(
            type(VOID),
            METHOD_DESTROY_MVC_GROUP,
            args(type(JAVA_LANG_STRING))
        ),
        // buildMVCGroup overloads: every combination of
        // (mvcType[, mvcName][, Map<String,Object> args]) returning MVCGroup.
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(type(JAVA_LANG_STRING), type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        // createMVCGroup overloads: same argument combinations, returning
        // List<? extends GriffonMvcArtifact>.
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING))
        ),
        MethodDescriptor.method(
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        // withMVCGroup overloads taking a groovy.lang.Closure callback.
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(GROOVY_LANG_CLOSURE))
        ),
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(GROOVY_LANG_CLOSURE))
        ),
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(GROOVY_LANG_CLOSURE))
        ),
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(GROOVY_LANG_CLOSURE))
        ),
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(GROOVY_LANG_CLOSURE))
        ),
        MethodDescriptor.method(
            type(VOID),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(GROOVY_LANG_CLOSURE))
        ),
        // Generic withMVCGroup overloads taking a typed MVCClosure<M, V, C>,
        // parameterized on GriffonModel / GriffonView / GriffonController.
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(MVC_CLOSURE_TYPE, M, V, C))
        ),
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(MVC_CLOSURE_TYPE, M, V, C))
        ),
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(MVC_CLOSURE_TYPE, M, V, C))
        ),
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(JAVA_LANG_STRING),
                type(MVC_CLOSURE_TYPE, M, V, C))
        ),
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_LANG_STRING),
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(MVC_CLOSURE_TYPE, M, V, C))
        ),
        MethodDescriptor.method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                type(JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                type(JAVA_LANG_STRING),
                type(MVC_CLOSURE_TYPE, M, V, C))
        )
    };
}
/*
 * Copyright 2021 ConsenSys AG.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package tech.pegasys.teku.beaconrestapi.handlers.v1.validator;

import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.BEACON_BLOCK_ROOT;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.RES_BAD_REQUEST;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.RES_INTERNAL_ERROR;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.RES_NOT_FOUND;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.RES_OK;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.SLOT;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.SUBCOMMITTEE_INDEX;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.TAG_VALIDATOR;
import static tech.pegasys.teku.infrastructure.http.RestApiConstants.TAG_VALIDATOR_REQUIRED;
import static tech.pegasys.teku.infrastructure.restapi.endpoints.SingleQueryParameterUtils.getParameterValueAsBytes32;
import static tech.pegasys.teku.infrastructure.restapi.endpoints.SingleQueryParameterUtils.getParameterValueAsInt;
import static tech.pegasys.teku.infrastructure.restapi.endpoints.SingleQueryParameterUtils.getParameterValueAsUInt64;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.javalin.http.Context;
import io.javalin.plugin.openapi.annotations.HttpMethod;
import io.javalin.plugin.openapi.annotations.OpenApi;
import io.javalin.plugin.openapi.annotations.OpenApiContent;
import io.javalin.plugin.openapi.annotations.OpenApiParam;
import io.javalin.plugin.openapi.annotations.OpenApiResponse;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.apache.tuweni.bytes.Bytes32;
import org.jetbrains.annotations.NotNull;
import tech.pegasys.teku.api.DataProvider;
import tech.pegasys.teku.api.ValidatorDataProvider;
import tech.pegasys.teku.api.response.v1.validator.GetSyncCommitteeContributionResponse;
import tech.pegasys.teku.api.schema.altair.SyncCommitteeContribution;
import tech.pegasys.teku.beaconrestapi.handlers.AbstractHandler;
import tech.pegasys.teku.beaconrestapi.schema.BadRequest;
import tech.pegasys.teku.infrastructure.async.SafeFuture;
import tech.pegasys.teku.infrastructure.unsigned.UInt64;
import tech.pegasys.teku.provider.JsonProvider;
import tech.pegasys.teku.spec.constants.NetworkConstants;

/**
 * REST handler for {@code GET /eth/v1/validator/sync_committee_contribution}.
 *
 * <p>Validates the {@code slot}, {@code subcommittee_index} and
 * {@code beacon_block_root} query parameters, then asks the
 * {@link ValidatorDataProvider} for the matching sync committee contribution.
 * Any validation failure is surfaced as an HTTP 400 with a {@link BadRequest}
 * body; an absent contribution is mapped to HTTP 404 via
 * {@code handleOptionalResult}.
 */
public class GetSyncCommitteeContribution extends AbstractHandler {
  public static final String ROUTE = "/eth/v1/validator/sync_committee_contribution";

  private final ValidatorDataProvider provider;

  public GetSyncCommitteeContribution(
      final DataProvider dataProvider, final JsonProvider jsonProvider) {
    super(jsonProvider);
    this.provider = dataProvider.getValidatorDataProvider();
  }

  @OpenApi(
      path = ROUTE,
      method = HttpMethod.GET,
      summary = "Produce a sync committee contribution",
      tags = {TAG_VALIDATOR, TAG_VALIDATOR_REQUIRED},
      queryParams = {
        @OpenApiParam(
            name = SLOT,
            description =
                "`uint64` The slot for which a sync committee contribution should be created.",
            required = true),
        @OpenApiParam(
            name = SUBCOMMITTEE_INDEX,
            description = "`uint64` The subcommittee index for which to produce the contribution.",
            required = true),
        @OpenApiParam(
            name = BEACON_BLOCK_ROOT,
            description = "`bytes32` The block root for which to produce the contribution.",
            required = true)
      },
      description =
          "Returns a `SyncCommitteeContribution` that is the aggregate of `SyncCommitteeMessage` "
              + "values known to this node matching the specified slot, subcommittee index and beacon block root.",
      responses = {
        @OpenApiResponse(
            status = RES_OK,
            content = @OpenApiContent(from = GetSyncCommitteeContributionResponse.class)),
        @OpenApiResponse(status = RES_BAD_REQUEST, description = "Invalid request syntax."),
        @OpenApiResponse(
            status = RES_NOT_FOUND,
            description = "No matching sync committee messages were found"),
        @OpenApiResponse(status = RES_INTERNAL_ERROR, description = "Beacon node internal error.")
      })
  @Override
  public void handle(@NotNull final Context ctx) throws Exception {
    try {
      final Map<String, List<String>> parameters = ctx.queryParamMap();
      // All three query parameters are mandatory; reject early if any is missing.
      if (parameters.size() < 3) {
        throw new IllegalArgumentException(
            String.format(
                "Please specify all of %s, %s and %s", SLOT, SUBCOMMITTEE_INDEX, BEACON_BLOCK_ROOT));
      }
      final UInt64 slot = getParameterValueAsUInt64(parameters, SLOT);
      final Bytes32 blockRoot = getParameterValueAsBytes32(parameters, BEACON_BLOCK_ROOT);
      final int subcommitteeIndex = getParameterValueAsInt(parameters, SUBCOMMITTEE_INDEX);
      // Subcommittee index must fall within [0, SYNC_COMMITTEE_SUBNET_COUNT).
      if (subcommitteeIndex < 0
          || subcommitteeIndex >= NetworkConstants.SYNC_COMMITTEE_SUBNET_COUNT) {
        throw new IllegalArgumentException(
            String.format(
                "Subcommittee index needs to be between 0 and %s, %s is outside of this range.",
                NetworkConstants.SYNC_COMMITTEE_SUBNET_COUNT - 1, subcommitteeIndex));
      }
      // Sync committees only exist from the Altair fork onwards.
      if (provider.isPhase0Slot(slot)) {
        throw new IllegalArgumentException(String.format("Slot %s is not an Altair slot", slot));
      }
      final SafeFuture<Optional<SyncCommitteeContribution>> future =
          provider.createSyncCommitteeContribution(slot, subcommitteeIndex, blockRoot);
      // An empty Optional from the provider becomes a 404 response.
      handleOptionalResult(ctx, future, this::processResult, SC_NOT_FOUND);
    } catch (final IllegalArgumentException e) {
      // Any validation failure above is reported as a 400 with a JSON body.
      ctx.json(jsonProvider.objectToJSON(new BadRequest(e.getMessage())));
      ctx.status(SC_BAD_REQUEST);
    }
  }

  /**
   * Serializes a found contribution into the response JSON.
   *
   * @param ctx the request context (unused; required by the callback signature)
   * @param contribution the contribution to wrap and serialize
   * @return the JSON body to send, always present
   * @throws JsonProcessingException if serialization fails
   */
  private Optional<String> processResult(
      final Context ctx, final SyncCommitteeContribution contribution)
      throws JsonProcessingException {
    return Optional.of(
        jsonProvider.objectToJSON(new GetSyncCommitteeContributionResponse(contribution)));
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.apex.malhar.lib.window.impl;

import java.util.Map;

import javax.validation.constraints.NotNull;

import org.apache.apex.malhar.lib.state.spillable.Spillable;
import org.apache.apex.malhar.lib.state.spillable.SpillableComplexComponent;
import org.apache.apex.malhar.lib.utils.serde.GenericSerde;
import org.apache.apex.malhar.lib.utils.serde.Serde;
import org.apache.apex.malhar.lib.window.Window;
import org.apache.apex.malhar.lib.window.WindowedStorage;

import com.datatorrent.api.Context;

/**
 * This is an implementation of WindowedPlainStorage that makes use of {@link Spillable} data structures.
 * All per-window values are kept in a single spillable map keyed by {@link Window}.
 *
 * @param <T> Type of the value per window
 *
 * @since 3.6.0
 */
public class SpillableWindowedPlainStorage<T> implements WindowedStorage.WindowedPlainStorage<T>
{
  // Factory for the spillable data structures; must be injected before setup().
  @NotNull
  private SpillableComplexComponent scc;
  // Bucket id for spillable state; 0 means "derive one automatically in setup()".
  private long bucket;
  private Serde<Window> windowSerde;
  private Serde<T> valueSerde;

  // Lazily created in setup(); maps each window to its single stored value.
  protected Spillable.SpillableMap<Window, T> windowToDataMap;

  public SpillableWindowedPlainStorage()
  {
  }

  public SpillableWindowedPlainStorage(long bucket, Serde<Window> windowSerde, Serde<T> valueSerde)
  {
    this.bucket = bucket;
    this.windowSerde = windowSerde;
    this.valueSerde = valueSerde;
  }

  public void setSpillableComplexComponent(SpillableComplexComponent scc)
  {
    this.scc = scc;
  }

  public SpillableComplexComponent getSpillableComplexComponent()
  {
    return this.scc;
  }

  public void setBucket(long bucket)
  {
    this.bucket = bucket;
  }

  public void setWindowSerde(Serde<Window> windowSerde)
  {
    this.windowSerde = windowSerde;
  }

  public void setValueSerde(Serde<T> valueSerde)
  {
    this.valueSerde = valueSerde;
  }

  @Override
  public void put(Window window, T value)
  {
    this.windowToDataMap.put(window, value);
  }

  @Override
  public T get(Window window)
  {
    return this.windowToDataMap.get(window);
  }

  @Override
  public Iterable<Map.Entry<Window, T>> entries()
  {
    return this.windowToDataMap.entrySet();
  }

  @Override
  public boolean containsWindow(Window window)
  {
    return this.windowToDataMap.containsKey(window);
  }

  @Override
  public long size()
  {
    return this.windowToDataMap.size();
  }

  @Override
  public void remove(Window window)
  {
    this.windowToDataMap.remove(window);
  }

  @Override
  public void setup(Context.OperatorContext context)
  {
    if (bucket == 0) {
      // choose a bucket that is almost guaranteed to be unique
      String bucketKey = context.getValue(Context.DAGContext.APPLICATION_NAME) + "#" + context.getId();
      bucket = bucketKey.hashCode();
    }
    // fall back to generic serialization when no serde was configured
    if (windowSerde == null) {
      windowSerde = new GenericSerde<>();
    }
    if (valueSerde == null) {
      valueSerde = new GenericSerde<>();
    }
    if (windowToDataMap == null) {
      windowToDataMap = scc.newSpillableMap(windowSerde, valueSerde, new WindowTimeExtractor());
    }
  }

  @Override
  public void teardown()
  {
  }
}
// FCMOD package net.minecraft.src; public abstract class FCBlockChunkOreStorage extends FCBlockFallingFullBlock { protected FCBlockChunkOreStorage( int iBlockID ) { super( iBlockID, Material.rock ); setHardness( 1F ); setResistance( 5F ); SetPicksEffectiveOn(); setStepSound( soundStoneFootstep ); setCreativeTab( CreativeTabs.tabBlock ); SetCanBeCookedByKiln( true ); } @Override public int GetCookTimeMultiplierInKiln( IBlockAccess blockAccess, int i, int j, int k ) { return 8; } //------------- Class Specific Methods ------------// //----------- Client Side Functionality -----------// }
package com.codeest.geeknews.ui.zhihu.activity;

import android.app.ActivityOptions;
import android.content.Intent;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.View;

import com.codeest.geeknews.R;
import com.codeest.geeknews.base.BaseActivity;
import com.codeest.geeknews.model.bean.SectionChildListBean;
import com.codeest.geeknews.presenter.SectionChildPresenter;
import com.codeest.geeknews.presenter.contract.SectionChildContract;
import com.codeest.geeknews.ui.zhihu.adapter.SectionChildAdapter;
import com.codeest.geeknews.util.SnackbarUtil;
import com.codeest.geeknews.widget.ProgressImageView;

import java.util.ArrayList;
import java.util.List;

import butterknife.BindView;

/**
 * Created by codeest on 16/8/28.
 *
 * Displays the story list of a single Zhihu section. The section id and title
 * arrive via Intent extras "id" and "title"; data is loaded through
 * {@link SectionChildPresenter} and rendered by {@link SectionChildAdapter}.
 */
public class SectionActivity extends BaseActivity<SectionChildPresenter> implements SectionChildContract.View {

    @BindView(R.id.rv_section_content)
    RecyclerView rvSectionContent;
    @BindView(R.id.swipe_refresh)
    SwipeRefreshLayout swipeRefresh;
    @BindView(R.id.tool_bar)
    Toolbar mToolBar;
    @BindView(R.id.iv_progress)
    ProgressImageView ivProgress;

    List<SectionChildListBean.StoriesBean> mList;
    SectionChildAdapter mAdapter;
    int id;
    String title;

    @Override
    protected void initInject() {
        getActivityComponent().inject(this);
    }

    @Override
    protected int getLayout() {
        return R.layout.activity_section;
    }

    @Override
    protected void initEventAndData() {
        Intent intent = getIntent();
        id = intent.getIntExtra("id", 0);
        title = intent.getStringExtra("title");
        setToolBar(mToolBar,title);
        mList = new ArrayList<>();
        mAdapter = new SectionChildAdapter(mContext, mList);
        rvSectionContent.setLayoutManager(new LinearLayoutManager(mContext));
        rvSectionContent.setAdapter(mAdapter);
        swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                mPresenter.getThemeChildData(id);
            }
        });
        mAdapter.setOnItemClickListener(new SectionChildAdapter.OnItemClickListener() {
            @Override
            public void OnItemClick(int position, View shareView) {
                // Mark the story as read (persisted + reflected in the list UI)
                // before navigating to the detail screen.
                mPresenter.insertReadToDB(mList.get(position).getId());
                mAdapter.setReadState(position, true);
                mAdapter.notifyItemChanged(position);
                Intent intent = new Intent();
                intent.setClass(mContext, ZhihuDetailActivity.class);
                intent.putExtra("id", mList.get(position).getId());
                if (shareView != null) {
                    // Shared-element transition on the clicked view.
                    mContext.startActivity(intent, ActivityOptions.makeSceneTransitionAnimation(mContext,
                            shareView, "shareView").toBundle());
                } else {
                    startActivity(intent, ActivityOptions.makeSceneTransitionAnimation(mContext).toBundle());
                }
            }
        });
        // Initial load: show the indeterminate progress view (not the
        // swipe-refresh spinner) for the very first request.
        mPresenter.getThemeChildData(id);
        ivProgress.start();
    }

    @Override
    public void showContent(SectionChildListBean sectionChildListBean) {
        stopLoadingView();
        mList.clear();
        mList.addAll(sectionChildListBean.getStories());
        mAdapter.notifyDataSetChanged();
    }

    @Override
    public void showError(String msg) {
        stopLoadingView();
        SnackbarUtil.showShort(getWindow().getDecorView(),msg);
    }

    /**
     * Stops whichever loading indicator is currently showing: the
     * swipe-refresh spinner for pull-to-refresh, otherwise the initial
     * progress view. Extracted because showContent/showError duplicated it.
     */
    private void stopLoadingView() {
        if(swipeRefresh.isRefreshing()) {
            swipeRefresh.setRefreshing(false);
        } else {
            ivProgress.stop();
        }
    }
}
/**
 */
package edu.kit.ipd.sdq.kamp4bp.model.fieldofactivityannotations.provider;

import edu.kit.ipd.sdq.kamp4bp.model.fieldofactivityannotations.BPFieldOfActivityAnnotationsPackage;
import edu.kit.ipd.sdq.kamp4bp.model.fieldofactivityannotations.BPOrganizationalUnit;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;

// NOTE(review): EMF-generated item provider — methods tagged @generated are
// overwritten on regeneration; hand edits belong between the user-doc markers.
/**
 * This is the item provider adapter for a {@link edu.kit.ipd.sdq.kamp4bp.model.fieldofactivityannotations.BPOrganizationalUnit} object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class BPOrganizationalUnitItemProvider extends BPUserActionAnnotationItemProvider {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BPOrganizationalUnitItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		// Descriptors are built once and cached; inherited descriptors come
		// from the superclass call, then this class appends its own feature.
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);

			addAbstractUserActionPropertyDescriptor(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This adds a property descriptor for the Abstract User Action feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addAbstractUserActionPropertyDescriptor(Object object) {
		// Boolean args (settable=true, multiLine=false, sortChoices=true)
		// follow the standard createItemPropertyDescriptor signature.
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_BPOrganizationalUnit_abstractUserAction_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_BPOrganizationalUnit_abstractUserAction_feature", "_UI_BPOrganizationalUnit_type"),
				 BPFieldOfActivityAnnotationsPackage.Literals.BP_ORGANIZATIONAL_UNIT__ABSTRACT_USER_ACTION,
				 true,
				 false,
				 true,
				 null,
				 null,
				 null));
	}

	/**
	 * This returns BPOrganizationalUnit.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/BPOrganizationalUnit"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		// Falls back to the bare type name when the entity has no name.
		String label = ((BPOrganizationalUnit)object).getEntityName();
		return label == null || label.length() == 0 ?
			getString("_UI_BPOrganizationalUnit_type") :
			getString("_UI_BPOrganizationalUnit_type") + " " + label;
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);
	}

}
package ru.tinkoff.qa.neptune.core.api.logical.lexemes;

import ru.tinkoff.qa.neptune.core.api.steps.annotations.Description;

import static ru.tinkoff.qa.neptune.core.api.localization.StepLocalization.translate;

/**
 * It is used to link descriptions of operators in descriptions of XOR-expressions.
 * A singleton: use {@link #ONLY_ONE_LEXEME}.
 */
@Description("xor")
public final class OnlyOne {

    /** The single shared instance of this lexeme. */
    public static final OnlyOne ONLY_ONE_LEXEME = new OnlyOne();

    private OnlyOne() {
        super();
    }

    /**
     * Returns the localized representation of the "xor" lexeme.
     */
    @Override
    public String toString() {
        // Fix: annotate with @Override — this overrides Object.toString(),
        // matching the project's convention and letting the compiler verify it.
        return translate(this);
    }
}
/* * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazon.opendistroforelasticsearch.alerting.destination.factory; import com.amazon.opendistroforelasticsearch.alerting.destination.client.DestinationHttpClient; import com.amazon.opendistroforelasticsearch.alerting.destination.client.DestinationHttpClientPool; import com.amazon.opendistroforelasticsearch.alerting.destination.message.ChimeMessage; import com.amazon.opendistroforelasticsearch.alerting.destination.response.DestinationResponse; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.rest.RestStatus; /** * This class handles the client responsible for submitting the messages to Chime destination. 
*/ public class ChimeDestinationFactory implements DestinationFactory<ChimeMessage, DestinationHttpClient>{ private static final Logger logger = LogManager.getLogger(ChimeDestinationFactory.class); private DestinationHttpClient destinationHttpClient; public ChimeDestinationFactory() { this.destinationHttpClient = DestinationHttpClientPool.getHttpClient(); } @Override public DestinationResponse publish(ChimeMessage message) { try { String response = getClient(message).execute(message); return new DestinationResponse.Builder().withStatusCode(RestStatus.OK.getStatus()).withResponseContent(response).build(); } catch (Exception ex) { logger.error("Exception publishing Message: " + message.toString(), ex); throw new IllegalStateException(ex); } } @Override public DestinationHttpClient getClient(ChimeMessage message) { return destinationHttpClient; } /* * This function can be used to mock the client for unit test */ public void setClient(DestinationHttpClient client) { this.destinationHttpClient = client; } }
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
/*
 * Changes for SnappyData data platform.
 *
 * Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package io.snappydata.thrift.server;

import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Properties;

import com.gemstone.gemfire.internal.cache.locks.NonReentrantLock;
import com.gemstone.gemfire.internal.shared.ClientSharedUtils;
import com.gemstone.gemfire.internal.shared.FinalizeObject;
import com.pivotal.gemfirexd.internal.iapi.jdbc.EngineConnection;
import com.pivotal.gemfirexd.internal.iapi.jdbc.EngineStatement;
import com.pivotal.gemfirexd.internal.jdbc.EmbedXAConnection;
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState;
import io.snappydata.thrift.OpenConnectionArgs;
import io.snappydata.thrift.SecurityMechanism;
import io.snappydata.thrift.StatementAttrs;
import io.snappydata.thrift.common.ThriftExceptionUtil;
import io.snappydata.thrift.snappydataConstants;

/**
 * Holder for a connection on the server side for each open client connection.
 * Tracks the session token used to authenticate RPCs, the statements and
 * result sets registered against the connection, and a reusable plain
 * statement. All mutable statement/result-set bookkeeping is guarded by the
 * non-reentrant {@code sync} lock.
 */
final class ConnectionHolder {
  private final EngineConnection conn;
  // XA wrapper, if the client opened an XA connection; may be null.
  private final EmbedXAConnection xaConn;
  private final long connId;
  private final Properties props;
  // Secure random session token; wraps its full backing array (see sameToken).
  private final ByteBuffer token;
  private final String clientHostName;
  private final String clientID;
  private final String clientHostId;
  private final String userName;
  private final boolean useStringForDecimal;
  // Cached idle statement handed back by createNewStatement() when available.
  private EngineStatement reusableStatement;
  private volatile StatementHolder activeStatement;
  private final ArrayList<StatementHolder> registeredStatements;
  // Guards reusableStatement, activeStatement and registeredStatements.
  private final NonReentrantLock sync;
  private final long startTime;

  ConnectionHolder(final EngineConnection conn, final EmbedXAConnection xaConn,
      final OpenConnectionArgs args, final long connId, final Properties props,
      final SecureRandom rnd) throws SQLException {
    this.conn = conn;
    this.xaConn = xaConn;
    this.connId = connId;
    this.props = props;

    // generate a unique ID for the connection; this is a secure random string
    // rather than the internal long connection ID to ensure security and is
    // checked in every client-server RPC call if the client has so requested
    if (args.getSecurity() == SecurityMechanism.PLAIN
        || args.getSecurity() == SecurityMechanism.DIFFIE_HELLMAN) {
      int tokenSize = snappydataConstants.DEFAULT_SESSION_TOKEN_SIZE;
      if (args.isSetTokenSize()) {
        if (args.getTokenSize() < tokenSize) {
          // don't accept small token sizes
          throw ThriftExceptionUtil.newSQLException(
              SQLState.NET_CONNECT_AUTH_FAILED, null,
              "specified connection token size " + args.getTokenSize()
                  + " smaller than minimum allowed of " + tokenSize);
        } else {
          tokenSize = args.getTokenSize();
        }
      }
      byte[] rndBytes = new byte[tokenSize];
      rnd.nextBytes(rndBytes);
      this.token = ByteBuffer.wrap(rndBytes);
    } else {
      // no other security mechanism supported yet
      throw ThriftExceptionUtil.newSQLException(
          SQLState.NET_CONNECT_AUTH_FAILED, null,
          "unsupported security mechanism " + args.getSecurity());
    }

    this.clientHostName = args.getClientHostName();
    this.clientID = args.getClientID();
    this.clientHostId = ClientTracker.getClientHostId(this.clientHostName,
        this.clientID);
    this.userName = args.getUserName();
    this.useStringForDecimal = args.isSetUseStringForDecimal()
        && args.useStringForDecimal;
    // pre-create one statement so the first client request can reuse it
    this.reusableStatement = (EngineStatement)conn.createStatement();
    this.registeredStatements = new ArrayList<>(4);
    this.sync = new NonReentrantLock(true);
    this.startTime = System.currentTimeMillis();
  }

  /**
   * A registered result set: the JDBC ResultSet plus its client-visible
   * cursor id and current offset.
   */
  static class ResultSetHolder {
    protected ResultSet resultSet;
    protected long rsCursorId;
    protected int rsOffset;

    ResultSetHolder(ResultSet rs, long cursorId, int offset) {
      this.resultSet = rs;
      this.rsCursorId = cursorId;
      this.rsOffset = offset;
    }
  }

  /**
   * A registered statement. Extends ResultSetHolder so the common
   * single-result-set case needs no extra allocation; additional result sets
   * spill into {@code moreResultSets}.
   */
  final class StatementHolder extends ResultSetHolder {
    private final Statement stmt;
    private final StatementAttrs statementAttrs;
    private final long stmtId;
    // Either the SQL string or a batched/parameterized representation.
    private final Object sql;
    private final long startTime;
    private volatile String status;
    private volatile int accessFrequency;
    // Extra result sets beyond the first (lazily allocated).
    private ArrayList<ResultSetHolder> moreResultSets;

    private StatementHolder(Statement stmt, StatementAttrs attrs, long stmtId,
        Object sql, long startTime, String status) {
      super(null, snappydataConstants.INVALID_ID, 0);
      this.stmt = stmt;
      this.statementAttrs = attrs;
      this.stmtId = stmtId;
      this.sql = sql;
      this.startTime = startTime;
      this.status = status;
      this.accessFrequency = 1;
    }

    final ConnectionHolder getConnectionHolder() {
      return ConnectionHolder.this;
    }

    final Statement getStatement() {
      return this.stmt;
    }

    final long getStatementId() {
      return this.stmtId;
    }

    final Object getSQL() {
      return this.sql;
    }

    final StatementAttrs getStatementAttrs() {
      return this.statementAttrs;
    }

    final long getStartTime() {
      return this.startTime;
    }

    final String getStatus() {
      return this.status;
    }

    final int getAccessFrequency() {
      return this.accessFrequency;
    }

    final void setStatus(String newStatus) {
      this.status = newStatus;
    }

    final void incrementAccessFrequency() {
      // NOTE(review): non-atomic read-modify-write on a volatile; concurrent
      // increments can be lost — presumably acceptable for a statistic.
      final int accessFrequency = this.accessFrequency;
      this.accessFrequency = accessFrequency + 1;
    }

    ResultSetHolder addResultSet(ResultSet rs, long cursorId) {
      final NonReentrantLock sync = ConnectionHolder.this.sync;
      sync.lock();
      try {
        return addResultSetNoLock(rs, cursorId);
      } finally {
        sync.unlock();
      }
    }

    // Caller must hold ConnectionHolder.this.sync.
    private ResultSetHolder addResultSetNoLock(ResultSet rs, long cursorId) {
      if (this.resultSet == null) {
        // first result set occupies this holder itself
        this.resultSet = rs;
        this.rsCursorId = cursorId;
        // offset will always be zero in initial registration
        this.rsOffset = 0;
        return this;
      } else {
        if (this.moreResultSets == null) {
          this.moreResultSets = new ArrayList<>(4);
        }
        ResultSetHolder holder = new ResultSetHolder(rs, cursorId, 0);
        this.moreResultSets.add(holder);
        return holder;
      }
    }

    /** Returns the holder for the given cursor id, or null if not found. */
    ResultSetHolder findResultSet(long cursorId) {
      final ArrayList<ResultSetHolder> moreResults;
      final NonReentrantLock sync = ConnectionHolder.this.sync;
      sync.lock();
      try {
        if (this.rsCursorId == cursorId) {
          return this;
        } else if ((moreResults = this.moreResultSets) != null) {
          for (ResultSetHolder holder : moreResults) {
            if (holder.rsCursorId == cursorId) {
              return holder;
            }
          }
        }
      } finally {
        sync.unlock();
      }
      return null;
    }

    /**
     * Removes and returns the ResultSet for the given cursor id (null if not
     * registered). If the primary slot is removed, a spilled holder is
     * promoted into it so the primary slot stays filled.
     */
    ResultSet removeResultSet(long cursorId) {
      final ArrayList<ResultSetHolder> moreResults;
      final NonReentrantLock sync = ConnectionHolder.this.sync;
      sync.lock();
      try {
        if (this.rsCursorId == cursorId) {
          final ResultSet rs = this.resultSet;
          // move from list if present
          if ((moreResults = this.moreResultSets) != null) {
            ResultSetHolder holder = moreResults.remove(moreResults.size() - 1);
            this.resultSet = holder.resultSet;
            this.rsCursorId = holder.rsCursorId;
            this.rsOffset = holder.rsOffset;
          } else {
            this.resultSet = null;
            this.rsCursorId = snappydataConstants.INVALID_ID;
            this.rsOffset = 0;
          }
          return rs;
        } else if ((moreResults = this.moreResultSets) != null) {
          Iterator<ResultSetHolder> itr = moreResults.iterator();
          while (itr.hasNext()) {
            final ResultSetHolder holder = itr.next();
            if (holder.rsCursorId == cursorId) {
              itr.remove();
              if (moreResults.isEmpty()) {
                this.moreResultSets = null;
              }
              return holder.resultSet;
            }
          }
        }
      } finally {
        sync.unlock();
      }
      return null;
    }

    /** Unregisters the cursor from the service map and closes its ResultSet. */
    void closeResultSet(long cursorId, final SnappyDataServiceImpl service) {
      final ResultSet rs = removeResultSet(cursorId);
      if (rs != null) {
        service.resultSetMap.removePrimitive(cursorId);
        try {
          rs.close();
        } catch (Exception e) {
          // deliberately ignored
        }
      }
    }

    /** Closes every registered ResultSet, logging (not raising) failures. */
    void closeAllResultSets(final SnappyDataServiceImpl service) {
      final ArrayList<ResultSetHolder> moreResults;
      final ResultSet rs = this.resultSet;
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException sqle) {
          // ignore exception at this point
          service.logger.error("unexpected exception in ResultSet.close()",
              sqle);
        } finally {
          service.resultSetMap.removePrimitive(this.rsCursorId);
          this.resultSet = null;
          this.rsCursorId = snappydataConstants.INVALID_ID;
          this.rsOffset = 0;
        }
        if ((moreResults = this.moreResultSets) != null) {
          for (ResultSetHolder holder : moreResults) {
            try {
              holder.resultSet.close();
            } catch (SQLException sqle) {
              // ignore exception at this point
              service.logger.error("unexpected exception in ResultSet.close()",
                  sqle);
            } finally {
              service.resultSetMap.removePrimitive(holder.rsCursorId);
            }
          }
          this.moreResultSets = null;
        }
      }
    }
  }

  /**
   * Returns a statement configured per the given attributes, reusing the
   * cached idle statement when possible to avoid a fresh allocation.
   */
  EngineStatement createNewStatement(StatementAttrs attrs) throws SQLException {
    // Get the result type
    int resultSetType = SnappyDataServiceImpl.getResultType(attrs);
    // Get the resultSetConcurrency
    int resultSetConcurrency = SnappyDataServiceImpl
        .getResultSetConcurrency(attrs);
    // Get the resultSetHoldability
    int resultSetHoldability = SnappyDataServiceImpl
        .getResultSetHoldability(attrs);
    this.sync.lock();
    try {
      final EngineStatement stmt = this.reusableStatement;
      if (stmt != null) {
        stmt.reset(resultSetType, resultSetConcurrency, resultSetHoldability);
        this.reusableStatement = null;
        return stmt;
      }
    } finally {
      this.sync.unlock();
    }
    return (EngineStatement)this.conn.createStatement(resultSetType,
        resultSetConcurrency, resultSetHoldability);
  }

  final EngineConnection getConnection() {
    return this.conn;
  }

  final EmbedXAConnection getXAConnection() {
    return this.xaConn;
  }

  final long getConnectionId() {
    return this.connId;
  }

  final Properties getProperties() {
    return this.props;
  }

  final ByteBuffer getToken() {
    return this.token;
  }

  /**
   * Get given session token as a hex string.
   */
  static String getTokenAsString(ByteBuffer token) {
    if (token != null) {
      return ClientSharedUtils.toHexString(token);
    } else {
      return "NULL";
    }
  }

  final String getClientHostName() {
    return this.clientHostName;
  }

  final String getClientID() {
    return this.clientID;
  }

  final String getClientHostId() {
    return this.clientHostId;
  }

  final String getUserName() {
    return this.userName;
  }

  final boolean useStringForDecimal() {
    return this.useStringForDecimal;
  }

  final long getStartTime() {
    return this.startTime;
  }

  /** Returns the statement to the reuse cache, or closes it if cache is full. */
  void setStatementForReuse(EngineStatement stmt) throws SQLException {
    this.sync.lock();
    try {
      setStatementForReuseNoLock(stmt);
    } finally {
      this.sync.unlock();
    }
  }

  // Caller must hold this.sync.
  private void setStatementForReuseNoLock(final EngineStatement stmt)
      throws SQLException {
    if (this.reusableStatement == null) {
      stmt.resetForReuse();
      this.reusableStatement = stmt;
    } else {
      stmt.close();
    }
  }

  StatementHolder getActiveStatement() {
    return this.activeStatement;
  }

  void setActiveStatement(StatementHolder stmtHolder) {
    // NOTE(review): lock/unlock without try-finally — safe only because a
    // plain field assignment cannot throw; confirm before adding logic here.
    this.sync.lock();
    this.activeStatement = stmtHolder;
    this.sync.unlock();
  }

  /** Clears the active statement if it wraps exactly the given Statement. */
  void clearActiveStatement(Statement stmt) {
    if (stmt != null) {
      // NOTE(review): same no-try-finally pattern as setActiveStatement.
      this.sync.lock();
      final StatementHolder activeStatement = this.activeStatement;
      if (activeStatement != null && stmt == activeStatement.stmt) {
        this.activeStatement = null;
      }
      this.sync.unlock();
    }
  }

  StatementHolder newStatementHolder(Statement stmt, StatementAttrs attrs,
      long stmtId, Object sql, boolean recordStart, String status) {
    // start time is only recorded when requested (nanoTime has a cost)
    final long startTime = recordStart ? System.nanoTime() : 0L;
    return new StatementHolder(stmt, attrs, stmtId, sql, startTime, status);
  }

  /** Registers a prepared statement and makes it the active statement. */
  StatementHolder registerPreparedStatement(PreparedStatement pstmt,
      StatementAttrs attrs, long stmtId, String sql, boolean recordStart) {
    StatementHolder stmtHolder;
    this.sync.lock();
    try {
      stmtHolder = newStatementHolder(pstmt, attrs, stmtId, sql, recordStart,
          "PREPARED");
      this.registeredStatements.add(stmtHolder);
      this.activeStatement = stmtHolder;
    } finally {
      this.sync.unlock();
    }
    return stmtHolder;
  }

  /**
   * Returns the single in-flight statement (used e.g. for cancel), optionally
   * ignoring prepared statements; throws if more than one candidate exists.
   */
  Statement uniqueActiveStatement(boolean skipPrepared) throws SQLException {
    Statement result = null;
    this.sync.lock();
    try {
      StatementHolder activeStatement = this.activeStatement;
      if (activeStatement != null) {
        result = activeStatement.getStatement();
        if (skipPrepared && result instanceof PreparedStatement) {
          result = null;
        }
      }
      for (StatementHolder holder : this.registeredStatements) {
        Statement stmt = holder.getStatement();
        if (stmt != result
            && !(skipPrepared && stmt instanceof PreparedStatement)) {
          // if duplicate then throw exception
          if (result != null) {
            throw ThriftExceptionUtil.newSQLException(
                SQLState.CANCEL_NO_UNIQUE_STATEMENT, null);
          } else {
            result = stmt;
          }
        }
      }
      return result;
    } finally {
      this.sync.unlock();
    }
  }

  /** Registers a result set against an already-held statement holder. */
  ResultSetHolder registerResultSet(final StatementHolder stmtHolder,
      ResultSet rs, long cursorId) {
    this.sync.lock();
    try {
      ResultSetHolder holder = stmtHolder.addResultSetNoLock(rs, cursorId);
      this.registeredStatements.add(stmtHolder);
      return holder;
    } finally {
      this.sync.unlock();
    }
  }

  /** Creates a holder for the statement and registers its result set. */
  StatementHolder registerResultSet(Statement stmt, StatementAttrs attrs,
      long stmtId, ResultSet rs, long cursorId, String sql,
      boolean recordStart) {
    final StatementHolder stmtHolder = newStatementHolder(stmt, attrs, stmtId,
        sql, recordStart, "INIT");
    registerResultSet(stmtHolder, rs, cursorId);
    return stmtHolder;
  }

  /**
   * Closes a statement on behalf of the client: unregisters it, closes its
   * result sets, and either recycles a plain EngineStatement into the reuse
   * cache or closes it outright.
   */
  void closeStatement(final StatementHolder stmtHolder,
      final SnappyDataServiceImpl service) {
    final long stmtId = stmtHolder.getStatementId();
    this.sync.lock();
    try {
      final Statement stmt;
      final EngineStatement estmt;
      removeActiveStatementNoLock(stmtHolder);
      stmtHolder.closeAllResultSets(service);
      stmt = stmtHolder.getStatement();
      // set statement for reuse now that it is being closed on client
      if (stmt instanceof EngineStatement
          && !(estmt = (EngineStatement)stmt).isPrepared()) {
        setStatementForReuseNoLock(estmt);
      } else if (stmt != null) {
        stmt.close();
      }
    } catch (Exception e) {
      // deliberately ignored
    } finally {
      this.sync.unlock();
      service.statementMap.removePrimitive(stmtId);
    }
  }

  // Caller must hold this.sync; removes by identity, scanning from the back.
  private void removeActiveStatementNoLock(final StatementHolder stmtHolder) {
    final ArrayList<StatementHolder> statements = this.registeredStatements;
    int size = statements.size();
    // usually we will find the statement faster from the back
    while (--size >= 0) {
      if (statements.get(size) == stmtHolder) {
        statements.remove(size);
        break;
      }
    }
  }

  /**
   * Closes the whole connection. With {@code forceClose} the statements skip
   * normal cleanup (finalizers cleared, close distributed via the pending
   * queue); otherwise everything is closed through the regular JDBC path.
   */
  void close(final SnappyDataServiceImpl service, boolean forceClose) {
    this.sync.lock();
    try {
      for (StatementHolder stmtHolder : this.registeredStatements) {
        stmtHolder.closeAllResultSets(service);
        Statement stmt = stmtHolder.getStatement();
        if (stmt != null) {
          try {
            if (forceClose) {
              if (stmt instanceof EngineStatement) {
                // connection is going to be force closed so no need for
                // any statement cleanup
                ((EngineStatement)stmt).clearFinalizer();
              }
            } else {
              stmt.close();
            }
          } catch (SQLException sqle) {
            // ignore exception at this point
            service.logger.error("unexpected exception in Statement.close()",
                sqle);
          } finally {
            service.statementMap.removePrimitive(stmtHolder.getStatementId());
          }
        }
      }
      final EngineStatement reusableStatement = this.reusableStatement;
      if (reusableStatement != null) {
        try {
          if (forceClose) {
            reusableStatement.clearFinalizer();
          } else {
            reusableStatement.close();
          }
        } catch (SQLException sqle) {
          // ignore exception at this point
          service.logger.error("unexpected exception in Statement.close()",
              sqle);
        }
      }
      if (forceClose) {
        // enqueue distribution of close
        final FinalizeObject finalizer = this.conn.getAndClearFinalizer();
        this.conn.forceClose();
        if (finalizer != null) {
          finalizer.clear();
          finalizer.getHolder().addToPendingQueue(finalizer);
        }
      } else {
        if (this.xaConn != null) {
          try {
            this.xaConn.close();
          } catch (SQLException sqle) {
            // ignore exception
          }
        }
        try {
          if (!this.conn.isClosed()) {
            this.conn.close();
          }
        } catch (SQLException sqle) {
          // force close at this point
          this.conn.forceClose();
        }
      }
    } finally {
      this.sync.unlock();
    }
  }

  /**
   * Constant-structure comparison of the given token against this session's
   * token, avoiding intermediate byte[] copies when possible.
   */
  final boolean sameToken(ByteBuffer otherId) {
    final ByteBuffer connToken = this.token;
    if (connToken == otherId) {
      return true;
    }
    // this.connId always wraps full array
    assert ClientSharedUtils.wrapsFullArray(connToken);

    if (otherId != null) {
      if (ClientSharedUtils.wrapsFullArray(otherId)) {
        return Arrays.equals(otherId.array(), connToken.array());
      } else {
        // don't create intermediate byte[]
        return ClientSharedUtils.equalBuffers(connToken.array(), otherId);
      }
    } else {
      return false;
    }
  }

  @Override
  public final int hashCode() {
    // fold the 64-bit connection id into 32 bits
    return (int)(connId ^ (connId >>> 32));
  }
}
/*
 * Influx API Service
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * OpenAPI spec version: 0.1.0
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

package com.influxdb.client.domain;

import java.util.Objects;
import java.util.Arrays;
import com.influxdb.client.domain.NotificationEndpointDiscrimator;

/**
 * NotificationEndpoint
 */
// Generated marker subclass: all state and behavior live in
// NotificationEndpointBase; equals/hashCode/toString delegate to super.
public class NotificationEndpoint extends NotificationEndpointBase {

  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    // exact-class check above keeps equals symmetric across subclasses
    return super.equals(o);
  }

  @Override
  public int hashCode() {
    return Objects.hash(super.hashCode());
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class NotificationEndpoint {\n");
    sb.append("    ").append(toIndentedString(super.toString())).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }

}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.oracle.model;

import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.struct.rdb.DBSSequence;

import java.math.BigDecimal;
import java.sql.ResultSet;

/**
 * Oracle sequence. Populated either with defaults (for a new, unpersisted
 * sequence) or from an ALL_SEQUENCES-style result set.
 */
public class OracleSequence extends OracleSchemaObject implements DBSSequence {

    private BigDecimal minValue;
    private BigDecimal maxValue;
    private long incrementBy;
    private long cacheSize;
    private BigDecimal lastValue;
    private boolean flagCycle;
    private boolean flagOrder;

    /**
     * Creates a new (not yet persisted) sequence with default values.
     */
    public OracleSequence(OracleSchema schema, String name) {
        super(schema, name, false);
        this.minValue = null;
        this.maxValue = null;
        this.incrementBy = 0;
        this.cacheSize = 0;
        // Fix: use the canonical shared constant instead of new BigDecimal(0)
        this.lastValue = BigDecimal.ZERO;
        this.flagCycle = false;
        this.flagOrder = false;
    }

    /**
     * Creates a sequence from catalog metadata.
     *
     * @param dbResult row with SEQUENCE_NAME, MIN_VALUE, MAX_VALUE,
     *                 INCREMENT_BY, CACHE_SIZE, LAST_NUMBER, CYCLE_FLAG,
     *                 ORDER_FLAG columns
     */
    public OracleSequence(OracleSchema schema, ResultSet dbResult) {
        super(schema, JDBCUtils.safeGetString(dbResult, "SEQUENCE_NAME"), true);
        this.minValue = JDBCUtils.safeGetBigDecimal(dbResult, "MIN_VALUE");
        this.maxValue = JDBCUtils.safeGetBigDecimal(dbResult, "MAX_VALUE");
        this.incrementBy = JDBCUtils.safeGetLong(dbResult, "INCREMENT_BY");
        this.cacheSize = JDBCUtils.safeGetLong(dbResult, "CACHE_SIZE");
        this.lastValue = JDBCUtils.safeGetBigDecimal(dbResult, "LAST_NUMBER");
        // "Y"/"N" flags in the Oracle catalog
        this.flagCycle = JDBCUtils.safeGetBoolean(dbResult, "CYCLE_FLAG", "Y");
        this.flagOrder = JDBCUtils.safeGetBoolean(dbResult, "ORDER_FLAG", "Y");
    }

    @NotNull
    @Override
    @Property(viewable = true, editable = true, valueTransformer = DBObjectNameCaseTransformer.class, order = 1)
    public String getName() {
        return super.getName();
    }

    @Property(viewable = true, editable = true, updatable = true, order = 2)
    public BigDecimal getLastValue() {
        return lastValue;
    }

    public void setLastValue(BigDecimal lastValue) {
        this.lastValue = lastValue;
    }

    @Property(viewable = true, editable = true, updatable = true, order = 3)
    public BigDecimal getMinValue() {
        return minValue;
    }

    public void setMinValue(BigDecimal minValue) {
        this.minValue = minValue;
    }

    @Property(viewable = true, editable = true, updatable = true, order = 4)
    public BigDecimal getMaxValue() {
        return maxValue;
    }

    public void setMaxValue(BigDecimal maxValue) {
        this.maxValue = maxValue;
    }

    // NOTE(review): boxed Long while the field is a primitive long — kept
    // as-is because the property framework may rely on this exact signature;
    // setIncrementBy(null) would throw NPE on unboxing.
    @Property(viewable = true, editable = true, updatable = true, order = 5)
    public Long getIncrementBy() {
        return incrementBy;
    }

    public void setIncrementBy(Long incrementBy) {
        this.incrementBy = incrementBy;
    }

    @Property(viewable = true, editable = true, updatable = true, order = 6)
    public long getCacheSize() {
        return cacheSize;
    }

    public void setCacheSize(long cacheSize) {
        this.cacheSize = cacheSize;
    }

    @Property(viewable = true, editable = true, updatable = true, order = 7)
    public boolean isCycle() {
        return flagCycle;
    }

    public void setCycle(boolean flagCycle) {
        this.flagCycle = flagCycle;
    }

    @Property(viewable = true, editable = true, updatable = true, order = 8)
    public boolean isOrder() {
        return flagOrder;
    }

    public void setOrder(boolean flagOrder) {
        this.flagOrder = flagOrder;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.eigenbase.sql.fun;

import java.util.List;

import org.eigenbase.reltype.*;
import org.eigenbase.sql.*;
import org.eigenbase.sql.type.*;

import com.google.common.collect.ImmutableList;

/**
 * <code>Sum</code> is an aggregator which returns the sum of the values which
 * go into it. It has precisely one argument of numeric type (<code>int</code>,
 * <code>long</code>, <code>float</code>, <code>double</code>), and the result
 * is the same type.
 */
public class SqlSumAggFunction extends SqlAggFunction {
  //~ Instance fields --------------------------------------------------------

  /** Operand (and result) type of this SUM instance; SUM preserves its argument type. */
  private final RelDataType resultType;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a SUM aggregate function over a single numeric operand.
   *
   * @param type declared operand/result type
   */
  public SqlSumAggFunction(RelDataType type) {
    super(
        "SUM",
        SqlKind.OTHER_FUNCTION,
        ReturnTypes.ARG0_NULLABLE_IF_EMPTY,
        null,
        OperandTypes.NUMERIC,
        SqlFunctionCategory.NUMERIC);
    this.resultType = type;
  }

  //~ Methods ----------------------------------------------------------------

  /** Returns the single-element parameter type list of this aggregate. */
  public List<RelDataType> getParameterTypes(RelDataTypeFactory typeFactory) {
    return ImmutableList.of(resultType);
  }

  /** Returns the type this aggregate was declared with. */
  public RelDataType getType() {
    return resultType;
  }

  /** Returns the result type, which for SUM is the operand type itself. */
  public RelDataType getReturnType(RelDataTypeFactory typeFactory) {
    return resultType;
  }
}

// End SqlSumAggFunction.java
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.12.18 at 03:02:22 PM EST
//

package org.oasis_open.docs.s_ramp.ns.s_ramp_v1;

import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for faultTarget complex type.
 *
 * <p>JAXB binding for the S-RAMP "faultTarget" schema type: a {@link Target}
 * reference carrying a mandatory {@code artifactType} XML attribute of type
 * {@code faultEnum}.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="faultTarget">
 *   &lt;simpleContent>
 *     &lt;extension base="&lt;http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0>target">
 *       &lt;attribute name="artifactType" use="required" type="{http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0}faultEnum" />
 *       &lt;anyAttribute/>
 *     &lt;/extension>
 *   &lt;/simpleContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "faultTarget")
public class FaultTarget extends Target implements Serializable {

    private static final long serialVersionUID = 5765266685199324066L;

    // Required by the schema (use="required"); marshalling relies on this
    // field-level JAXB annotation, so it must stay on the field, not the getter.
    @XmlAttribute(name = "artifactType", required = true)
    protected FaultEnum artifactType;

    /**
     * Gets the value of the artifactType property.
     *
     * @return
     *     possible object is
     *     {@link FaultEnum }
     *
     */
    public FaultEnum getArtifactType() {
        return artifactType;
    }

    /**
     * Sets the value of the artifactType property.
     *
     * @param value
     *     allowed object is
     *     {@link FaultEnum }
     *
     */
    public void setArtifactType(FaultEnum value) {
        this.artifactType = value;
    }

}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.replacestring;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.BooleanLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.IntLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.PrimitiveBooleanArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.PrimitiveIntegerArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;
import org.pentaho.metastore.api.IMetaStore;

/**
 * Unit tests for {@link ReplaceStringMeta}: field-metadata propagation and
 * repository/XML serialization round-trips.
 */
public class ReplaceStringMetaTest {

  // Name used both as the input and output stream field in testGetFields.
  private static final String FIELD_NAME = "test";
  private static final String ENCODING_NAME = "UTF-8";

  /**
   * Verifies that getFields() copies the string encoding of the matched input
   * field onto the value meta it appends to the output row.
   */
  @Test
  public void testGetFields() throws KettleStepException {
    ReplaceStringMeta meta = new ReplaceStringMeta();
    meta.setFieldInStream( new String[] { FIELD_NAME } );
    meta.setFieldOutStream( new String[] { FIELD_NAME } );

    // Input field whose encoding should be carried over to the output field.
    ValueMetaInterface inputFieldMeta = mock( ValueMetaInterface.class );
    when( inputFieldMeta.getStringEncoding() ).thenReturn( ENCODING_NAME );

    // Any field lookup on the input row resolves to the mocked field above.
    RowMetaInterface inputRowMeta = mock( RowMetaInterface.class );
    when( inputRowMeta.searchValueMeta( anyString() ) ).thenReturn( inputFieldMeta );

    StepMeta nextStep = mock( StepMeta.class );
    VariableSpace space = mock( VariableSpace.class );
    Repository repository = mock( Repository.class );
    IMetaStore metaStore = mock( IMetaStore.class );
    meta.getFields( inputRowMeta, "test", null, nextStep, space, repository, metaStore );

    // Capture the value meta that getFields() added and check its encoding.
    ArgumentCaptor<ValueMetaInterface> argument = ArgumentCaptor.forClass( ValueMetaInterface.class );
    verify( inputRowMeta ).addValueMeta( argument.capture() );
    assertEquals( ENCODING_NAME, argument.getValue().getStringEncoding() );
  }

  /**
   * Round-trips ReplaceStringMeta through repository and XML serialization.
   * The attribute names below are the serialized tag names; getterMap/setterMap
   * bind each tag to the accessor pair LoadSaveTester invokes by reflection, so
   * every string here must match the method names in ReplaceStringMeta exactly.
   */
  @Test
  public void testRoundTrips() throws KettleException {
    List<String> attributes = Arrays.asList( "in_stream_name", "out_stream_name", "use_regex", "replace_string",
        "replace_by_string", "set_empty_string", "replace_field_by_string", "whole_word", "case_sensitive" );

    // Serialized attribute -> getter method name.
    Map<String, String> getterMap = new HashMap<String, String>();
    getterMap.put( "in_stream_name", "getFieldInStream" );
    getterMap.put( "out_stream_name", "getFieldOutStream" );
    getterMap.put( "use_regex", "getUseRegEx" );
    getterMap.put( "replace_string", "getReplaceString" );
    getterMap.put( "replace_by_string", "getReplaceByString" );
    getterMap.put( "set_empty_string", "isSetEmptyString" );
    getterMap.put( "replace_field_by_string", "getFieldReplaceByString" );
    getterMap.put( "whole_word", "getWholeWord" );
    getterMap.put( "case_sensitive", "getCaseSensitive" );

    // Serialized attribute -> setter method name.
    Map<String, String> setterMap = new HashMap<String, String>();
    setterMap.put( "in_stream_name", "setFieldInStream" );
    setterMap.put( "out_stream_name", "setFieldOutStream" );
    setterMap.put( "use_regex", "setUseRegEx" );
    setterMap.put( "replace_string", "setReplaceString" );
    setterMap.put( "replace_by_string", "setReplaceByString" );
    setterMap.put( "set_empty_string", "setEmptyString" );
    setterMap.put( "replace_field_by_string", "setFieldReplaceByString" );
    setterMap.put( "whole_word", "setWholeWord" );
    setterMap.put( "case_sensitive", "setCaseSensitive" );

    Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidatorAttributeMap =
        new HashMap<String, FieldLoadSaveValidator<?>>();

    // 25 is the array length used for each generated test fixture.
    FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
        new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 25 );
    FieldLoadSaveValidator<boolean[]> booleanArrayLoadSaveValidator =
        new PrimitiveBooleanArrayLoadSaveValidator( new BooleanLoadSaveValidator(), 25 );
    // Int validators are bounded by the size of the corresponding code table
    // so generated values are always valid indexes into it.
    FieldLoadSaveValidator<int[]> useRegExArrayLoadSaveValidator =
        new PrimitiveIntegerArrayLoadSaveValidator(
            new IntLoadSaveValidator( ReplaceStringMeta.useRegExCode.length ), 25 );
    FieldLoadSaveValidator<int[]> wholeWordArrayLoadSaveValidator =
        new PrimitiveIntegerArrayLoadSaveValidator(
            new IntLoadSaveValidator( ReplaceStringMeta.wholeWordCode.length ), 25 );
    FieldLoadSaveValidator<int[]> caseSensitiveArrayLoadSaveValidator =
        new PrimitiveIntegerArrayLoadSaveValidator(
            new IntLoadSaveValidator( ReplaceStringMeta.caseSensitiveCode.length ), 25 );

    fieldLoadSaveValidatorAttributeMap.put( "in_stream_name", stringArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "out_stream_name", stringArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "use_regex", useRegExArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "replace_string", stringArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "replace_by_string", stringArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "set_empty_string", booleanArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "replace_field_by_string", stringArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "whole_word", wholeWordArrayLoadSaveValidator );
    fieldLoadSaveValidatorAttributeMap.put( "case_sensitive", caseSensitiveArrayLoadSaveValidator );

    LoadSaveTester loadSaveTester =
        new LoadSaveTester( ReplaceStringMeta.class, attributes, getterMap, setterMap,
            fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );

    loadSaveTester.testRepoRoundTrip();
    loadSaveTester.testXmlRoundTrip();
  }
}
/*
 * PROPRIETARY and CONFIDENTIAL
 *
 * Copyright 2012 Magellan Distribution Corporation
 *
 * All rights reserved.
 */
package com.ajah.syndicate.fetch;

import java.io.IOException;

import lombok.extern.java.Log;

import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.ajah.spring.jdbc.err.DataOperationException;
import com.ajah.syndicate.FeedSource;
import com.ajah.syndicate.data.FeedEntryManager;
import com.ajah.syndicate.data.FeedManager;
import com.ajah.syndicate.data.FeedSourceManager;

/**
 * Simple utility for finding {@link FeedSource}s.
 *
 * @author <a href="http://efsavage.com">Eric F. Savage</a>, <a
 *         href="mailto:code@efsavage.com">code@efsavage.com</a>.
 */
@Log
@Component
public class FeedDiscovery {

	@Autowired
	FeedSourceManager feedSourceManager;

	@Autowired
	FeedManager feedManager;

	@Autowired
	FeedEntryManager entryManager;

	/**
	 * Pulls a page and attempts to discover a feed for it via
	 * link[rel='alternate'].
	 *
	 * @param url
	 *            The URL of the page to try and discover the feed for.
	 * @return The feedsource if matched or created, may be null.
	 * @throws ClientProtocolException
	 *             If the page could not be pulled.
	 * @throws IOException
	 *             If the page could not be pulled.
	 * @throws DataOperationException
	 *             If a query could not be executed.
	 */
	public FeedSource discover(final String url) throws ClientProtocolException, IOException, DataOperationException {
		log.fine("Discovering feed for " + url);
		try (final CloseableHttpClient client = HttpClientBuilder.create().build()) {
			final HttpGet get = new HttpGet(url);
			try (final CloseableHttpResponse response = client.execute(get)) {
				final String html = EntityUtils.toString(response.getEntity());
				// FIX: parse with the page URL as base URI so that relative feed
				// hrefs (e.g. "/feed.xml") can be resolved to absolute URLs below.
				final Document doc = Jsoup.parse(html, url);
				final Elements alternateLinks = doc.select("link");
				for (final Element alternateLink : alternateLinks) {
					// rel values and MIME types are ASCII case-insensitive in HTML,
					// so match without regard to case.
					if ("alternate".equalsIgnoreCase(alternateLink.attr("rel"))) {
						if ("application/rss+xml".equalsIgnoreCase(alternateLink.attr("type"))) {
							log.fine("Found rss link " + alternateLink.attr("href"));
							// absUrl resolves a relative href against the base URI;
							// fall back to the raw attribute if resolution fails.
							String rss = alternateLink.absUrl("href");
							if (rss.isEmpty()) {
								rss = alternateLink.attr("href");
							}
							return this.feedSourceManager.findOrCreateByFeedUrl(rss);
						}
						log.fine("Found alternate link " + alternateLink.html());
					} else {
						log.fine("Found link " + alternateLink.html());
					}
				}
			}
		}
		return null;
	}
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.agent.manager; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Timer; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.naming.ConfigurationException; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import org.apache.cloudstack.framework.config.ConfigDepot; import org.apache.cloudstack.framework.config.ConfigKey; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.managed.context.ManagedContextRunnable; import org.apache.cloudstack.managed.context.ManagedContextTimerTask; import org.apache.cloudstack.utils.identity.ManagementServerNode; import 
org.apache.cloudstack.utils.security.SSLUtils; import org.apache.log4j.Logger; import com.cloud.agent.api.Answer; import com.cloud.agent.api.CancelCommand; import com.cloud.agent.api.ChangeAgentAnswer; import com.cloud.agent.api.ChangeAgentCommand; import com.cloud.agent.api.Command; import com.cloud.agent.api.PropagateResourceEventCommand; import com.cloud.agent.api.ScheduleHostScanTaskCommand; import com.cloud.agent.api.TransferAgentCommand; import com.cloud.agent.transport.Request; import com.cloud.agent.transport.Request.Version; import com.cloud.agent.transport.Response; import com.cloud.cluster.ClusterManager; import com.cloud.cluster.ClusterManagerListener; import com.cloud.cluster.ClusterServicePdu; import com.cloud.cluster.ClusteredAgentRebalanceService; import com.cloud.cluster.ManagementServerHost; import com.cloud.cluster.ManagementServerHostVO; import com.cloud.cluster.agentlb.AgentLoadBalancerPlanner; import com.cloud.cluster.agentlb.HostTransferMapVO; import com.cloud.cluster.agentlb.HostTransferMapVO.HostTransferState; import com.cloud.cluster.agentlb.dao.HostTransferMapDao; import com.cloud.cluster.dao.ManagementServerHostDao; import com.cloud.exception.AgentUnavailableException; import com.cloud.exception.OperationTimedoutException; import com.cloud.exception.UnsupportedVersionException; import com.cloud.host.Host; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.Status.Event; import com.cloud.resource.ServerResource; import com.cloud.serializer.GsonHelper; import com.cloud.utils.DateUtil; import com.cloud.utils.concurrency.NamedThreadFactory; import com.cloud.utils.db.QueryBuilder; import com.cloud.utils.db.SearchCriteria.Op; import com.cloud.utils.db.TransactionLegacy; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.exception.TaskExecutionException; import com.cloud.utils.nio.Link; import com.cloud.utils.nio.Task; import com.google.gson.Gson; public class ClusteredAgentManagerImpl 
extends AgentManagerImpl implements ClusterManagerListener, ClusteredAgentRebalanceService { final static Logger s_logger = Logger.getLogger(ClusteredAgentManagerImpl.class); private static final ScheduledExecutorService s_transferExecutor = Executors.newScheduledThreadPool(2, new NamedThreadFactory("Cluster-AgentRebalancingExecutor")); private final long rebalanceTimeOut = 300000; // 5 mins - after this time remove the agent from the transfer list public final static long STARTUP_DELAY = 5000; public final static long SCAN_INTERVAL = 90000; // 90 seconds, it takes 60 sec for xenserver to fail login public final static int ACQUIRE_GLOBAL_LOCK_TIMEOUT_FOR_COOPERATION = 5; // 5 seconds protected Set<Long> _agentToTransferIds = new HashSet<Long>(); Gson _gson; protected HashMap<String, SocketChannel> _peers; protected HashMap<String, SSLEngine> _sslEngines; private final Timer _timer = new Timer("ClusteredAgentManager Timer"); boolean _agentLbHappened = false; @Inject protected ClusterManager _clusterMgr = null; @Inject protected ManagementServerHostDao _mshostDao; @Inject protected HostTransferMapDao _hostTransferDao; @Inject protected List<AgentLoadBalancerPlanner> _lbPlanners; @Inject ConfigurationDao _configDao; @Inject ConfigDepot _configDepot; protected ClusteredAgentManagerImpl() { super(); } protected final ConfigKey<Boolean> EnableLB = new ConfigKey<Boolean>(Boolean.class, "agent.lb.enabled", "Advanced", "false", "Enable agent load balancing between management server nodes", true); protected final ConfigKey<Double> ConnectedAgentThreshold = new ConfigKey<Double>(Double.class, "agent.load.threshold", "Advanced", "0.7", "What percentage of the agents can be held by one management server before load balancing happens", true); protected final ConfigKey<Integer> LoadSize = new ConfigKey<Integer>(Integer.class, "direct.agent.load.size", "Advanced", "16", "How many agents to connect to in each round", true); protected final ConfigKey<Integer> ScanInterval = new 
ConfigKey<Integer>(Integer.class, "direct.agent.scan.interval", "Advanced", "90", "Interval between scans to load agents", false, ConfigKey.Scope.Global, 1000); @Override public boolean configure(final String name, final Map<String, Object> xmlParams) throws ConfigurationException { _peers = new HashMap<String, SocketChannel>(7); _sslEngines = new HashMap<String, SSLEngine>(7); _nodeId = ManagementServerNode.getManagementServerId(); s_logger.info("Configuring ClusterAgentManagerImpl. management server node id(msid): " + _nodeId); ClusteredAgentAttache.initialize(this); _clusterMgr.registerListener(this); _clusterMgr.registerDispatcher(new ClusterDispatcher()); _gson = GsonHelper.getGson(); return super.configure(name, xmlParams); } @Override public boolean start() { if (!super.start()) { return false; } _timer.schedule(new DirectAgentScanTimerTask(), STARTUP_DELAY, ScanInterval.value()); if (s_logger.isDebugEnabled()) { s_logger.debug("Scheduled direct agent scan task to run at an interval of " + ScanInterval.value() + " seconds"); } // Schedule tasks for agent rebalancing if (isAgentRebalanceEnabled()) { s_transferExecutor.scheduleAtFixedRate(getAgentRebalanceScanTask(), 60000, 60000, TimeUnit.MILLISECONDS); s_transferExecutor.scheduleAtFixedRate(getTransferScanTask(), 60000, ClusteredAgentRebalanceService.DEFAULT_TRANSFER_CHECK_INTERVAL, TimeUnit.MILLISECONDS); } return true; } public void scheduleHostScanTask() { _timer.schedule(new DirectAgentScanTimerTask(), 0); if (s_logger.isDebugEnabled()) { s_logger.debug("Scheduled a direct agent scan task"); } } private void runDirectAgentScanTimerTask() { scanDirectAgentToLoad(); } private void scanDirectAgentToLoad() { if (s_logger.isTraceEnabled()) { s_logger.trace("Begin scanning directly connected hosts"); } // for agents that are self-managed, threshold to be considered as disconnected after pingtimeout final long cutSeconds = (System.currentTimeMillis() >> 10) - getTimeout(); final List<HostVO> hosts = 
_hostDao.findAndUpdateDirectAgentToLoad(cutSeconds, LoadSize.value().longValue(), _nodeId); final List<HostVO> appliances = _hostDao.findAndUpdateApplianceToLoad(cutSeconds, _nodeId); if (hosts != null) { hosts.addAll(appliances); if (hosts.size() > 0) { s_logger.debug("Found " + hosts.size() + " unmanaged direct hosts, processing connect for them..."); for (final HostVO host : hosts) { try { final AgentAttache agentattache = findAttache(host.getId()); if (agentattache != null) { // already loaded, skip if (agentattache.forForward()) { if (s_logger.isInfoEnabled()) { s_logger.info(host + " is detected down, but we have a forward attache running, disconnect this one before launching the host"); } removeAgent(agentattache, Status.Disconnected); } else { continue; } } if (s_logger.isDebugEnabled()) { s_logger.debug("Loading directly connected host " + host.getId() + "(" + host.getName() + ")"); } loadDirectlyConnectedHost(host, false); } catch (final Throwable e) { s_logger.warn(" can not load directly connected host " + host.getId() + "(" + host.getName() + ") due to ", e); } } } } if (s_logger.isTraceEnabled()) { s_logger.trace("End scanning directly connected hosts"); } } private class DirectAgentScanTimerTask extends ManagedContextTimerTask { @Override protected void runInContext() { try { runDirectAgentScanTimerTask(); } catch (final Throwable e) { s_logger.error("Unexpected exception " + e.getMessage(), e); } } } @Override public Task create(final Task.Type type, final Link link, final byte[] data) { return new ClusteredAgentHandler(type, link, data); } protected AgentAttache createAttache(final long id) { s_logger.debug("create forwarding ClusteredAgentAttache for " + id); final HostVO host = _hostDao.findById(id); final AgentAttache attache = new ClusteredAgentAttache(this, id, host.getName()); AgentAttache old = null; synchronized (_agents) { old = _agents.get(id); _agents.put(id, attache); } if (old != null) { if (s_logger.isDebugEnabled()) { 
s_logger.debug("Remove stale agent attache from current management server"); } removeAgent(old, Status.Removed); } return attache; } @Override protected AgentAttache createAttacheForConnect(final HostVO host, final Link link) { s_logger.debug("create ClusteredAgentAttache for " + host.getId()); final AgentAttache attache = new ClusteredAgentAttache(this, host.getId(), host.getName(), link, host.isInMaintenanceStates()); link.attach(attache); AgentAttache old = null; synchronized (_agents) { old = _agents.get(host.getId()); _agents.put(host.getId(), attache); } if (old != null) { old.disconnect(Status.Removed); } return attache; } @Override protected AgentAttache createAttacheForDirectConnect(final Host host, final ServerResource resource) { s_logger.debug("create ClusteredDirectAgentAttache for " + host.getId()); final DirectAgentAttache attache = new ClusteredDirectAgentAttache(this, host.getId(), host.getName(), _nodeId, resource, host.isInMaintenanceStates()); AgentAttache old = null; synchronized (_agents) { old = _agents.get(host.getId()); _agents.put(host.getId(), attache); } if (old != null) { old.disconnect(Status.Removed); } return attache; } @Override protected boolean handleDisconnectWithoutInvestigation(final AgentAttache attache, final Status.Event event, final boolean transitState, final boolean removeAgent) { return handleDisconnect(attache, event, false, true, removeAgent); } @Override protected boolean handleDisconnectWithInvestigation(final AgentAttache attache, final Status.Event event) { return handleDisconnect(attache, event, true, true, true); } protected boolean handleDisconnect(final AgentAttache agent, final Status.Event event, final boolean investigate, final boolean broadcast, final boolean removeAgent) { boolean res; if (!investigate) { res = super.handleDisconnectWithoutInvestigation(agent, event, true, removeAgent); } else { res = super.handleDisconnectWithInvestigation(agent, event); } if (res) { if (broadcast) { 
notifyNodesInCluster(agent); } return true; } else { return false; } } @Override public boolean executeUserRequest(final long hostId, final Event event) throws AgentUnavailableException { if (event == Event.AgentDisconnected) { if (s_logger.isDebugEnabled()) { s_logger.debug("Received agent disconnect event for host " + hostId); } final AgentAttache attache = findAttache(hostId); if (attache != null) { // don't process disconnect if the host is being rebalanced if (isAgentRebalanceEnabled()) { final HostTransferMapVO transferVO = _hostTransferDao.findById(hostId); if (transferVO != null) { if (transferVO.getFutureOwner() == _nodeId && transferVO.getState() == HostTransferState.TransferStarted) { s_logger.debug("Not processing " + Event.AgentDisconnected + " event for the host id=" + hostId + " as the host is being connected to " + _nodeId); return true; } } } // don't process disconnect if the disconnect came for the host via delayed cluster notification, // but the host has already reconnected to the current management server if (!attache.forForward()) { s_logger.debug("Not processing " + Event.AgentDisconnected + " event for the host id=" + hostId + " as the host is directly connected to the current management server " + _nodeId); return true; } return super.handleDisconnectWithoutInvestigation(attache, Event.AgentDisconnected, false, true); } return true; } else { return super.executeUserRequest(hostId, event); } } @Override public boolean reconnect(final long hostId) { Boolean result; try { result = propagateAgentEvent(hostId, Event.ShutdownRequested); if (result != null) { return result; } } catch (final AgentUnavailableException e) { s_logger.debug("cannot propagate agent reconnect because agent is not available", e); return false; } return super.reconnect(hostId); } public void notifyNodesInCluster(final AgentAttache attache) { s_logger.debug("Notifying other nodes of to disconnect"); final Command[] cmds = new Command[] {new 
ChangeAgentCommand(attache.getId(), Event.AgentDisconnected)}; _clusterMgr.broadcast(attache.getId(), _gson.toJson(cmds)); } // notifies MS peers to schedule a host scan task immediately, triggered during addHost operation public void notifyNodesInClusterToScheduleHostScanTask() { if (s_logger.isDebugEnabled()) { s_logger.debug("Notifying other MS nodes to run host scan task"); } final Command[] cmds = new Command[] {new ScheduleHostScanTaskCommand()}; _clusterMgr.broadcast(0, _gson.toJson(cmds)); } protected static void logT(final byte[] bytes, final String msg) { s_logger.trace("Seq " + Request.getAgentId(bytes) + "-" + Request.getSequence(bytes) + ": MgmtId " + Request.getManagementServerId(bytes) + ": " + (Request.isRequest(bytes) ? "Req: " : "Resp: ") + msg); } protected static void logD(final byte[] bytes, final String msg) { s_logger.debug("Seq " + Request.getAgentId(bytes) + "-" + Request.getSequence(bytes) + ": MgmtId " + Request.getManagementServerId(bytes) + ": " + (Request.isRequest(bytes) ? "Req: " : "Resp: ") + msg); } protected static void logI(final byte[] bytes, final String msg) { s_logger.info("Seq " + Request.getAgentId(bytes) + "-" + Request.getSequence(bytes) + ": MgmtId " + Request.getManagementServerId(bytes) + ": " + (Request.isRequest(bytes) ? 
"Req: " : "Resp: ") + msg); } public boolean routeToPeer(final String peer, final byte[] bytes) { int i = 0; SocketChannel ch = null; SSLEngine sslEngine = null; while (i++ < 5) { ch = connectToPeer(peer, ch); if (ch == null) { try { logD(bytes, "Unable to route to peer: " + Request.parse(bytes).toString()); } catch (ClassNotFoundException | UnsupportedVersionException e) { // Request.parse thrown exception when we try to log it, log as much as we can logD(bytes, "Unable to route to peer, and Request.parse further caught exception" + e.getMessage()); } return false; } sslEngine = getSSLEngine(peer); if (sslEngine == null) { logD(bytes, "Unable to get SSLEngine of peer: " + peer); return false; } try { if (s_logger.isDebugEnabled()) { logD(bytes, "Routing to peer"); } Link.write(ch, new ByteBuffer[] {ByteBuffer.wrap(bytes)}, sslEngine); return true; } catch (final IOException e) { try { logI(bytes, "Unable to route to peer: " + Request.parse(bytes).toString() + " due to " + e.getMessage()); } catch (ClassNotFoundException | UnsupportedVersionException ex) { // Request.parse thrown exception when we try to log it, log as much as we can logI(bytes, "Unable to route to peer due to" + e.getMessage() + ". 
Also caught exception when parsing request: " + ex.getMessage()); } } } return false; } public String findPeer(final long hostId) { return getPeerName(hostId); } public SSLEngine getSSLEngine(final String peerName) { return _sslEngines.get(peerName); } public void cancel(final String peerName, final long hostId, final long sequence, final String reason) { final CancelCommand cancel = new CancelCommand(sequence, reason); final Request req = new Request(hostId, _nodeId, cancel, true); req.setControl(true); routeToPeer(peerName, req.getBytes()); } public void closePeer(final String peerName) { synchronized (_peers) { final SocketChannel ch = _peers.get(peerName); if (ch != null) { try { ch.close(); } catch (final IOException e) { s_logger.warn("Unable to close peer socket connection to " + peerName); } } _peers.remove(peerName); _sslEngines.remove(peerName); } } public SocketChannel connectToPeer(final String peerName, final SocketChannel prevCh) { synchronized (_peers) { final SocketChannel ch = _peers.get(peerName); SSLEngine sslEngine = null; if (prevCh != null) { try { prevCh.close(); } catch (final Exception e) { s_logger.info("[ignored]" + "failed to get close resource for previous channel Socket: " + e.getLocalizedMessage()); } } if (ch == null || ch == prevCh) { final ManagementServerHost ms = _clusterMgr.getPeer(peerName); if (ms == null) { s_logger.info("Unable to find peer: " + peerName); return null; } final String ip = ms.getServiceIP(); InetAddress addr; try { addr = InetAddress.getByName(ip); } catch (final UnknownHostException e) { throw new CloudRuntimeException("Unable to resolve " + ip); } SocketChannel ch1 = null; try { ch1 = SocketChannel.open(new InetSocketAddress(addr, Port.value())); ch1.configureBlocking(true); // make sure we are working at blocking mode ch1.socket().setKeepAlive(true); ch1.socket().setSoTimeout(60 * 1000); try { final SSLContext sslContext = Link.initSSLContext(true); sslEngine = sslContext.createSSLEngine(ip, Port.value()); 
sslEngine.setUseClientMode(true); sslEngine.setEnabledProtocols(SSLUtils.getSupportedProtocols(sslEngine.getEnabledProtocols())); Link.doHandshake(ch1, sslEngine, true); s_logger.info("SSL: Handshake done"); } catch (final Exception e) { ch1.close(); throw new IOException("SSL: Fail to init SSL! " + e); } if (s_logger.isDebugEnabled()) { s_logger.debug("Connection to peer opened: " + peerName + ", ip: " + ip); } _peers.put(peerName, ch1); _sslEngines.put(peerName, sslEngine); return ch1; } catch (final IOException e) { try { ch1.close(); } catch (final IOException ex) { s_logger.error("failed to close failed peer socket: " + ex); } s_logger.warn("Unable to connect to peer management server: " + peerName + ", ip: " + ip + " due to " + e.getMessage(), e); return null; } } if (s_logger.isTraceEnabled()) { s_logger.trace("Found open channel for peer: " + peerName); } return ch; } } public SocketChannel connectToPeer(final long hostId, final SocketChannel prevCh) { final String peerName = getPeerName(hostId); if (peerName == null) { return null; } return connectToPeer(peerName, prevCh); } @Override protected AgentAttache getAttache(final Long hostId) throws AgentUnavailableException { assert hostId != null : "Who didn't check their id value?"; final HostVO host = _hostDao.findById(hostId); if (host == null) { throw new AgentUnavailableException("Can't find the host ", hostId); } AgentAttache agent = findAttache(hostId); if (agent == null || !agent.forForward()) { if (isHostOwnerSwitched(host)) { if (s_logger.isDebugEnabled()) { s_logger.debug("Host " + hostId + " has switched to another management server, need to update agent map with a forwarding agent attache"); } agent = createAttache(hostId); } } if (agent == null) { final AgentUnavailableException ex = new AgentUnavailableException("Host with specified id is not in the right state: " + host.getStatus(), hostId); ex.addProxyObject(_entityMgr.findById(Host.class, hostId).getUuid()); throw ex; } return agent; } 
@Override public boolean stop() { if (_peers != null) { for (final SocketChannel ch : _peers.values()) { try { s_logger.info("Closing: " + ch.toString()); ch.close(); } catch (final IOException e) { s_logger.info("[ignored] error on closing channel: " +ch.toString(), e); } } } _timer.cancel(); // cancel all transfer tasks s_transferExecutor.shutdownNow(); cleanupTransferMap(_nodeId); return super.stop(); } @Override public void startDirectlyConnectedHosts() { // override and let it be dummy for purpose, we will scan and load direct agents periodically. // We may also pickup agents that have been left over from other crashed management server } public class ClusteredAgentHandler extends AgentHandler { public ClusteredAgentHandler(final Task.Type type, final Link link, final byte[] data) { super(type, link, data); } @Override protected void doTask(final Task task) throws TaskExecutionException { final TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.CLOUD_DB); try { if (task.getType() != Task.Type.DATA) { super.doTask(task); return; } final byte[] data = task.getData(); final Version ver = Request.getVersion(data); if (ver.ordinal() != Version.v1.ordinal() && ver.ordinal() != Version.v3.ordinal()) { s_logger.warn("Wrong version for clustered agent request"); super.doTask(task); return; } final long hostId = Request.getAgentId(data); final Link link = task.getLink(); if (Request.fromServer(data)) { final AgentAttache agent = findAttache(hostId); if (Request.isControl(data)) { if (agent == null) { logD(data, "No attache to process cancellation"); return; } final Request req = Request.parse(data); final Command[] cmds = req.getCommands(); final CancelCommand cancel = (CancelCommand)cmds[0]; if (s_logger.isDebugEnabled()) { logD(data, "Cancel request received"); } agent.cancel(cancel.getSequence()); final Long current = agent._currentSequence; // if the request is the current request, always have to trigger sending next request in // sequence, // 
otherwise the agent queue will be blocked if (req.executeInSequence() && current != null && current == Request.getSequence(data)) { agent.sendNext(Request.getSequence(data)); } return; } try { if (agent == null || agent.isClosed()) { throw new AgentUnavailableException("Unable to route to agent ", hostId); } if (Request.isRequest(data) && Request.requiresSequentialExecution(data)) { // route it to the agent. // But we have the serialize the control commands here so we have // to deserialize this and send it through the agent attache. final Request req = Request.parse(data); agent.send(req, null); return; } else { if (agent instanceof Routable) { final Routable cluster = (Routable)agent; cluster.routeToAgent(data); } else { agent.send(Request.parse(data)); } return; } } catch (final AgentUnavailableException e) { logD(data, e.getMessage()); cancel(Long.toString(Request.getManagementServerId(data)), hostId, Request.getSequence(data), e.getMessage()); } } else { final long mgmtId = Request.getManagementServerId(data); if (mgmtId != -1 && mgmtId != _nodeId) { routeToPeer(Long.toString(mgmtId), data); if (Request.requiresSequentialExecution(data)) { final AgentAttache attache = (AgentAttache)link.attachment(); if (attache != null) { attache.sendNext(Request.getSequence(data)); } else if (s_logger.isDebugEnabled()) { logD(data, "No attache to process " + Request.parse(data).toString()); } } return; } else { if (Request.isRequest(data)) { super.doTask(task); } else { // received an answer. 
final Response response = Response.parse(data); final AgentAttache attache = findAttache(response.getAgentId()); if (attache == null) { s_logger.info("SeqA " + response.getAgentId() + "-" + response.getSequence() + "Unable to find attache to forward " + response.toString()); return; } if (!attache.processAnswers(response.getSequence(), response)) { s_logger.info("SeqA " + attache.getId() + "-" + response.getSequence() + ": Response is not processed: " + response.toString()); } } return; } } } catch (final ClassNotFoundException e) { final String message = String.format("ClassNotFoundException occured when executing taks! Error '%s'", e.getMessage()); s_logger.error(message); throw new TaskExecutionException(message, e); } catch (final UnsupportedVersionException e) { final String message = String.format("UnsupportedVersionException occured when executing taks! Error '%s'", e.getMessage()); s_logger.error(message); throw new TaskExecutionException(message, e); } finally { txn.close(); } } } @Override public void onManagementNodeJoined(final List<? extends ManagementServerHost> nodeList, final long selfNodeId) { } @Override public void onManagementNodeLeft(final List<? 
extends ManagementServerHost> nodeList, final long selfNodeId) { for (final ManagementServerHost vo : nodeList) { s_logger.info("Marking hosts as disconnected on Management server" + vo.getMsid()); final long lastPing = (System.currentTimeMillis() >> 10) - getTimeout(); _hostDao.markHostsAsDisconnected(vo.getMsid(), lastPing); s_logger.info("Deleting entries from op_host_transfer table for Management server " + vo.getMsid()); cleanupTransferMap(vo.getMsid()); } } @Override public void onManagementNodeIsolated() { } @Override public void removeAgent(final AgentAttache attache, final Status nextState) { if (attache == null) { return; } super.removeAgent(attache, nextState); } @Override public boolean executeRebalanceRequest(final long agentId, final long currentOwnerId, final long futureOwnerId, final Event event) throws AgentUnavailableException, OperationTimedoutException { boolean result = false; if (event == Event.RequestAgentRebalance) { return setToWaitForRebalance(agentId, currentOwnerId, futureOwnerId); } else if (event == Event.StartAgentRebalance) { try { result = rebalanceHost(agentId, currentOwnerId, futureOwnerId); } catch (final Exception e) { s_logger.warn("Unable to rebalance host id=" + agentId, e); } } return result; } @Override public void scheduleRebalanceAgents() { _timer.schedule(new AgentLoadBalancerTask(), 30000); } public class AgentLoadBalancerTask extends ManagedContextTimerTask { protected volatile boolean cancelled = false; public AgentLoadBalancerTask() { s_logger.debug("Agent load balancer task created"); } @Override public synchronized boolean cancel() { if (!cancelled) { cancelled = true; s_logger.debug("Agent load balancer task cancelled"); return super.cancel(); } return true; } @Override protected synchronized void runInContext() { try { if (!cancelled) { startRebalanceAgents(); if (s_logger.isInfoEnabled()) { s_logger.info("The agent load balancer task is now being cancelled"); } cancelled = true; } } catch (final Throwable e) { 
s_logger.error("Unexpected exception " + e.toString(), e); } } } public void startRebalanceAgents() { s_logger.debug("Management server " + _nodeId + " is asking other peers to rebalance their agents"); final List<ManagementServerHostVO> allMS = _mshostDao.listBy(ManagementServerHost.State.Up); final QueryBuilder<HostVO> sc = QueryBuilder.create(HostVO.class); sc.and(sc.entity().getManagementServerId(), Op.NNULL); sc.and(sc.entity().getType(), Op.EQ, Host.Type.Routing); final List<HostVO> allManagedAgents = sc.list(); int avLoad = 0; if (!allManagedAgents.isEmpty() && !allMS.isEmpty()) { avLoad = allManagedAgents.size() / allMS.size(); } else { if (s_logger.isDebugEnabled()) { s_logger.debug("There are no hosts to rebalance in the system. Current number of active management server nodes in the system is " + allMS.size() + "; number of managed agents is " + allManagedAgents.size()); } return; } if (avLoad == 0L) { if (s_logger.isDebugEnabled()) { s_logger.debug("As calculated average load is less than 1, rounding it to 1"); } avLoad = 1; } for (final ManagementServerHostVO node : allMS) { if (node.getMsid() != _nodeId) { List<HostVO> hostsToRebalance = new ArrayList<HostVO>(); for (final AgentLoadBalancerPlanner lbPlanner : _lbPlanners) { hostsToRebalance = lbPlanner.getHostsToRebalance(node.getMsid(), avLoad); if (hostsToRebalance != null && !hostsToRebalance.isEmpty()) { break; } else { s_logger.debug("Agent load balancer planner " + lbPlanner.getName() + " found no hosts to be rebalanced from management server " + node.getMsid()); } } if (hostsToRebalance != null && !hostsToRebalance.isEmpty()) { s_logger.debug("Found " + hostsToRebalance.size() + " hosts to rebalance from management server " + node.getMsid()); for (final HostVO host : hostsToRebalance) { final long hostId = host.getId(); s_logger.debug("Asking management server " + node.getMsid() + " to give away host id=" + hostId); boolean result = true; if (_hostTransferDao.findById(hostId) != null) { 
s_logger.warn("Somebody else is already rebalancing host id: " + hostId); continue; } HostTransferMapVO transfer = null; try { transfer = _hostTransferDao.startAgentTransfering(hostId, node.getMsid(), _nodeId); final Answer[] answer = sendRebalanceCommand(node.getMsid(), hostId, node.getMsid(), _nodeId, Event.RequestAgentRebalance); if (answer == null) { s_logger.warn("Failed to get host id=" + hostId + " from management server " + node.getMsid()); result = false; } } catch (final Exception ex) { s_logger.warn("Failed to get host id=" + hostId + " from management server " + node.getMsid(), ex); result = false; } finally { if (transfer != null) { final HostTransferMapVO transferState = _hostTransferDao.findByIdAndFutureOwnerId(transfer.getId(), _nodeId); if (!result && transferState != null && transferState.getState() == HostTransferState.TransferRequested) { if (s_logger.isDebugEnabled()) { s_logger.debug("Removing mapping from op_host_transfer as it failed to be set to transfer mode"); } // just remove the mapping (if exists) as nothing was done on the peer management // server yet _hostTransferDao.remove(transfer.getId()); } } } } } else { s_logger.debug("Found no hosts to rebalance from the management server " + node.getMsid()); } } } } private Answer[] sendRebalanceCommand(final long peer, final long agentId, final long currentOwnerId, final long futureOwnerId, final Event event) { final TransferAgentCommand transfer = new TransferAgentCommand(agentId, currentOwnerId, futureOwnerId, event); final Commands commands = new Commands(Command.OnError.Stop); commands.addCommand(transfer); final Command[] cmds = commands.toCommands(); try { if (s_logger.isDebugEnabled()) { s_logger.debug("Forwarding " + cmds[0].toString() + " to " + peer); } final String peerName = Long.toString(peer); final String cmdStr = _gson.toJson(cmds); final String ansStr = _clusterMgr.execute(peerName, agentId, cmdStr, true); final Answer[] answers = _gson.fromJson(ansStr, Answer[].class); 
return answers; } catch (final Exception e) { s_logger.warn("Caught exception while talking to " + currentOwnerId, e); return null; } } public String getPeerName(final long agentHostId) { final HostVO host = _hostDao.findById(agentHostId); if (host != null && host.getManagementServerId() != null) { if (_clusterMgr.getSelfPeerName().equals(Long.toString(host.getManagementServerId()))) { return null; } return Long.toString(host.getManagementServerId()); } return null; } public Boolean propagateAgentEvent(final long agentId, final Event event) throws AgentUnavailableException { final String msPeer = getPeerName(agentId); if (msPeer == null) { return null; } if (s_logger.isDebugEnabled()) { s_logger.debug("Propagating agent change request event:" + event.toString() + " to agent:" + agentId); } final Command[] cmds = new Command[1]; cmds[0] = new ChangeAgentCommand(agentId, event); final String ansStr = _clusterMgr.execute(msPeer, agentId, _gson.toJson(cmds), true); if (ansStr == null) { throw new AgentUnavailableException(agentId); } final Answer[] answers = _gson.fromJson(ansStr, Answer[].class); if (s_logger.isDebugEnabled()) { s_logger.debug("Result for agent change is " + answers[0].getResult()); } return answers[0].getResult(); } private Runnable getTransferScanTask() { return new ManagedContextRunnable() { @Override protected void runInContext() { try { if (s_logger.isTraceEnabled()) { s_logger.trace("Clustered agent transfer scan check, management server id:" + _nodeId); } synchronized (_agentToTransferIds) { if (_agentToTransferIds.size() > 0) { s_logger.debug("Found " + _agentToTransferIds.size() + " agents to transfer"); // for (Long hostId : _agentToTransferIds) { for (final Iterator<Long> iterator = _agentToTransferIds.iterator(); iterator.hasNext();) { final Long hostId = iterator.next(); final AgentAttache attache = findAttache(hostId); // if the thread: // 1) timed out waiting for the host to reconnect // 2) recipient management server is not active any 
more // 3) if the management server doesn't own the host any more // remove the host from re-balance list and delete from op_host_transfer DB // no need to do anything with the real attache as we haven't modified it yet final Date cutTime = DateUtil.currentGMTTime(); final HostTransferMapVO transferMap = _hostTransferDao.findActiveHostTransferMapByHostId(hostId, new Date(cutTime.getTime() - rebalanceTimeOut)); if (transferMap == null) { s_logger.debug("Timed out waiting for the host id=" + hostId + " to be ready to transfer, skipping rebalance for the host"); iterator.remove(); _hostTransferDao.completeAgentTransfer(hostId); continue; } if (transferMap.getInitialOwner() != _nodeId || attache == null || attache.forForward()) { s_logger.debug("Management server " + _nodeId + " doesn't own host id=" + hostId + " any more, skipping rebalance for the host"); iterator.remove(); _hostTransferDao.completeAgentTransfer(hostId); continue; } final ManagementServerHostVO ms = _mshostDao.findByMsid(transferMap.getFutureOwner()); if (ms != null && ms.getState() != ManagementServerHost.State.Up) { s_logger.debug("Can't transfer host " + hostId + " as it's future owner is not in UP state: " + ms + ", skipping rebalance for the host"); iterator.remove(); _hostTransferDao.completeAgentTransfer(hostId); continue; } if (attache.getQueueSize() == 0 && attache.getNonRecurringListenersSize() == 0) { iterator.remove(); try { _executor.execute(new RebalanceTask(hostId, transferMap.getInitialOwner(), transferMap.getFutureOwner())); } catch (final RejectedExecutionException ex) { s_logger.warn("Failed to submit rebalance task for host id=" + hostId + "; postponing the execution"); continue; } } else { s_logger.debug("Agent " + hostId + " can't be transfered yet as its request queue size is " + attache.getQueueSize() + " and listener queue size is " + attache.getNonRecurringListenersSize()); } } } else { if (s_logger.isTraceEnabled()) { s_logger.trace("Found no agents to be transfered by the 
management server " + _nodeId); } } } } catch (final Throwable e) { s_logger.error("Problem with the clustered agent transfer scan check!", e); } } }; } private boolean setToWaitForRebalance(final long hostId, final long currentOwnerId, final long futureOwnerId) { s_logger.debug("Adding agent " + hostId + " to the list of agents to transfer"); synchronized (_agentToTransferIds) { return _agentToTransferIds.add(hostId); } } protected boolean rebalanceHost(final long hostId, final long currentOwnerId, final long futureOwnerId) throws AgentUnavailableException { boolean result = true; if (currentOwnerId == _nodeId) { if (!startRebalance(hostId)) { s_logger.debug("Failed to start agent rebalancing"); finishRebalance(hostId, futureOwnerId, Event.RebalanceFailed); return false; } try { final Answer[] answer = sendRebalanceCommand(futureOwnerId, hostId, currentOwnerId, futureOwnerId, Event.StartAgentRebalance); if (answer == null || !answer[0].getResult()) { result = false; } } catch (final Exception ex) { s_logger.warn("Host " + hostId + " failed to connect to the management server " + futureOwnerId + " as a part of rebalance process", ex); result = false; } if (result) { s_logger.debug("Successfully transfered host id=" + hostId + " to management server " + futureOwnerId); finishRebalance(hostId, futureOwnerId, Event.RebalanceCompleted); } else { s_logger.warn("Failed to transfer host id=" + hostId + " to management server " + futureOwnerId); finishRebalance(hostId, futureOwnerId, Event.RebalanceFailed); } } else if (futureOwnerId == _nodeId) { final HostVO host = _hostDao.findById(hostId); try { if (s_logger.isDebugEnabled()) { s_logger.debug("Disconnecting host " + host.getId() + "(" + host.getName() + " as a part of rebalance process without notification"); } final AgentAttache attache = findAttache(hostId); if (attache != null) { result = handleDisconnect(attache, Event.AgentDisconnected, false, false, true); } if (result) { if (s_logger.isDebugEnabled()) { 
s_logger.debug("Loading directly connected host " + host.getId() + "(" + host.getName() + ") to the management server " + _nodeId + " as a part of rebalance process"); } result = loadDirectlyConnectedHost(host, true); } else { s_logger.warn("Failed to disconnect " + host.getId() + "(" + host.getName() + " as a part of rebalance process without notification"); } } catch (final Exception ex) { s_logger.warn("Failed to load directly connected host " + host.getId() + "(" + host.getName() + ") to the management server " + _nodeId + " as a part of rebalance process due to:", ex); result = false; } if (result) { s_logger.debug("Successfully loaded directly connected host " + host.getId() + "(" + host.getName() + ") to the management server " + _nodeId + " as a part of rebalance process"); } else { s_logger.warn("Failed to load directly connected host " + host.getId() + "(" + host.getName() + ") to the management server " + _nodeId + " as a part of rebalance process"); } } return result; } protected void finishRebalance(final long hostId, final long futureOwnerId, final Event event) { final boolean success = event == Event.RebalanceCompleted ? 
true : false; if (s_logger.isDebugEnabled()) { s_logger.debug("Finishing rebalancing for the agent " + hostId + " with event " + event); } final AgentAttache attache = findAttache(hostId); if (attache == null || !(attache instanceof ClusteredAgentAttache)) { s_logger.debug("Unable to find forward attache for the host id=" + hostId + ", assuming that the agent disconnected already"); _hostTransferDao.completeAgentTransfer(hostId); return; } final ClusteredAgentAttache forwardAttache = (ClusteredAgentAttache)attache; if (success) { // 1) Set transfer mode to false - so the agent can start processing requests normally forwardAttache.setTransferMode(false); // 2) Get all transfer requests and route them to peer Request requestToTransfer = forwardAttache.getRequestToTransfer(); while (requestToTransfer != null) { s_logger.debug("Forwarding request " + requestToTransfer.getSequence() + " held in transfer attache " + hostId + " from the management server " + _nodeId + " to " + futureOwnerId); final boolean routeResult = routeToPeer(Long.toString(futureOwnerId), requestToTransfer.getBytes()); if (!routeResult) { logD(requestToTransfer.getBytes(), "Failed to route request to peer"); } requestToTransfer = forwardAttache.getRequestToTransfer(); } s_logger.debug("Management server " + _nodeId + " completed agent " + hostId + " rebalance to " + futureOwnerId); } else { failRebalance(hostId); } s_logger.debug("Management server " + _nodeId + " completed agent " + hostId + " rebalance"); _hostTransferDao.completeAgentTransfer(hostId); } protected void failRebalance(final long hostId) { try { s_logger.debug("Management server " + _nodeId + " failed to rebalance agent " + hostId); _hostTransferDao.completeAgentTransfer(hostId); handleDisconnectWithoutInvestigation(findAttache(hostId), Event.RebalanceFailed, true, true); } catch (final Exception ex) { s_logger.warn("Failed to reconnect host id=" + hostId + " as a part of failed rebalance task cleanup"); } } protected boolean 
startRebalance(final long hostId) { final HostVO host = _hostDao.findById(hostId); if (host == null || host.getRemoved() != null) { s_logger.warn("Unable to find host record, fail start rebalancing process"); return false; } synchronized (_agents) { final ClusteredDirectAgentAttache attache = (ClusteredDirectAgentAttache)_agents.get(hostId); if (attache != null && attache.getQueueSize() == 0 && attache.getNonRecurringListenersSize() == 0) { handleDisconnectWithoutInvestigation(attache, Event.StartAgentRebalance, true, true); final ClusteredAgentAttache forwardAttache = (ClusteredAgentAttache)createAttache(hostId); if (forwardAttache == null) { s_logger.warn("Unable to create a forward attache for the host " + hostId + " as a part of rebalance process"); return false; } s_logger.debug("Putting agent id=" + hostId + " to transfer mode"); forwardAttache.setTransferMode(true); _agents.put(hostId, forwardAttache); } else { if (attache == null) { s_logger.warn("Attache for the agent " + hostId + " no longer exists on management server " + _nodeId + ", can't start host rebalancing"); } else { s_logger.warn("Attache for the agent " + hostId + " has request queue size= " + attache.getQueueSize() + " and listener queue size " + attache.getNonRecurringListenersSize() + ", can't start host rebalancing"); } return false; } } _hostTransferDao.startAgentTransfer(hostId); return true; } protected void cleanupTransferMap(final long msId) { final List<HostTransferMapVO> hostsJoingingCluster = _hostTransferDao.listHostsJoiningCluster(msId); for (final HostTransferMapVO hostJoingingCluster : hostsJoingingCluster) { _hostTransferDao.remove(hostJoingingCluster.getId()); } final List<HostTransferMapVO> hostsLeavingCluster = _hostTransferDao.listHostsLeavingCluster(msId); for (final HostTransferMapVO hostLeavingCluster : hostsLeavingCluster) { _hostTransferDao.remove(hostLeavingCluster.getId()); } } protected class RebalanceTask extends ManagedContextRunnable { Long hostId = null; Long 
currentOwnerId = null; Long futureOwnerId = null; public RebalanceTask(final long hostId, final long currentOwnerId, final long futureOwnerId) { this.hostId = hostId; this.currentOwnerId = currentOwnerId; this.futureOwnerId = futureOwnerId; } @Override protected void runInContext() { try { if (s_logger.isDebugEnabled()) { s_logger.debug("Rebalancing host id=" + hostId); } rebalanceHost(hostId, currentOwnerId, futureOwnerId); } catch (final Exception e) { s_logger.warn("Unable to rebalance host id=" + hostId, e); } } } private String handleScheduleHostScanTaskCommand(final ScheduleHostScanTaskCommand cmd) { if (s_logger.isDebugEnabled()) { s_logger.debug("Intercepting resource manager command: " + _gson.toJson(cmd)); } try { scheduleHostScanTask(); } catch (final Exception e) { // Scheduling host scan task in peer MS is a best effort operation during host add, regular host scan // happens at fixed intervals anyways. So handling any exceptions that may be thrown s_logger.warn("Exception happened while trying to schedule host scan task on mgmt server " + _clusterMgr.getSelfPeerName() + ", ignoring as regular host scan happens at fixed interval anyways", e); return null; } final Answer[] answers = new Answer[1]; answers[0] = new Answer(cmd, true, null); return _gson.toJson(answers); } public Answer[] sendToAgent(final Long hostId, final Command[] cmds, final boolean stopOnError) throws AgentUnavailableException, OperationTimedoutException { final Commands commands = new Commands(stopOnError ? 
Command.OnError.Stop : Command.OnError.Continue); for (final Command cmd : cmds) { commands.addCommand(cmd); } return send(hostId, commands); } protected class ClusterDispatcher implements ClusterManager.Dispatcher { @Override public String getName() { return "ClusterDispatcher"; } @Override public String dispatch(final ClusterServicePdu pdu) { if (s_logger.isDebugEnabled()) { s_logger.debug("Dispatch ->" + pdu.getAgentId() + ", json: " + pdu.getJsonPackage()); } Command[] cmds = null; try { cmds = _gson.fromJson(pdu.getJsonPackage(), Command[].class); } catch (final Throwable e) { assert false; s_logger.error("Excection in gson decoding : ", e); } if (cmds.length == 1 && cmds[0] instanceof ChangeAgentCommand) { // intercepted final ChangeAgentCommand cmd = (ChangeAgentCommand)cmds[0]; if (s_logger.isDebugEnabled()) { s_logger.debug("Intercepting command for agent change: agent " + cmd.getAgentId() + " event: " + cmd.getEvent()); } boolean result = false; try { result = executeAgentUserRequest(cmd.getAgentId(), cmd.getEvent()); if (s_logger.isDebugEnabled()) { s_logger.debug("Result is " + result); } } catch (final AgentUnavailableException e) { s_logger.warn("Agent is unavailable", e); return null; } final Answer[] answers = new Answer[1]; answers[0] = new ChangeAgentAnswer(cmd, result); return _gson.toJson(answers); } else if (cmds.length == 1 && cmds[0] instanceof TransferAgentCommand) { final TransferAgentCommand cmd = (TransferAgentCommand)cmds[0]; if (s_logger.isDebugEnabled()) { s_logger.debug("Intercepting command for agent rebalancing: agent " + cmd.getAgentId() + " event: " + cmd.getEvent()); } boolean result = false; try { result = rebalanceAgent(cmd.getAgentId(), cmd.getEvent(), cmd.getCurrentOwner(), cmd.getFutureOwner()); if (s_logger.isDebugEnabled()) { s_logger.debug("Result is " + result); } } catch (final AgentUnavailableException e) { s_logger.warn("Agent is unavailable", e); return null; } catch (final OperationTimedoutException e) { 
s_logger.warn("Operation timed out", e); return null; } final Answer[] answers = new Answer[1]; answers[0] = new Answer(cmd, result, null); return _gson.toJson(answers); } else if (cmds.length == 1 && cmds[0] instanceof PropagateResourceEventCommand) { final PropagateResourceEventCommand cmd = (PropagateResourceEventCommand)cmds[0]; s_logger.debug("Intercepting command to propagate event " + cmd.getEvent().name() + " for host " + cmd.getHostId()); boolean result = false; try { result = _resourceMgr.executeUserRequest(cmd.getHostId(), cmd.getEvent()); s_logger.debug("Result is " + result); } catch (final AgentUnavailableException ex) { s_logger.warn("Agent is unavailable", ex); return null; } final Answer[] answers = new Answer[1]; answers[0] = new Answer(cmd, result, null); return _gson.toJson(answers); } else if (cmds.length == 1 && cmds[0] instanceof ScheduleHostScanTaskCommand) { final ScheduleHostScanTaskCommand cmd = (ScheduleHostScanTaskCommand)cmds[0]; final String response = handleScheduleHostScanTaskCommand(cmd); return response; } try { final long startTick = System.currentTimeMillis(); if (s_logger.isDebugEnabled()) { s_logger.debug("Dispatch -> " + pdu.getAgentId() + ", json: " + pdu.getJsonPackage()); } final Answer[] answers = sendToAgent(pdu.getAgentId(), cmds, pdu.isStopOnError()); if (answers != null) { final String jsonReturn = _gson.toJson(answers); if (s_logger.isDebugEnabled()) { s_logger.debug("Completed dispatching -> " + pdu.getAgentId() + ", json: " + pdu.getJsonPackage() + " in " + (System.currentTimeMillis() - startTick) + " ms, return result: " + jsonReturn); } return jsonReturn; } else { if (s_logger.isDebugEnabled()) { s_logger.debug("Completed dispatching -> " + pdu.getAgentId() + ", json: " + pdu.getJsonPackage() + " in " + (System.currentTimeMillis() - startTick) + " ms, return null result"); } } } catch (final AgentUnavailableException e) { s_logger.warn("Agent is unavailable", e); } catch (final OperationTimedoutException e) { 
s_logger.warn("Timed Out", e); } return null; } } public boolean executeAgentUserRequest(final long agentId, final Event event) throws AgentUnavailableException { return executeUserRequest(agentId, event); } public boolean rebalanceAgent(final long agentId, final Event event, final long currentOwnerId, final long futureOwnerId) throws AgentUnavailableException, OperationTimedoutException { return executeRebalanceRequest(agentId, currentOwnerId, futureOwnerId, event); } public boolean isAgentRebalanceEnabled() { return EnableLB.value(); } private Runnable getAgentRebalanceScanTask() { return new ManagedContextRunnable() { @Override protected void runInContext() { try { if (s_logger.isTraceEnabled()) { s_logger.trace("Agent rebalance task check, management server id:" + _nodeId); } // initiate agent lb task will be scheduled and executed only once, and only when number of agents // loaded exceeds _connectedAgentsThreshold if (!_agentLbHappened) { QueryBuilder<HostVO> sc = QueryBuilder.create(HostVO.class); sc.and(sc.entity().getManagementServerId(), Op.NNULL); sc.and(sc.entity().getType(), Op.EQ, Host.Type.Routing); final List<HostVO> allManagedRoutingAgents = sc.list(); sc = QueryBuilder.create(HostVO.class); sc.and(sc.entity().getType(), Op.EQ, Host.Type.Routing); final List<HostVO> allAgents = sc.list(); final double allHostsCount = allAgents.size(); final double managedHostsCount = allManagedRoutingAgents.size(); if (allHostsCount > 0.0) { final double load = managedHostsCount / allHostsCount; if (load >= ConnectedAgentThreshold.value()) { s_logger.debug("Scheduling agent rebalancing task as the average agent load " + load + " is more than the threshold " + ConnectedAgentThreshold.value()); scheduleRebalanceAgents(); _agentLbHappened = true; } else { s_logger.debug("Not scheduling agent rebalancing task as the averages load " + load + " is less than the threshold " + ConnectedAgentThreshold.value()); } } } } catch (final Throwable e) { s_logger.error("Problem 
with the clustered agent transfer scan check!", e); } } }; } @Override public void rescan() { // schedule a scan task immediately if (s_logger.isDebugEnabled()) { s_logger.debug("Scheduling a host scan task"); } // schedule host scan task on current MS scheduleHostScanTask(); if (s_logger.isDebugEnabled()) { s_logger.debug("Notifying all peer MS to schedule host scan task"); } } @Override public ConfigKey<?>[] getConfigKeys() { final ConfigKey<?>[] keys = super.getConfigKeys(); final List<ConfigKey<?>> keysLst = new ArrayList<ConfigKey<?>>(); keysLst.addAll(Arrays.asList(keys)); keysLst.add(EnableLB); keysLst.add(ConnectedAgentThreshold); keysLst.add(LoadSize); keysLst.add(ScanInterval); return keysLst.toArray(new ConfigKey<?>[keysLst.size()]); } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.cosmos.implementation.changefeed.implementation; import com.azure.cosmos.BridgeInternal; import com.azure.cosmos.CosmosAsyncContainer; import com.azure.cosmos.models.CosmosChangeFeedRequestOptions; import com.azure.cosmos.models.CosmosContainerResponse; import com.azure.cosmos.CosmosAsyncDatabase; import com.azure.cosmos.models.CosmosDatabaseResponse; import com.azure.cosmos.models.CosmosItemResponse; import com.azure.cosmos.CosmosBridgeInternal; import com.azure.cosmos.models.CosmosContainerProperties; import com.azure.cosmos.models.CosmosContainerRequestOptions; import com.azure.cosmos.models.CosmosDatabaseRequestOptions; import com.azure.cosmos.models.CosmosItemRequestOptions; import com.azure.cosmos.models.CosmosQueryRequestOptions; import com.azure.cosmos.models.FeedResponse; import com.azure.cosmos.models.ModelBridgeInternal; import com.azure.cosmos.models.PartitionKey; import com.azure.cosmos.models.SqlQuerySpec; import com.azure.cosmos.implementation.AsyncDocumentClient; import com.azure.cosmos.implementation.PartitionKeyRange; import com.azure.cosmos.implementation.changefeed.ChangeFeedContextClient; import com.fasterxml.jackson.databind.JsonNode; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.core.scheduler.Scheduler; import reactor.core.scheduler.Schedulers; import java.net.URI; import java.util.List; import java.util.stream.Collectors; import static com.azure.cosmos.CosmosBridgeInternal.getContextClient; /** * Implementation for ChangeFeedDocumentClient. */ public class ChangeFeedContextClientImpl implements ChangeFeedContextClient { private final AsyncDocumentClient documentClient; private final CosmosAsyncContainer cosmosContainer; private final Scheduler rxScheduler; /** * Initializes a new instance of the {@link ChangeFeedContextClient} interface. * @param cosmosContainer existing client. 
*/ public ChangeFeedContextClientImpl(CosmosAsyncContainer cosmosContainer) { if (cosmosContainer == null) { throw new IllegalArgumentException("cosmosContainer"); } this.cosmosContainer = cosmosContainer; this.documentClient = getContextClient(cosmosContainer); this.rxScheduler = Schedulers.boundedElastic(); } /** * Initializes a new instance of the {@link ChangeFeedContextClient} interface. * @param cosmosContainer existing client. * @param rxScheduler the RX Java scheduler to observe on. */ public ChangeFeedContextClientImpl(CosmosAsyncContainer cosmosContainer, Scheduler rxScheduler) { if (cosmosContainer == null) { throw new IllegalArgumentException("cosmosContainer"); } this.cosmosContainer = cosmosContainer; this.documentClient = getContextClient(cosmosContainer); this.rxScheduler = rxScheduler; } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRangeFeed(String partitionKeyRangesOrCollectionLink, CosmosQueryRequestOptions cosmosQueryRequestOptions) { return this.documentClient.readPartitionKeyRanges(partitionKeyRangesOrCollectionLink, cosmosQueryRequestOptions) .publishOn(this.rxScheduler); } @Override public Flux<FeedResponse<JsonNode>> createDocumentChangeFeedQuery( CosmosAsyncContainer collectionLink, CosmosChangeFeedRequestOptions changeFeedRequestOptions) { // ChangeFeed processor relies on getting GoneException signals // to handle split of leases - so we need to suppress the split-proofing // in the underlying fetcher/pipeline for the change feed processor. 
CosmosChangeFeedRequestOptions effectiveRequestOptions = ModelBridgeInternal.disableSplitHandling(changeFeedRequestOptions); AsyncDocumentClient clientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(collectionLink.getDatabase()); Flux<FeedResponse<JsonNode>> feedResponseFlux = clientWrapper .getCollectionCache() .resolveByNameAsync( null, BridgeInternal.extractContainerSelfLink(collectionLink), null) .flatMapMany((collection) -> { if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } return clientWrapper .queryDocumentChangeFeed(collection, effectiveRequestOptions) .map(response -> { List<JsonNode> results = response.getResults() .stream() .map(document -> ModelBridgeInternal.toObjectFromJsonSerializable( document, JsonNode.class)) .collect(Collectors.toList()); return BridgeInternal.toFeedResponsePage( results, response.getResponseHeaders(), false); }); }); return feedResponseFlux.publishOn(this.rxScheduler); } @Override public Mono<CosmosDatabaseResponse> readDatabase(CosmosAsyncDatabase database, CosmosDatabaseRequestOptions options) { return database.read() .publishOn(this.rxScheduler); } @Override public Mono<CosmosContainerResponse> readContainer(CosmosAsyncContainer containerLink, CosmosContainerRequestOptions options) { return containerLink.read(options) .publishOn(this.rxScheduler); } @Override public <T> Mono<CosmosItemResponse<T>> createItem(CosmosAsyncContainer containerLink, T document, CosmosItemRequestOptions options, boolean disableAutomaticIdGeneration) { if (options != null) { return containerLink.createItem(document, options) .publishOn(this.rxScheduler); } else { return containerLink.createItem(document) .publishOn(this.rxScheduler); } } @Override public Mono<CosmosItemResponse<Object>> deleteItem(String itemId, PartitionKey partitionKey, CosmosItemRequestOptions options) { return cosmosContainer.deleteItem(itemId, partitionKey, options) .publishOn(this.rxScheduler); } @Override public <T> 
Mono<CosmosItemResponse<T>> replaceItem(String itemId, PartitionKey partitionKey, T document, CosmosItemRequestOptions options) { return cosmosContainer.replaceItem(document, itemId, partitionKey, options) .publishOn(this.rxScheduler); } @Override public <T> Mono<CosmosItemResponse<T>> readItem(String itemId, PartitionKey partitionKey, CosmosItemRequestOptions options, Class<T> itemType) { return cosmosContainer.readItem(itemId, partitionKey, options, itemType) .publishOn(this.rxScheduler); } @Override public <T> Flux<FeedResponse<T>> queryItems(CosmosAsyncContainer containerLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options, Class<T> klass) { return containerLink.queryItems(querySpec, options, klass) .byPage() .publishOn(this.rxScheduler); } @Override public URI getServiceEndpoint() { return documentClient.getServiceEndpoint(); } @Override public Mono<CosmosContainerProperties> readContainerSettings(CosmosAsyncContainer containerLink, CosmosContainerRequestOptions options) { return containerLink.read(options) .map(CosmosContainerResponse::getProperties); } @Override public CosmosAsyncContainer getContainerClient() { return this.cosmosContainer; } @Override public CosmosAsyncDatabase getDatabaseClient() { return this.cosmosContainer.getDatabase(); } @Override public void close() { } }
/******************************************************************************* * Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.oxm.mappings; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Map.Entry; import java.util.Vector; import javax.xml.namespace.QName; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.InstanceVariableAttributeAccessor; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.oxm.NodeValue; import org.eclipse.persistence.internal.oxm.XMLChoiceFieldToClassAssociation; import org.eclipse.persistence.internal.oxm.XMLConversionManager; import org.eclipse.persistence.internal.oxm.XPathFragment; import 
org.eclipse.persistence.internal.oxm.mappings.ChoiceCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.XMLContainerMapping; import org.eclipse.persistence.internal.queries.CollectionContainerPolicy; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.ChangeRecord; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.AttributeAccessor; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.foundation.AbstractCompositeCollectionMapping; import org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping; import org.eclipse.persistence.oxm.XMLField; import org.eclipse.persistence.oxm.XMLMarshaller; import org.eclipse.persistence.oxm.XMLRoot; import org.eclipse.persistence.oxm.XMLUnmarshaller; import org.eclipse.persistence.oxm.mappings.converters.XMLConverter; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; import org.eclipse.persistence.oxm.record.DOMRecord; import org.eclipse.persistence.oxm.record.XMLEntry; import org.eclipse.persistence.oxm.record.XMLRecord; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.sessions.Session; import 
org.eclipse.persistence.sessions.remote.DistributedSession; /** * PUBLIC: * <p><b>Purpose:</b>Provide a mapping that can map a single attribute to a number of * different elements in an XML Document. This will be used to map to Choices or Substitution * Groups in an XML Schema * <p><b>Responsibilities:</b><ul> * <li>Allow the user to specify XPath {@literal ->} Type mappings</li> * <li>Handle reading and writing of XML Documents containing a collection of choice or substitution * group elements</li> * </ul> * <p>The XMLChoiceCollectionMapping is the collection version of the XMLChoiceMapping. This mapping * allows the user to specify a number of different xpaths, and types associated with those xpaths. * When any of these elements are encountered in the XML Document, they are read in as the correct * type and added to the collection. * <p><b>Setting up XPath mappings:</b>Unlike other OXM Mappings, instead of setting a single xpath, * the addChoiceElement method is used to specify an xpath and the type associated with this xpath. 
* <br>
* xmlChoiceCollectionMapping.addChoiceElement("mystring/text()", String.class);
* <br>
* xmlChoiceCollectionMapping.addChoiceElement("myaddress", Address.class);
*
*/
public class XMLChoiceCollectionMapping extends DatabaseMapping implements ChoiceCollectionMapping<AbstractSession, AttributeAccessor, ContainerPolicy, Converter, ClassDescriptor, DatabaseField, XMLMarshaller, Session, XMLUnmarshaller, XMLField, XMLMapping, XMLRecord>, XMLMapping {
    // xpath field -> element class resolved for that field
    private Map<XMLField, Class> fieldToClassMappings;
    // element class -> the single field used when writing instances of that class
    private Map<Class, XMLField> classToFieldMappings;
    // element class -> source key fields, for reference-mapping choices
    private Map<Class, List<XMLField>> classToSourceFieldsMappings;
    // same as above keyed by class name, used before class names are resolved
    private Map<String, List<XMLField>> classNameToSourceFieldsMappings;
    // field -> nested mapping that actually marshals/unmarshals that choice
    private Map<XMLField, XMLMapping> choiceElementMappings;
    private Map<String, XMLMapping> choiceElementMappingsByClassName;
    private Map<Class, XMLMapping> choiceElementMappingsByClass;
    // registrations made by class name; resolved in convertClassNamesToClasses
    private Map<XMLField, String> fieldToClassNameMappings;
    private Map<String, XMLField> classNameToFieldMappings;
    // optional per-field converters, pushed onto the nested mappings in initialize
    private Map<XMLField, Converter> fieldsToConverters;
    private ContainerPolicy containerPolicy;
    private boolean isDefaultEmptyContainer = XMLContainerMapping.EMPTY_CONTAINER_DEFAULT;
    private boolean isMixedContent;
    private String mixedGroupingElement;
    private AbstractNullPolicy wrapperNullPolicy;
    private boolean isAny;
    private boolean isWriteOnly;
    // shared placeholder accessor handed to internal reference mappings
    // NOTE(review): trailing double semicolon kept as-is (harmless empty statement)
    private static final AttributeAccessor temporaryAccessor = new InstanceVariableAttributeAccessor();;
    private boolean reuseContainer;
    private Converter converter;
    private XMLCompositeDirectCollectionMapping mixedContentMapping;
    private XMLAnyCollectionMapping anyMapping;
    private static final String DATA_HANDLER = "javax.activation.DataHandler";
    private static final String MIME_MULTIPART = "javax.mail.MimeMultipart";
    private static final String IMAGE = "java.awt.Image";

    /**
     * Default constructor: initializes the lookup maps (insertion-ordered where
     * iteration order matters) and installs the default container policy.
     */
    public XMLChoiceCollectionMapping() {
        fieldToClassMappings = new HashMap<XMLField, Class>();
        fieldToClassNameMappings = new HashMap<XMLField, String>();
        classToFieldMappings = new HashMap<Class, XMLField>();
        classNameToFieldMappings = new HashMap<String, XMLField>();
        choiceElementMappings = new LinkedHashMap<XMLField, XMLMapping>();
        fieldsToConverters = new HashMap<XMLField, Converter>();
        choiceElementMappingsByClassName = new LinkedHashMap<String, XMLMapping>();
        choiceElementMappingsByClass = new LinkedHashMap<Class, XMLMapping>();
        this.containerPolicy = ContainerPolicy.buildDefaultPolicy();
    }

    /**
     * Return the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public Converter getConverter() {
        return converter;
    }

    /**
     * Set the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public void setConverter(Converter converter) {
        this.converter = converter;
    }

    /**
     * INTERNAL:
     * Clone the attribute from the clone and assign it to the backup.
     * Not supported for this mapping type.
     */
    public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        throw DescriptorException.invalidMappingOperation(this, "buildBackupClone");
    }

    /**
     * INTERNAL:
     * Clone the attribute from the original and assign it to the clone.
     * Not supported for this mapping type.
     */
    @Override
    public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) {
        throw DescriptorException.invalidMappingOperation(this, "buildClone");
    }

    // INTERNAL: not supported for this mapping type.
    public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) {
        throw DescriptorException.invalidMappingOperation(this, "buildCloneFromRow");
    }

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        //Our current XML support does not make use of the UnitOfWork.
    }

    /**
     * INTERNAL:
     * This method was created in VisualAge.
     * Not supported for this mapping type.
     * @return prototype.changeset.ChangeRecord
     */
    public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) {
        throw DescriptorException.invalidMappingOperation(this, "compareForChange");
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     * Not supported for this mapping type.
     */
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        throw DescriptorException.invalidMappingOperation(this, "compareObjects");
    }

    /**
     * INTERNAL:
     * An object has been serialized from the server to the client.
     * Replace the transient attributes of the remote value holders
     * with client-side objects.
     * Not supported for this mapping type.
     */
    public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) {
        throw DescriptorException.invalidMappingOperation(this, "fixObjectReferences");
    }

    /**
     * INTERNAL:
     * Iterate on the appropriate attribute value.
     * Not supported for this mapping type.
     */
    public void iterate(DescriptorIterator iterator) {
        throw DescriptorException.invalidMappingOperation(this, "iterate");
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     * Not supported for this mapping type.
     */
    @Override
    public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        throw DescriptorException.invalidMappingOperation(this, "mergeChangesIntoObject");
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     * Not supported for this mapping type.
     */
    @Override
    public void mergeIntoObject(Object target, boolean isTargetUninitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        throw DescriptorException.invalidMappingOperation(this, "mergeIntoObject");
    }

    /**
     * INTERNAL:
     * Build the collection attribute value from the row: each XMLEntry is routed
     * to the nested choice mapping registered for its field, converted, and added
     * into a container built by the container policy. Reference-mapping choices
     * then read themselves into the same container, once each.
     */
    public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException {
        List<XMLEntry> values = ((DOMRecord)row).getValuesIndicatingNoEntry(this.getFields());
        Object container = getContainerPolicy().containerInstance(values.size());
        for(XMLEntry next:values) {
            Field valueField = next.getXMLField();
            DatabaseMapping nextMapping = (DatabaseMapping)this.choiceElementMappings.get(valueField);
            if(nextMapping.isAbstractCompositeCollectionMapping()) {
                // composite object: build from the nested row, convert, then add
                XMLCompositeCollectionMapping xmlMapping = (XMLCompositeCollectionMapping)nextMapping;
                Object value = xmlMapping.buildObjectFromNestedRow((AbstractRecord)next.getValue(), joinManager, sourceQuery, executionSession, isTargetProtected);
                value = convertDataValueToObjectValue(value, executionSession, ((XMLRecord) row).getUnmarshaller());
                getContainerPolicy().addInto(value, container, executionSession);
            } else if(nextMapping instanceof XMLCompositeDirectCollectionMapping){
                // simple value: convert the raw entry value directly
                XMLCompositeDirectCollectionMapping xmlMapping = (XMLCompositeDirectCollectionMapping)nextMapping;
                Object value = next.getValue();
                value = convertDataValueToObjectValue(value, executionSession, ((XMLRecord) row).getUnmarshaller());
                getContainerPolicy().addInto(value, container, executionSession);
            }
        }
        // processedMappings guards against invoking the same reference mapping twice
        // (it can be registered under several source fields)
        ArrayList<XMLMapping> processedMappings = new ArrayList<XMLMapping>();
        for(XMLMapping mapping:choiceElementMappings.values()) {
            if(((DatabaseMapping)mapping).isObjectReferenceMapping() && ((DatabaseMapping)mapping).isCollectionMapping() && !(processedMappings.contains(mapping))) {
                ((XMLCollectionReferenceMapping)mapping).readFromRowIntoObject(row, joinManager, ((XMLRecord)row).getCurrentObject(), cacheKey, sourceQuery, executionSession, isTargetProtected, container);
                processedMappings.add(mapping);
            }
        }
        return container;
    }

    /**
     * INTERNAL:
     * Write the collection attribute into the row: each value is matched to its
     * choice field (directly by class, via an XMLRoot element name, or through
     * reference-mapping source fields) and collected as XMLEntry pairs.
     */
    @Override
    public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) throws DescriptorException {
        if(this.isReadOnly()) {
            return;
        }
        Object attributeValue = getAttributeValueFromObject(object);
        List<XMLEntry> nestedRows = new ArrayList<XMLEntry>();
        XMLRecord record = (XMLRecord)row;
        //First determine which Field is associated with each value:
        if(null != attributeValue) {
            ContainerPolicy cp = getContainerPolicy();
            Object iterator = cp.iteratorFor(attributeValue);
            if(null != iterator) {
                while(cp.hasNext(iterator)) {
                    Object value = cp.next(iterator, session);
                    value = convertObjectValueToDataValue(value, session, record.getMarshaller());
                    NodeValue associatedNodeValue = null; // NOTE(review): unused local, kept as-is
                    XMLField associatedField = null;
                    Object fieldValue = value;
                    if(value instanceof XMLRoot) {
                        // XMLRoot carries its own element name; resolve the field from it first
                        XMLRoot rootValue = (XMLRoot)value;
                        String localName = rootValue.getLocalName();
                        String namespaceUri = rootValue.getNamespaceURI();
                        fieldValue = rootValue.getObject();
                        associatedField = getFieldForName(localName, namespaceUri);
                        if(associatedField == null) {
                            associatedField = getClassToFieldMappings().get(fieldValue.getClass());
                        }
                    } else {
                        associatedField = getClassToFieldMappings().get(value.getClass());
                    }
                    if(associatedField == null) {
                        //this may be a reference mapping
                        List<XMLField> sourceFields = classToSourceFieldsMappings.get(value.getClass());
                        if(sourceFields != null && sourceFields.size() > 0) {
                            DatabaseMapping xmlMapping = (DatabaseMapping)this.choiceElementMappings.get(sourceFields.get(0));
                            for(XMLField next:sourceFields) {
                                fieldValue = ((XMLCollectionReferenceMapping)xmlMapping).buildFieldValue(value, next, session);
                                XMLEntry entry = new XMLEntry();
                                entry.setValue(fieldValue);
                                entry.setXMLField(next);
                                nestedRows.add(entry);
                            }
                        }
                    } else {
                        DatabaseMapping xmlMapping = (DatabaseMapping)this.choiceElementMappings.get(associatedField);
                        if(xmlMapping.isAbstractCompositeCollectionMapping()) {
                            fieldValue = ((XMLCompositeCollectionMapping)xmlMapping).buildCompositeRow(fieldValue, session, row, writeType);
                        }
                        XMLEntry entry = new XMLEntry();
                        entry.setValue(fieldValue);
                        entry.setXMLField(associatedField);
                        nestedRows.add(entry);
                    }
                }
            }
        }
        ((DOMRecord)row).put(getFields(), nestedRows);
    }

    /**
     * Find the choice field whose leaf (non-text) XPath fragment matches the given
     * element name and namespace; returns null when no field matches.
     */
    private XMLField getFieldForName(String localName, String namespaceUri) {
        Iterator fields = getFields().iterator();
        while(fields.hasNext()) {
            XMLField nextField = (XMLField)fields.next();
            XPathFragment fragment = nextField.getXPathFragment();
            while(fragment != null && (!fragment.nameIsText())) {
                if(fragment.getNextFragment() == null || fragment.getHasText()) {
                    if(fragment.getLocalName().equals(localName)) {
                        String fragUri = fragment.getNamespaceURI();
                        if((namespaceUri == null && fragUri == null) || (namespaceUri != null && fragUri != null && namespaceUri.equals(fragUri))) {
                            return nextField;
                        }
                    }
                }
                fragment = fragment.getNextFragment();
            }
        }
        return null;
    }

    // Intentional no-op: single-value writes are not supported by this collection mapping.
    public void writeSingleValue(Object value, Object parent, XMLRecord row, AbstractSession session) {
    }

    public boolean isXMLMapping() {
        return true;
    }

    /** Lazily collects the mapped fields from fieldToClassMappings. */
    public Vector<DatabaseField> getFields() {
        if(fields == null || fields.size() == 0) {
            fields = this.collectFields();
        }
        return this.fields;
    }

    protected Vector<DatabaseField> collectFields() {
        return new Vector<DatabaseField>(fieldToClassMappings.keySet());
    }

    /** Register a choice: values at the given xpath map to elementType. */
    public void addChoiceElement(String xpath, Class elementType) {
        XMLField field = new XMLField(xpath);
        addChoiceElement(field, elementType);
    }

    /** Register a choice by class name (resolved later in convertClassNamesToClasses). */
    public void addChoiceElement(String xpath, String elementTypeName) {
        XMLField field = new XMLField(xpath);
        addChoiceElement(field, elementTypeName);
    }

    public void addChoiceElement(XMLField xmlField, Class elementType) {
        getFieldToClassMappings().put(xmlField, elementType);
        if(!(this.fieldToClassNameMappings.containsKey(xmlField))) {
            this.fieldToClassNameMappings.put(xmlField, elementType.getName());
        }
        // first field registered for a class wins as its write field
        if (classToFieldMappings.get(elementType) == null) {
            classToFieldMappings.put(elementType, xmlField);
        }
        addChoiceElementMapping(xmlField, elementType);
    }

    /** Register a reference-mapping choice keyed by multiple source/target fields. */
    public void addChoiceElement(List<XMLField> srcFields, Class elementType, List<XMLField> tgtFields) {
        for(XMLField sourceField:srcFields) {
            getFieldToClassMappings().put(sourceField, elementType);
            this.fieldToClassNameMappings.put(sourceField, elementType.getName());
        }
        if (getClassToSourceFieldsMappings().get(elementType) == null) {
            getClassToSourceFieldsMappings().put(elementType, srcFields);
        }
        addChoiceElementMapping(srcFields, elementType, tgtFields);
    }

    /** Same as above, with the element type supplied by name. */
    public void addChoiceElement(List<XMLField> srcFields, String elementTypeName, List<XMLField> tgtFields) {
        for(XMLField sourceField:srcFields) {
            this.fieldToClassNameMappings.put(sourceField, elementTypeName);
        }
        if (getClassNameToSourceFieldsMappings().get(elementTypeName) == null) {
            getClassNameToSourceFieldsMappings().put(elementTypeName, srcFields);
        }
        addChoiceElementMapping(srcFields, elementTypeName, tgtFields);
    }

    public void addChoiceElement(String srcXPath, Class elementType, String tgtXPath) {
        XMLField srcField = new XMLField(srcXPath);
        XMLField tgtField = new XMLField(tgtXPath);
        addChoiceElement(srcField, elementType, tgtField);
    }

    public void addChoiceElement(String srcXpath, String elementTypeName, String tgtXpath) {
        XMLField field = new XMLField(srcXpath);
        XMLField tgtField = new XMLField(tgtXpath);
        this.fieldToClassNameMappings.put(field, elementTypeName);
        if(this.classNameToFieldMappings.get(elementTypeName) == null) {
            this.classNameToFieldMappings.put(elementTypeName, field);
        }
        addChoiceElementMapping(field, elementTypeName, tgtField);
    }

    public void addChoiceElement(XMLField sourceField, Class elementType, XMLField targetField) {
        getFieldToClassMappings().put(sourceField, elementType);
        this.fieldToClassNameMappings.put(sourceField, elementType.getName());
        if (classToFieldMappings.get(elementType) == null) {
            classToFieldMappings.put(elementType, sourceField);
        }
        addChoiceElementMapping(sourceField, elementType, targetField);
    }

    // Builds an XMLCollectionReferenceMapping for a multi-key reference choice (resolved class).
    private void addChoiceElementMapping(List<XMLField> sourceFields, Class theClass, List<XMLField> targetFields) {
        XMLCollectionReferenceMapping xmlMapping = new XMLCollectionReferenceMapping();
        xmlMapping.setReferenceClass(theClass);
        xmlMapping.setAttributeAccessor(temporaryAccessor);
        for(int i = 0; i < sourceFields.size(); i++) {
            XMLField sourceField = sourceFields.get(i);
            xmlMapping.addSourceToTargetKeyFieldAssociation(sourceField, targetFields.get(i));
            this.choiceElementMappings.put(sourceField, xmlMapping);
        }
        this.choiceElementMappingsByClass.put(theClass, xmlMapping);
    }

    // Same as above, keyed by class name (class resolved in convertClassNamesToClasses).
    private void addChoiceElementMapping(List<XMLField> sourceFields, String theClass, List<XMLField> targetFields) {
        XMLCollectionReferenceMapping xmlMapping = new XMLCollectionReferenceMapping();
        xmlMapping.setReferenceClassName(theClass);
        xmlMapping.setAttributeAccessor(temporaryAccessor);
        for(int i = 0; i < sourceFields.size(); i++) {
            XMLField sourceField = sourceFields.get(i);
            xmlMapping.addSourceToTargetKeyFieldAssociation(sourceField, targetFields.get(i));
            this.choiceElementMappings.put(sourceField, xmlMapping);
        }
        this.choiceElementMappingsByClassName.put(theClass, xmlMapping);
    }

    // Single-key reference choice with a resolved class.
    private void addChoiceElementMapping(XMLField sourceField, Class theClass, XMLField targetField) {
        XMLCollectionReferenceMapping mapping = new XMLCollectionReferenceMapping();
        mapping.setReferenceClass(theClass);
        mapping.setAttributeAccessor(temporaryAccessor);
        mapping.addSourceToTargetKeyFieldAssociation(sourceField, targetField);
        this.choiceElementMappings.put(sourceField, mapping);
        this.choiceElementMappingsByClass.put(theClass, mapping);
    }

    // Single-key reference choice keyed by class name.
    private void addChoiceElementMapping(XMLField sourceField, String className, XMLField targetField) {
        XMLCollectionReferenceMapping mapping = new XMLCollectionReferenceMapping();
        mapping.setReferenceClassName(className);
        mapping.setAttributeAccessor(temporaryAccessor);
        mapping.addSourceToTargetKeyFieldAssociation(sourceField, targetField);
        this.choiceElementMappings.put(sourceField, mapping);
        this.choiceElementMappingsByClassName.put(className, mapping);
    }

    public void addChoiceElement(XMLField field, String elementTypeName) {
        this.fieldToClassNameMappings.put(field, elementTypeName);
        if (classNameToFieldMappings.get(elementTypeName) == null) {
            classNameToFieldMappings.put(elementTypeName, field);
        }
        addChoiceElementMapping(field, elementTypeName);
    }

    public Map<XMLField, Class> getFieldToClassMappings() {
        return fieldToClassMappings;
    }

    /**
     * INTERNAL:
     * Initialize the nested choice mappings, propagating this mapping's container
     * policy and any per-field converters to each of them before initializing them.
     */
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        if (this.converter != null) {
            this.converter.initialize(this, session);
        }
        // de-duplicated union of the field-keyed and class-keyed nested mappings
        ArrayList<XMLMapping> mappingsList = new ArrayList<XMLMapping>();
        mappingsList.addAll(getChoiceElementMappings().values());
        for(XMLMapping next:getChoiceElementMappingsByClass().values()) {
            if(!(mappingsList.contains(next))) {
                mappingsList.add(next);
            }
        }
        if(isAny){
            //anyMapping = new XMLAnyCollectionMapping();
            mappingsList.add(anyMapping);
        }
        Iterator<XMLMapping> mappings = mappingsList.iterator();
        while(mappings.hasNext()){
            DatabaseMapping nextMapping = (DatabaseMapping)mappings.next();
            Converter converter = null;
            if(fieldsToConverters != null) {
                converter = fieldsToConverters.get(nextMapping.getField());
            }
            if(nextMapping.isAbstractCompositeDirectCollectionMapping()){
                XMLConversionManager xmlConversionManager = (XMLConversionManager) session.getDatasourcePlatform().getConversionManager();
                QName schemaType = xmlConversionManager.schemaType(((AbstractCompositeDirectCollectionMapping)nextMapping).getAttributeElementClass());
                if(schemaType != null) {
                    ((XMLField)nextMapping.getField()).setSchemaType(schemaType);
                }
                if(converter != null){
                    ((AbstractCompositeDirectCollectionMapping)nextMapping).setValueConverter(converter);
                }
                ((AbstractCompositeDirectCollectionMapping)nextMapping).setContainerPolicy(getContainerPolicy());
            }else if(nextMapping.isAbstractCompositeCollectionMapping()){
                if(converter != null){
                    ((AbstractCompositeCollectionMapping)nextMapping).setConverter(converter);
                }
                ((AbstractCompositeCollectionMapping)nextMapping).setContainerPolicy(getContainerPolicy());
            } else if(nextMapping instanceof XMLBinaryDataCollectionMapping) {
                ((XMLBinaryDataCollectionMapping)nextMapping).setContainerPolicy(getContainerPolicy());
                if(converter != null) {
                    ((XMLBinaryDataCollectionMapping)nextMapping).setValueConverter(converter);
                }
            } else if (nextMapping instanceof XMLAnyCollectionMapping){
                ((XMLAnyCollectionMapping)nextMapping).setContainerPolicy(getContainerPolicy());
                if(converter != null && converter instanceof XMLConverter) {
                    ((XMLAnyCollectionMapping)nextMapping).setConverter((XMLConverter)converter);
                }
            }else{
                // remaining case: collection reference mapping
                ((XMLCollectionReferenceMapping)nextMapping).setContainerPolicy(getContainerPolicy());
                ((XMLCollectionReferenceMapping)nextMapping).setReuseContainer(true);
            }
            nextMapping.initialize(session);
        }
    }

    public Map<Class, XMLField> getClassToFieldMappings() {
        return classToFieldMappings;
    }

    public Map<XMLField, XMLMapping> getChoiceElementMappings() {
        return choiceElementMappings;
    }

    public ContainerPolicy getContainerPolicy() {
        return containerPolicy;
    }

    public void setContainerPolicy(ContainerPolicy cp) {
        this.containerPolicy = cp;
    }

    public void useCollectionClass(Class concreteContainerClass) {
        this.setContainerPolicy(ContainerPolicy.buildPolicyFor(concreteContainerClass));
    }

    public void useCollectionClassName(String concreteContainerClassName) {
        this.setContainerPolicy(new CollectionContainerPolicy(concreteContainerClassName));
    }

    /**
     * INTERNAL:
     * Resolve every registration that was made by class name. The same
     * load-or-throw pattern (privileged when required) is repeated for each
     * name-keyed map; failures surface as ValidationException.
     */
    public void convertClassNamesToClasses(ClassLoader classLoader) {
        Iterator<Entry<XMLField, String>> entries = fieldToClassNameMappings.entrySet().iterator();
        while (entries.hasNext()) {
            Map.Entry<XMLField, String> entry = entries.next();
            String className = entry.getValue();
            Class elementType = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                    try {
                        elementType = AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException());
                    }
                } else {
                    elementType = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader);
                }
            } catch (ClassNotFoundException exc) {
                throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc);
            }
            XMLMapping mapping = this.choiceElementMappings.get(entry.getKey());
            mapping.convertClassNamesToClasses(classLoader);
            if(fieldToClassMappings.get(entry.getKey()) == null) {
                fieldToClassMappings.put(entry.getKey(), elementType);
            }
        }
        for(Entry<String, XMLField> next: this.classNameToFieldMappings.entrySet()) {
            String className = next.getKey();
            Class elementType = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                    try {
                        elementType = AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException());
                    }
                } else {
                    elementType = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader);
                }
            } catch (ClassNotFoundException exc) {
                throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc);
            }
            classToFieldMappings.put(elementType, next.getValue());
        }
        if(classNameToSourceFieldsMappings != null) {
            Iterator<Entry<String, List<XMLField>>> sourceFieldEntries = classNameToSourceFieldsMappings.entrySet().iterator();
            while(sourceFieldEntries.hasNext()) {
                Entry<String, List<XMLField>> nextEntry = sourceFieldEntries.next();
                String className = nextEntry.getKey();
                List<XMLField> fields = nextEntry.getValue();
                Class elementType = null;
                try {
                    if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                        try {
                            elementType = AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader));
                        } catch (PrivilegedActionException exception) {
                            throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException());
                        }
                    } else {
                        elementType = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader);
                    }
                } catch (ClassNotFoundException exc) {
                    throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc);
                }
                this.getClassToSourceFieldsMappings().put(elementType,fields);
            }
        }
        if(!choiceElementMappingsByClassName.isEmpty()) {
            for(Entry<String, XMLMapping> next:choiceElementMappingsByClassName.entrySet()) {
                Class elementType = null;
                String className = next.getKey();
                try {
                    if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                        try {
                            elementType = AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader));
                        } catch (PrivilegedActionException exception) {
                            throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException());
                        }
                    } else {
                        elementType = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader);
                    }
                } catch (ClassNotFoundException exc) {
                    throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc);
                }
                if(this.choiceElementMappingsByClass.get(elementType) == null) {
                    this.choiceElementMappingsByClass.put(elementType, next.getValue());
                }
                next.getValue().convertClassNamesToClasses(classLoader);
            }
        }
    }

    /** Associate a converter with one choice field; applied by the nested mapping. */
    public void addConverter(XMLField field, Converter converter) {
        if(this.fieldsToConverters == null) {
            fieldsToConverters = new HashMap<XMLField, Converter>();
        }
        fieldsToConverters.put(field, converter);
    }

    // NOTE(review): this method (and the rest of the class) continues beyond the
    // end of this chunk; the body below is intentionally left as the chunk ends.
    public Converter getConverter(XMLField field) {
        if(null != this.fieldsToConverters) {
            Converter converter = fieldsToConverters.get(field);
if(null != converter) { return converter; } if(null != this.choiceElementMappings) { DatabaseMapping mapping = (DatabaseMapping) this.choiceElementMappings.get(field); if(null == mapping) { return null; } if(mapping.isAbstractCompositeDirectCollectionMapping()) { return ((XMLCompositeDirectCollectionMapping)mapping).getValueConverter(); } else if(mapping.isAbstractDirectMapping()) { return ((XMLDirectMapping)mapping).getConverter(); } } } return null; } public ArrayList getChoiceFieldToClassAssociations() { ArrayList associations = new ArrayList(); if(this.fieldToClassNameMappings.size() > 0) { Set<Entry<XMLField, String>> entries = fieldToClassNameMappings.entrySet(); Iterator<Entry<XMLField, String>> iter = entries.iterator(); while(iter.hasNext()){ Entry<XMLField, String> nextEntry = iter.next(); XMLField xmlField = nextEntry.getKey(); String className = nextEntry.getValue(); XMLChoiceFieldToClassAssociation association = new XMLChoiceFieldToClassAssociation(xmlField, className); associations.add(association); } } return associations; } public void setChoiceFieldToClassAssociations(ArrayList associations) { if(associations.size() > 0) { for(Object next:associations) { XMLChoiceFieldToClassAssociation<Converter, XMLField> association = (XMLChoiceFieldToClassAssociation)next; this.addChoiceElement(association.getXmlField(), association.getClassName()); if(association.getConverter() != null) { this.addConverter(association.getXmlField(), association.getConverter()); } } } } private void addChoiceElementMapping(XMLField xmlField, String className){ if (xmlField.getLastXPathFragment().nameIsText() || xmlField.getLastXPathFragment().isAttribute()) { XMLCompositeDirectCollectionMapping xmlMapping = new XMLCompositeDirectCollectionMapping(); xmlMapping.setAttributeElementClassName(className); xmlMapping.setField(xmlField); xmlMapping.setAttributeAccessor(temporaryAccessor); this.choiceElementMappings.put(xmlField, xmlMapping); 
this.choiceElementMappingsByClassName.put(className, xmlMapping); } else { if(isBinaryType(className)) { XMLBinaryDataCollectionMapping xmlMapping = new XMLBinaryDataCollectionMapping(); xmlMapping.setField(xmlField); xmlMapping.setAttributeAccessor(temporaryAccessor); Class theClass = XMLConversionManager.getDefaultXMLManager().convertClassNameToClass(className); xmlMapping.setAttributeElementClass(theClass); this.choiceElementMappings.put(xmlField, xmlMapping); this.choiceElementMappingsByClassName.put(className, xmlMapping); } else { XMLCompositeCollectionMapping xmlMapping = new XMLCompositeCollectionMapping(); if(!className.equals("java.lang.Object")){ xmlMapping.setReferenceClassName(className); } xmlMapping.setField(xmlField); xmlMapping.setAttributeAccessor(temporaryAccessor); this.choiceElementMappings.put(xmlField, xmlMapping); this.choiceElementMappingsByClassName.put(className, xmlMapping); } } } private void addChoiceElementMapping(XMLField xmlField, Class theClass){ if (xmlField.getLastXPathFragment().nameIsText() || xmlField.getLastXPathFragment().isAttribute()) { XMLCompositeDirectCollectionMapping xmlMapping = new XMLCompositeDirectCollectionMapping(); xmlMapping.setAttributeElementClass(theClass); xmlMapping.setField(xmlField); xmlMapping.setAttributeAccessor(temporaryAccessor); this.choiceElementMappings.put(xmlField, xmlMapping); this.choiceElementMappingsByClass.put(theClass, xmlMapping); } else { if(isBinaryType(theClass)) { XMLBinaryDataCollectionMapping xmlMapping = new XMLBinaryDataCollectionMapping(); xmlMapping.setField(xmlField); xmlMapping.setAttributeElementClass(theClass); xmlMapping.setAttributeAccessor(temporaryAccessor); this.fieldsToConverters.put(xmlField, xmlMapping.getValueConverter()); this.choiceElementMappings.put(xmlField, xmlMapping); this.choiceElementMappingsByClass.put(theClass, xmlMapping); } else { XMLCompositeCollectionMapping xmlMapping = new XMLCompositeCollectionMapping(); 
if(!theClass.equals(ClassConstants.OBJECT)){ xmlMapping.setReferenceClass(theClass); } xmlMapping.setField(xmlField); xmlMapping.setAttributeAccessor(temporaryAccessor); this.choiceElementMappings.put(xmlField, xmlMapping); this.choiceElementMappingsByClass.put(theClass, xmlMapping); } } } public boolean isWriteOnly() { return this.isWriteOnly; } public void setIsWriteOnly(boolean b) { this.isWriteOnly = b; } public boolean isAny() { return this.isAny; } public void setIsAny(boolean b) { this.isAny = b; } public void preInitialize(AbstractSession session) throws DescriptorException { getAttributeAccessor().setIsWriteOnly(this.isWriteOnly()); getAttributeAccessor().setIsReadOnly(this.isReadOnly()); super.preInitialize(session); //Collection<XMLMapping> allMappings = new ArrayList<XMLMapping>(); ArrayList<XMLMapping> mappingsList = new ArrayList<XMLMapping>(); mappingsList.addAll(getChoiceElementMappings().values()); if(isAny){ anyMapping = new XMLAnyCollectionMapping(); //if(mixedGroupingElement != null){ // anyMapping.setField(new XMLField(mixedGroupingElement)); //} anyMapping.setMixedContent(false); anyMapping.setKeepAsElementPolicy(UnmarshalKeepAsElementPolicy.KEEP_UNKNOWN_AS_ELEMENT); anyMapping.setUseXMLRoot(true); mappingsList.add(anyMapping); } for(XMLMapping next:getChoiceElementMappingsByClass().values()) { if(!(mappingsList.contains(next))) { mappingsList.add(next); } } for(XMLMapping next:getChoiceElementMappingsByClass().values()) { if(!(mappingsList.contains(next))) { mappingsList.add(next); } } Iterator<XMLMapping> mappings = mappingsList.iterator(); while(mappings.hasNext()){ DatabaseMapping nextMapping = (DatabaseMapping)mappings.next(); nextMapping.setAttributeName(this.getAttributeName()); if(nextMapping.getAttributeAccessor() == temporaryAccessor){ nextMapping.setAttributeAccessor(getAttributeAccessor()); } nextMapping.setIsReadOnly(this.isReadOnly()); ((XMLMapping)nextMapping).setIsWriteOnly(this.isWriteOnly()); 
nextMapping.setDescriptor(getDescriptor()); nextMapping.preInitialize(session); } } public void setAttributeValueInObject(Object object, Object value) throws DescriptorException { if(isWriteOnly()) { return; } super.setAttributeValueInObject(object, value); } /** * Return true if the original container on the object should be used if * present. If it is not present then the container policy will be used to * create the container. */ public boolean getReuseContainer() { return reuseContainer; } /** * Specify whether the original container on the object should be used if * present. If it is not present then the container policy will be used to * create the container. */ public void setReuseContainer(boolean reuseContainer) { this.reuseContainer = reuseContainer; } public Map<Class, List<XMLField>> getClassToSourceFieldsMappings() { if(this.classToSourceFieldsMappings == null) { this.classToSourceFieldsMappings = new HashMap<Class, List<XMLField>>(); } return this.classToSourceFieldsMappings; } private Map<String, List<XMLField>> getClassNameToSourceFieldsMappings() { if(this.classNameToSourceFieldsMappings == null) { this.classNameToSourceFieldsMappings = new HashMap<String, List<XMLField>>(); } return this.classNameToSourceFieldsMappings; } private boolean isBinaryType(String className) { if(className.equals(byte[].class.getName()) || className.equals(Byte[].class.getName()) || className.equals(DATA_HANDLER) || className.equals(IMAGE) || className.equals(MIME_MULTIPART)) { return true; } return false; } private boolean isBinaryType(Class theClass) { String className = theClass.getName(); if(className.equals(byte[].class.getName()) || className.equals(Byte[].class.getName()) || className.equals(DATA_HANDLER) || className.equals(IMAGE) || className.equals(MIME_MULTIPART)) { return true; } return false; } public Map<String, XMLField> getClassNameToFieldMappings() { return classNameToFieldMappings; } public boolean isMixedContent() { // return this.mixedContentMapping 
!= null; return isMixedContent; } /** * PUBLIC: * Allows the user to indicate that this mapping should also allow for mixed content in addition to * any of the elements in the choice. The grouping element parameter is used in the case that there is * a common grouping element to all the other elements in this choice. If so, that grouping element can * be specified here to allow the mixed content to be written/detected inside the wrapper element. * @since EclipseLink 2.3.1 */ public void setMixedContent(String groupingElement) { isMixedContent = true; String xpath = groupingElement; if(groupingElement.length() == 0) { xpath = "text()"; } else { xpath += "/" + "text()"; } XMLField field = new XMLField(xpath); XMLCompositeDirectCollectionMapping xmlMapping = new XMLCompositeDirectCollectionMapping(); Class theClass = ClassConstants.STRING; xmlMapping.setAttributeElementClass(theClass); xmlMapping.setField(field); xmlMapping.setAttributeAccessor(temporaryAccessor); this.mixedContentMapping = xmlMapping; this.choiceElementMappings.put(field, xmlMapping); } /** * PUBLIC: * Allows the user to indicate that this mapping should also allow for mixed content in addition to * any of the elements in the choice. * @since EclipseLink 2.3.1 */ public void setMixedContent(boolean mixed) { if(!mixed) { this.mixedContentMapping = null; } else { setMixedContent(""); } isMixedContent = mixed; } public XMLCompositeDirectCollectionMapping getMixedContentMapping() { return this.mixedContentMapping; } public XMLAnyCollectionMapping getAnyMapping(){ return anyMapping; } /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer() { return isDefaultEmptyContainer; } /** * INTERNAL * Indicate whether by default an empty container should be set on the * field/property if the collection is not present in the XML document. 
* @since EclipseLink 2.3.3 */ public void setDefaultEmptyContainer(boolean defaultEmptyContainer) { this.isDefaultEmptyContainer = defaultEmptyContainer; } public AbstractNullPolicy getWrapperNullPolicy() { return this.wrapperNullPolicy; } public void setWrapperNullPolicy(AbstractNullPolicy policy) { this.wrapperNullPolicy = policy; } public Map<Class, XMLMapping> getChoiceElementMappingsByClass() { return choiceElementMappingsByClass; } public void setChoiceElementMappingsByClass(Map<Class, XMLMapping> choiceElementMappingsByClass) { this.choiceElementMappingsByClass = choiceElementMappingsByClass; } /** * INTERNAL * @since EclipseLink 2.5.0 */ public Object convertObjectValueToDataValue(Object value, Session session, XMLMarshaller marshaller) { if (null != converter) { if (converter instanceof XMLConverter) { return ((XMLConverter)converter).convertObjectValueToDataValue(value, session, marshaller); } else { return converter.convertObjectValueToDataValue(value, session); } } return value; } /** * INTERNAL * @since EclipseLink 2.5.0 */ public Object convertDataValueToObjectValue(Object fieldValue, Session session, XMLUnmarshaller unmarshaller) { if (null != converter) { if (converter instanceof XMLConverter) { return ((XMLConverter)converter).convertDataValueToObjectValue(fieldValue, session, unmarshaller); } else { return converter.convertDataValueToObjectValue(fieldValue, session); } } return fieldValue; } }
/*
 * Copyright (C) 2007 The Android Open Source Project
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.preferencefragment.preference;

import android.content.Context;
import android.content.res.TypedArray;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v4.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;

import com.github.preferencefragment.R;

/**
 * This is an example of a custom preference type. The preference counts the
 * number of clicks it has received and stores/retrieves it from the storage.
 * The click count is persisted as an int (see {@link #persistInt}) and shown
 * in a custom widget layout next to the preference title.
 */
public class MyPreference extends Preference {
    // Number of clicks received so far; persisted via persistInt() when the
    // preference is persistent, otherwise carried in SavedState.
    private int mClickCounter;

    // This is the constructor called by the inflater
    public MyPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        // Custom widget shown in the preference row (displays the counter).
        setWidgetLayoutResource(R.layout.preference_widget_mypreference);
    }

    @Override
    protected void onBindView(View view) {
        super.onBindView(view);

        // Set our custom views inside the layout
        final TextView myTextView = (TextView) view.findViewById(R.id.mypreference_widget);
        if (myTextView != null) {
            myTextView.setText(String.valueOf(mClickCounter));
        }
    }

    @Override
    protected void onClick() {
        int newValue = mClickCounter + 1;
        // Give the client a chance to ignore this change if they deem it
        // invalid
        if (!callChangeListener(newValue)) {
            // They don't want the value to be set
            return;
        }

        // Increment counter
        mClickCounter = newValue;

        // Save to persistent storage (this method will make sure this
        // preference should be persistent, along with other useful checks)
        persistInt(mClickCounter);

        // Data has changed, notify so UI can be refreshed!
        notifyChanged();
    }

    @Override
    protected Object onGetDefaultValue(TypedArray a, int index) {
        // This preference type's value type is Integer, so we read the default
        // value from the attributes as an Integer.
        return a.getInteger(index, 0);
    }

    @Override
    protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
        if (restoreValue) {
            // Restore state from persistent storage; current mClickCounter is
            // used as the fallback if nothing was persisted yet.
            mClickCounter = getPersistedInt(mClickCounter);
        } else {
            // Set state from the default value supplied by onGetDefaultValue().
            int value = (Integer) defaultValue;
            mClickCounter = value;
            persistInt(value);
        }
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        /*
         * Suppose a client uses this preference type without persisting. We
         * must save the instance state so it is able to, for example, survive
         * orientation changes.
         */
        final Parcelable superState = super.onSaveInstanceState();
        if (isPersistent()) {
            // No need to save instance state since it's persistent
            return superState;
        }

        // Save the instance state
        final SavedState myState = new SavedState(superState);
        myState.clickCounter = mClickCounter;
        return myState;
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (!state.getClass().equals(SavedState.class)) {
            // Didn't save state for us in onSaveInstanceState
            super.onRestoreInstanceState(state);
            return;
        }

        // Restore the instance state
        SavedState myState = (SavedState) state;
        super.onRestoreInstanceState(myState.getSuperState());
        mClickCounter = myState.clickCounter;
        notifyChanged();
    }

    /**
     * SavedState, a subclass of {@link BaseSavedState}, will store the state
     * of MyPreference, a subclass of Preference.
     * <p>
     * It is important to always call through to super methods.
     * <p>
     * NOTE: the Parcel read in the constructor and the write in
     * {@link #writeToParcel} must stay in the same order — the counter is the
     * single value read/written after the super state.
     */
    private static class SavedState extends BaseSavedState {
        int clickCounter;

        public SavedState(Parcel source) {
            super(source);
            // Restore the click counter
            clickCounter = source.readInt();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            // Save the click counter
            dest.writeInt(clickCounter);
        }

        public SavedState(Parcelable superState) {
            super(superState);
        }

        // Required by the Parcelable contract so the framework can re-inflate
        // SavedState instances from a Parcel.
        public static final Parcelable.Creator<SavedState> CREATOR =
                new Parcelable.Creator<SavedState>() {
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }

            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
}
/**
 * Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2016
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.glite.security.voms.admin.view.actions.user;

import org.apache.struts2.convention.annotation.Result;
import org.apache.struts2.convention.annotation.Results;

/**
 * Struts2 convention-based action that renders the "change OrgDB id" view for
 * a user. The class body is intentionally empty: all request handling is
 * inherited from {@link UserActionSupport}, and this subclass exists only to
 * map the SUCCESS result to the {@code userChangeOrgDbId} location via the
 * annotations below.
 */
@Results({ @Result(name = UserActionSupport.SUCCESS, location = "userChangeOrgDbId") })
public class ChangeOrgdbIdAction extends UserActionSupport {

  /**
   * Serialization version; kept at 1L as the class carries no own state.
   */
  private static final long serialVersionUID = 1L;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina;

/**
 * <p>Interface describing a collection of Valves that should be executed
 * in sequence when the <code>invoke()</code> method is invoked.  It is
 * required that a Valve somewhere in the pipeline (usually the last one)
 * must process the request and create the corresponding response, rather
 * than trying to pass the request on.</p>
 *
 * <p>There is generally a single Pipeline instance associated with each
 * Container.  The container's normal request processing functionality is
 * generally encapsulated in a container-specific Valve, which should always
 * be executed at the end of a pipeline.  To facilitate this, the
 * <code>setBasic()</code> method is provided to set the Valve instance that
 * will always be executed last.  Other Valves will be executed in the order
 * that they were added, before the basic Valve is executed.</p>
 *
 * @author Craig R. McClanahan
 * @author Peter Donald
 * @version $Revision: 467222 $ $Date: 2006-10-24 05:17:11 +0200 (mar., 24 oct. 2006) $
 */
public interface Pipeline {

    // ------------------------------------------------------------- Properties

    /**
     * <p>Return the Valve instance that has been distinguished as the basic
     * Valve for this Pipeline (if any).</p>
     */
    public Valve getBasic();

    /**
     * <p>Set the Valve instance that has been distinguished as the basic
     * Valve for this Pipeline (if any).  Prior to setting the basic Valve,
     * the Valve's <code>setContainer()</code> will be called, if it
     * implements <code>Contained</code>, with the owning Container as an
     * argument.  The method may throw an <code>IllegalArgumentException</code>
     * if this Valve chooses not to be associated with this Container, or
     * <code>IllegalStateException</code> if it is already associated with
     * a different Container.</p>
     *
     * @param valve Valve to be distinguished as the basic Valve
     */
    public void setBasic(Valve valve);

    // --------------------------------------------------------- Public Methods

    /**
     * <p>Add a new Valve to the end of the pipeline associated with this
     * Container.  Prior to adding the Valve, the Valve's
     * <code>setContainer()</code> method will be called, if it implements
     * <code>Contained</code>, with the owning Container as an argument.
     * The method may throw an
     * <code>IllegalArgumentException</code> if this Valve chooses not to
     * be associated with this Container, or <code>IllegalStateException</code>
     * if it is already associated with a different Container.</p>
     *
     * @param valve Valve to be added
     *
     * @exception IllegalArgumentException if this Container refused to
     *  accept the specified Valve, or if the specified Valve refuses to be
     *  associated with this Container
     * @exception IllegalStateException if the specified Valve is already
     *  associated with a different Container
     */
    public void addValve(Valve valve);

    /**
     * Return the set of Valves in the pipeline associated with this
     * Container, including the basic Valve (if any).  If there are no
     * such Valves, a zero-length array is returned.
     */
    public Valve[] getValves();

    /**
     * Remove the specified Valve from the pipeline associated with this
     * Container, if it is found; otherwise, do nothing.  If the Valve is
     * found and removed, the Valve's <code>setContainer(null)</code> method
     * will be called if it implements <code>Contained</code>.
     *
     * @param valve Valve to be removed
     */
    public void removeValve(Valve valve);

    /**
     * <p>Return the first Valve of this Pipeline (the javadoc on the
     * original method was a copy of {@link #getBasic}; going by the method
     * name this returns the first Valve in execution order — confirm against
     * implementations).</p>
     */
    public Valve getFirst();
}
package sql2o;

import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * A {@link ResultSetHandler} that lazily obtains its real handler from a
 * {@link ResultSetHandlerFactory} on the first invocation — using the
 * ResultSet's metadata — and then delegates every call to it.
 * <p>
 * Thread-safety: {@code inner} is volatile, so a handler created by one
 * thread is safely published to others. Concurrent first calls may each
 * build a handler (benign race); the last write wins.
 *
 * User: dimzon
 * Date: 4/7/14
 * Time: 11:06 PM
 */
public class DelegatingResultSetHandler<E> implements ResultSetHandler<E> {

    // Lazily initialized delegate; null until the first handle() call.
    private volatile ResultSetHandler<E> inner = null;
    private final ResultSetHandlerFactory<E> factory;

    /**
     * @param factory source of the real handler; consulted once, on the
     *                first call to {@link #handle(ResultSet)}
     */
    public DelegatingResultSetHandler(ResultSetHandlerFactory<E> factory) {
        this.factory = factory;
    }

    /**
     * Creates the delegate on first use and forwards the call to it.
     *
     * @param resultSet the row to convert
     * @return whatever the delegate handler produces for this row
     * @throws SQLException propagated from metadata access or the delegate
     */
    @Override
    public E handle(ResultSet resultSet) throws SQLException {
        // FIX: read the volatile field once into a local. The original read
        // `inner` twice (null-check, then dispatch), so the reference it
        // dispatched on was not necessarily the one it had null-checked.
        ResultSetHandler<E> delegate = inner;
        if (delegate == null) {
            delegate = factory.newResultSetHandler(resultSet.getMetaData());
            inner = delegate;
        }
        return delegate.handle(resultSet);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.fortress.core.impl;

import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.fortress.core.FinderException;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.GlobalIds;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.ValidationException;
import org.apache.directory.fortress.core.util.ConstraintValidator;
import org.apache.directory.fortress.core.model.Graphable;
import org.apache.directory.fortress.core.model.Group;
import org.apache.directory.fortress.core.model.Role;
import org.apache.directory.fortress.core.model.UserRole;
import org.apache.directory.fortress.core.util.VUtil;


/**
 * Process module for the Role entity.  This class performs data validations and error mapping.  It is typically called
 * by internal Fortress manager classes ({@link AdminMgrImpl}, {@link AccessMgrImpl},
 * {@link ReviewMgrImpl}, ...) and not intended for external non-Fortress clients.  This class will accept,
 * {@link org.apache.directory.fortress.core.model.Role}, validate its contents and forward on to it's corresponding DAO class {@link RoleDAO}.
 * <p>
 * Class will throw {@link SecurityException} to caller in the event of security policy, data constraint violation or system
 * error internal to DAO object. This class will forward DAO exceptions ({@link org.apache.directory.fortress.core.FinderException},
 * {@link org.apache.directory.fortress.core.CreateException},{@link org.apache.directory.fortress.core.UpdateException},{@link org.apache.directory.fortress.core.RemoveException}),
 * or {@link org.apache.directory.fortress.core.ValidationException} as {@link SecurityException}s with appropriate
 * error id from {@link org.apache.directory.fortress.core.GlobalErrIds}.
 * <p>
 * This class is thread safe.
 * <p>
 *
 * @author Kevin McKinney
 */
final class RoleP
{
    // DAO is stateless per the class thread-safety note above; final so the
    // reference can never be rebound.
    private final RoleDAO rDao = new RoleDAO();
    private static final ConstraintValidator constraintValidator = VUtil.getConstraintValidator();


    /**
     * Return a fully populated Role entity for a given RBAC role name.  If matching record not found a
     * SecurityException will be thrown.
     *
     * @param role contains full role name for RBAC role in directory.
     * @return Role entity containing all attributes associated with Role in directory.
     * @throws SecurityException in the event Role not found or DAO search error.
     */
    Role read( Role role ) throws SecurityException
    {
        return rDao.getRole( role );
    }


    /**
     * Takes a search string that contains full or partial RBAC Role name in directory.
     *
     * @param role contains full or partial RBAC role name.
     * @return List of type Role containing fully populated matching RBAC Role entities.  If no records found this will be empty.
     * @throws SecurityException in the event of DAO search error.
     */
    List<Role> search( Role role ) throws SecurityException
    {
        return rDao.findRoles( role );
    }


    /**
     * Takes a search string that contains full or partial RBAC Role name in directory.
     * This search is used by RealmMgr for Websphere.
     *
     * @param role  contains full or partial RBAC role name.
     * @param limit specify the max number of records to return in result set.
     * @return List of type String containing RBAC Role name of all matching User entities.  If no records found this will be empty.
     * @throws SecurityException in the event of DAO search error.
     */
    List<String> search( Role role, int limit ) throws SecurityException
    {
        return rDao.findRoles( role, limit );
    }


    /**
     * Given a particular group, containing a set of members, return all Roles with a matching member.
     *
     * @param group contains a list of member names pertaining to roles.
     * @return List of type Role containing fully populated matching RBAC Role entities.  If no records found this will be empty.
     * @throws SecurityException in the event not the right type of Group or DAO search error.
     */
    List<Role> search( Group group ) throws SecurityException
    {
        // Only ROLE-typed groups hold role members; reject anything else early.
        if ( group.getType() != Group.Type.ROLE )
        {
            String info = "search failed for Group [" + group.getName() + "], group must be of type ROLE.";
            throw new ValidationException( GlobalErrIds.GROUP_TYPE_INVLD, info );
        }
        return rDao.groupRoles( group );
    }


    /**
     * Return all Roles that have a parent assignment.  This used for hierarchical processing.
     *
     * @param contextId maps to sub-tree in DIT, e.g. ou=contextId, dc=example, dc=com.
     * @return List of type Role containing {@link Role#name} and {@link Role#parents} populated.
     * @throws SecurityException in the event of DAO search error.
     */
    List<Graphable> getAllDescendants( String contextId ) throws SecurityException
    {
        return rDao.getAllDescendants( contextId );
    }


    /**
     * Adds a new Role entity to directory.  The Role entity input object will be validated to ensure that:
     * role name is present, and reasonability checks on all of the other populated values.
     *
     * @param entity Role entity contains data targeted for insertion.
     * @return Role entity copy of input + additional attributes (internalId) that were added by op.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    Role add( Role entity ) throws SecurityException
    {
        validate( entity );
        return rDao.create( entity );
    }


    /**
     * Updates existing Role entity in directory.  For example the Role description and temporal constraints
     * updated.
     *
     * @param entity Role entity contains data targeted for updating.
     * @return Role entity contains fully populated updated entity.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    Role update( Role entity ) throws SecurityException
    {
        validate( entity );
        return rDao.update( entity );
    }


    /**
     * Removes parent role assignments from Role entity in directory.
     *
     * @param entity Role entity contains data targeted for updating.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    void deleteParent( Role entity ) throws SecurityException
    {
        validate( entity );
        rDao.deleteParent( entity );
    }


    /**
     * Method will add the "roleOccupant" attribute on OpenLDAP entry which represents an RBAC Role assignment in Fortress.
     *
     * @param entity contains the role name targeted.
     * @param userDn String contains the dn for the user entry that is being assigned the RBAC Role.
     * @return Role containing copy of input data.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    Role assign( Role entity, String userDn ) throws SecurityException
    {
        return rDao.assign( entity, userDn );
    }


    /**
     * Method will remove the "roleOccupant" attribute on OpenLDAP entry which represents an RBAC Role assignment in Fortress.
     *
     * @param entity contains the role name targeted.
     * @param userDn String contains the dn for the user entry that is being assigned the RBAC Role.
     * @return Role containing copy of input data.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    Role deassign( Role entity, String userDn ) throws SecurityException
    {
        return rDao.deassign( entity, userDn );
    }


    /**
     * Add the User dn occupant attribute to the OrganizationalRole entity in ldap.  This method is called by AdminMgrImpl
     * when the User is being added.
     *
     * @param uRoles    contains a collection of UserRole being targeted for assignment.
     * @param userDn    contains the userId targeted for addition.
     * @param contextId maps to sub-tree in DIT, e.g. ou=contextId, dc=example, dc=com.
     * @throws SecurityException in the event of DAO search error.
     */
    void addOccupant( List<UserRole> uRoles, String userDn, String contextId ) throws SecurityException
    {
        if ( CollectionUtils.isNotEmpty( uRoles ) )
        {
            for ( UserRole uRole : uRoles )
            {
                Role role = new Role( uRole.getName() );
                role.setContextId( contextId );
                assign( role, userDn );
            }
        }
    }


    /**
     * Remove the User dn occupant attribute from the OrganizationalRole entity in ldap.  This method is called by AdminMgrImpl
     * when the User is being deleted.
     *
     * @param userDn    contains the userId targeted for attribute removal.
     * @param contextId maps to sub-tree in DIT, e.g. ou=contextId, dc=example, dc=com.
     * @throws SecurityException in the event of DAO search error.
     */
    void removeOccupant( String userDn, String contextId ) throws SecurityException
    {
        List<String> list;
        try
        {
            list = rDao.findAssignedRoles( userDn, contextId );
            for ( String roleNm : list )
            {
                Role role = new Role( roleNm );
                role.setContextId( contextId );
                deassign( role, userDn );
            }
        }
        catch ( FinderException fe )
        {
            // Preserve the cause so the DAO failure is diagnosable upstream.
            String error = "removeOccupant userDn [" + userDn + "] caught FinderException=" + fe;
            throw new SecurityException( GlobalErrIds.ROLE_REMOVE_OCCUPANT_FAILED, error, fe );
        }
    }


    /**
     * This method performs a "hard" delete.  It completely removes the RBAC Role node from the ldap directory.
     * RBAC Role entity must exist in directory prior to making this call else exception will be thrown.
     *
     * @param entity Contains the name of the RBAC Role targeted for deletion.
     * @throws SecurityException in the event of data validation or DAO system error.
     */
    void delete( Role entity ) throws SecurityException
    {
        rDao.remove( entity );
    }


    /**
     * Method will perform simple validations to ensure the integrity of the RBAC Role entity targeted for insertion
     * or updating in directory.  For example the Role temporal constraints will be validated.  Data reasonability
     * checks will be performed on all non-null attributes.
     *
     * @param entity contains data targeted for insertion or update.
     * @throws org.apache.directory.fortress.core.ValidationException in the event of data validation error or Org validation.
     */
    private void validate( Role entity ) throws ValidationException
    {
        VUtil.safeText( entity.getName(), GlobalIds.ROLE_LEN );
        if ( StringUtils.isNotEmpty( entity.getDescription() ) )
        {
            VUtil.description( entity.getDescription() );
        }
        if ( entity.getTimeout() != null )
        {
            constraintValidator.timeout( entity.getTimeout() );
        }
        if ( StringUtils.isNotEmpty( entity.getBeginTime() ) )
        {
            constraintValidator.beginTime( entity.getBeginTime() );
        }
        if ( StringUtils.isNotEmpty( entity.getEndTime() ) )
        {
            constraintValidator.endTime( entity.getEndTime() );
        }
        if ( StringUtils.isNotEmpty( entity.getBeginDate() ) )
        {
            constraintValidator.beginDate( entity.getBeginDate() );
        }
        if ( StringUtils.isNotEmpty( entity.getEndDate() ) )
        {
            constraintValidator.endDate( entity.getEndDate() );
        }
        if ( StringUtils.isNotEmpty( entity.getDayMask() ) )
        {
            constraintValidator.dayMask( entity.getDayMask() );
        }
        if ( StringUtils.isNotEmpty( entity.getBeginLockDate() ) )
        {
            // FIX: original validated entity.getBeginDate() here (copy-paste
            // bug), so the begin-lock-date value was never checked.
            constraintValidator.beginDate( entity.getBeginLockDate() );
        }
        if ( StringUtils.isNotEmpty( entity.getEndLockDate() ) )
        {
            constraintValidator.endDate( entity.getEndLockDate() );
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.mongodb; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Core; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.PluginBuilderAttribute; import org.apache.logging.log4j.core.config.plugins.PluginBuilderFactory; import org.apache.logging.log4j.core.config.plugins.PluginFactory; import org.apache.logging.log4j.core.config.plugins.convert.TypeConverters; import org.apache.logging.log4j.core.config.plugins.validation.constraints.Required; import org.apache.logging.log4j.core.config.plugins.validation.constraints.ValidHost; import org.apache.logging.log4j.core.config.plugins.validation.constraints.ValidPort; import org.apache.logging.log4j.core.filter.AbstractFilterable; import org.apache.logging.log4j.core.util.NameUtil; import org.apache.logging.log4j.core.appender.nosql.NoSqlProvider; import org.apache.logging.log4j.status.StatusLogger; import org.apache.logging.log4j.util.LoaderUtil; import org.apache.logging.log4j.util.Strings; import com.mongodb.DB; import 
com.mongodb.MongoClient; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; import com.mongodb.WriteConcern; /** * The MongoDB implementation of {@link NoSqlProvider}. */ @Plugin(name = "MongoDb", category = Core.CATEGORY_NAME, printObject = true) public final class MongoDbProvider implements NoSqlProvider<MongoDbConnection> { private static final WriteConcern DEFAULT_WRITE_CONCERN = WriteConcern.ACKNOWLEDGED; private static final Logger LOGGER = StatusLogger.getLogger(); private static final int DEFAULT_PORT = 27017; private static final int DEFAULT_COLLECTION_SIZE = 536870912; private final String collectionName; private final DB database; private final String description; private final WriteConcern writeConcern; private final boolean isCapped; private final Integer collectionSize; private MongoDbProvider(final DB database, final WriteConcern writeConcern, final String collectionName, final boolean isCapped, final Integer collectionSize, final String description) { this.database = database; this.writeConcern = writeConcern; this.collectionName = collectionName; this.isCapped = isCapped; this.collectionSize = collectionSize; this.description = "mongoDb{ " + description + " }"; } @Override public MongoDbConnection getConnection() { return new MongoDbConnection(this.database, this.writeConcern, this.collectionName, this.isCapped, this.collectionSize); } @Override public String toString() { return this.description; } /** * Factory method for creating a MongoDB provider within the plugin manager. * * @param collectionName The name of the MongoDB collection to which log events should be written. * @param writeConcernConstant The {@link WriteConcern} constant to control writing details, defaults to * {@link WriteConcern#ACKNOWLEDGED}. * @param writeConcernConstantClassName The name of a class containing the aforementioned static WriteConcern * constant. Defaults to {@link WriteConcern}. 
* @param databaseName The name of the MongoDB database containing the collection to which log events should be * written. Mutually exclusive with {@code factoryClassName&factoryMethodName!=null}. * @param server The host name of the MongoDB server, defaults to localhost and mutually exclusive with * {@code factoryClassName&factoryMethodName!=null}. * @param port The port the MongoDB server is listening on, defaults to the default MongoDB port and mutually * exclusive with {@code factoryClassName&factoryMethodName!=null}. * @param userName The username to authenticate against the MongoDB server with. * @param password The password to authenticate against the MongoDB server with. * @param factoryClassName A fully qualified class name containing a static factory method capable of returning a * {@link DB} or a {@link MongoClient}. * @param factoryMethodName The name of the public static factory method belonging to the aforementioned factory * class. * @return a new MongoDB provider. * @deprecated in 2.8; use {@link #newBuilder()} instead. 
*/ @PluginFactory public static MongoDbProvider createNoSqlProvider( final String collectionName, final String writeConcernConstant, final String writeConcernConstantClassName, final String databaseName, final String server, final String port, final String userName, final String password, final String factoryClassName, final String factoryMethodName) { LOGGER.info("createNoSqlProvider"); return newBuilder().setCollectionName(collectionName).setWriteConcernConstant(writeConcernConstantClassName) .setWriteConcernConstant(writeConcernConstant).setDatabaseName(databaseName).setServer(server) .setPort(port).setUserName(userName).setPassword(password).setFactoryClassName(factoryClassName) .setFactoryMethodName(factoryMethodName).build(); } @PluginBuilderFactory public static <B extends Builder<B>> B newBuilder() { return new Builder<B>().asBuilder(); } public static class Builder<B extends Builder<B>> extends AbstractFilterable.Builder<B> implements org.apache.logging.log4j.core.util.Builder<MongoDbProvider> { @PluginBuilderAttribute @ValidHost private String server = "localhost"; @PluginBuilderAttribute @ValidPort private String port = "" + DEFAULT_PORT; @PluginBuilderAttribute @Required(message = "No database name provided") private String databaseName; @PluginBuilderAttribute @Required(message = "No collection name provided") private String collectionName; @PluginBuilderAttribute private String userName; @PluginBuilderAttribute(sensitive = true) private String password; @PluginBuilderAttribute("capped") private boolean isCapped = false; @PluginBuilderAttribute private int collectionSize = DEFAULT_COLLECTION_SIZE; @PluginBuilderAttribute private String factoryClassName; @PluginBuilderAttribute private String factoryMethodName; @PluginBuilderAttribute private String writeConcernConstantClassName; @PluginBuilderAttribute private String writeConcernConstant; public B setServer(final String server) { this.server = server; return asBuilder(); } public B setPort(final String 
port) {
            this.port = port;
            return asBuilder();
        }

        // Fluent setters; each returns this builder (via asBuilder()) for chaining.

        public B setDatabaseName(final String databaseName) {
            this.databaseName = databaseName;
            return asBuilder();
        }

        public B setCollectionName(final String collectionName) {
            this.collectionName = collectionName;
            return asBuilder();
        }

        public B setUserName(final String userName) {
            this.userName = userName;
            return asBuilder();
        }

        public B setPassword(final String password) {
            this.password = password;
            return asBuilder();
        }

        public B setCapped(final boolean isCapped) {
            this.isCapped = isCapped;
            return asBuilder();
        }

        public B setCollectionSize(final int collectionSize) {
            this.collectionSize = collectionSize;
            return asBuilder();
        }

        public B setFactoryClassName(final String factoryClassName) {
            this.factoryClassName = factoryClassName;
            return asBuilder();
        }

        public B setFactoryMethodName(final String factoryMethodName) {
            this.factoryMethodName = factoryMethodName;
            return asBuilder();
        }

        public B setWriteConcernConstantClassName(final String writeConcernConstantClassName) {
            this.writeConcernConstantClassName = writeConcernConstantClassName;
            return asBuilder();
        }

        public B setWriteConcernConstant(final String writeConcernConstant) {
            this.writeConcernConstant = writeConcernConstant;
            return asBuilder();
        }

        /**
         * Builds the provider.  Two mutually exclusive construction paths:
         * (1) factoryClassName + factoryMethodName: reflectively invoke a public static no-arg
         *     factory returning either a {@code DB} or a {@code MongoClient} (the latter also
         *     requires databaseName); or
         * (2) databaseName (+ server/port and optional credentials): open a MongoClient directly.
         * Per plugin-builder convention, configuration/connection failures are logged and
         * {@code null} is returned rather than throwing.
         */
        @Override
        public MongoDbProvider build() {
            DB database;
            String description;
            if (Strings.isNotEmpty(factoryClassName) && Strings.isNotEmpty(factoryMethodName)) {
                try {
                    // Path 1: user-supplied static factory method.
                    final Class<?> factoryClass = LoaderUtil.loadClass(factoryClassName);
                    final Method method = factoryClass.getMethod(factoryMethodName);
                    final Object object = method.invoke(null);
                    if (object instanceof DB) {
                        database = (DB) object;
                    } else if (object instanceof MongoClient) {
                        if (Strings.isNotEmpty(databaseName)) {
                            database = ((MongoClient) object).getDB(databaseName);
                        } else {
                            LOGGER.error("The factory method [{}.{}()] returned a MongoClient so the database name is "
                                    + "required.", factoryClassName, factoryMethodName);
                            return null;
                        }
                    } else if (object == null) {
                        LOGGER.error("The factory method [{}.{}()] returned null.", factoryClassName,
                                factoryMethodName);
                        return null;
                    } else {
                        LOGGER.error("The factory method [{}.{}()] returned an unsupported type [{}].",
                                factoryClassName, factoryMethodName, object.getClass().getName());
                        return null;
                    }
                    // Build the human-readable description used by toString().
                    description = "database=" + database.getName();
                    final List<ServerAddress> addresses = database.getMongo().getAllAddress();
                    if (addresses.size() == 1) {
                        description += ", server=" + addresses.get(0).getHost() + ", port="
                                + addresses.get(0).getPort();
                    } else {
                        description += ", servers=[";
                        for (final ServerAddress address : addresses) {
                            description += " { " + address.getHost() + ", " + address.getPort() + " } ";
                        }
                        description += "]";
                    }
                } catch (final ClassNotFoundException e) {
                    LOGGER.error("The factory class [{}] could not be loaded.", factoryClassName, e);
                    return null;
                } catch (final NoSuchMethodException e) {
                    LOGGER.error("The factory class [{}] does not have a no-arg method named [{}].", factoryClassName,
                            factoryMethodName, e);
                    return null;
                } catch (final Exception e) {
                    // Covers IllegalAccessException / InvocationTargetException from the reflective call.
                    LOGGER.error("The factory method [{}.{}()] could not be invoked.", factoryClassName,
                            factoryMethodName, e);
                    return null;
                }
            } else if (Strings.isNotEmpty(databaseName)) {
                // Path 2: connect directly from server/port plus optional credentials.
                final List<MongoCredential> credentials = new ArrayList<>();
                description = "database=" + databaseName;
                if (Strings.isNotEmpty(userName) && Strings.isNotEmpty(password)) {
                    // Only an MD5 digest of (password + class name) goes into the description —
                    // never the raw password.
                    description += ", username=" + userName + ", passwordHash="
                            + NameUtil.md5(password + MongoDbProvider.class.getName());
                    credentials.add(MongoCredential.createCredential(userName, databaseName, password.toCharArray()));
                }
                try {
                    // Invalid port strings fall back to DEFAULT_PORT via the converter.
                    final int portInt = TypeConverters.convert(port, int.class, DEFAULT_PORT);
                    description += ", server=" + server + ", port=" + portInt;
                    database = new MongoClient(new ServerAddress(server, portInt), credentials).getDB(databaseName);
                } catch (final Exception e) {
                    LOGGER.error(
                            "Failed to obtain a database instance from the MongoClient at server [{}] and "
                                    + "port [{}].", server, port);
                    return null;
                }
            } else {
                LOGGER.error("No factory method was provided so the database name is required.");
                return null;
            }
            try {
                database.getCollectionNames(); // Check if the database actually requires authentication
            } catch (final Exception e) {
                LOGGER.error(
                        "The database is not up, or you are not authenticated, try supplying a username and password to the MongoDB provider.",
                        e);
                return null;
            }
            final WriteConcern writeConcern = toWriteConcern(writeConcernConstant, writeConcernConstantClassName);
            return new MongoDbProvider(database, writeConcern, collectionName, isCapped, collectionSize, description);
        }

        /**
         * Resolves the write concern: from a named public static field on
         * writeConcernConstantClassName when given, otherwise via WriteConcern.valueOf(name);
         * any failure falls back to DEFAULT_WRITE_CONCERN (logged, never thrown).
         */
        private static WriteConcern toWriteConcern(final String writeConcernConstant,
                final String writeConcernConstantClassName) {
            WriteConcern writeConcern;
            if (Strings.isNotEmpty(writeConcernConstant)) {
                if (Strings.isNotEmpty(writeConcernConstantClassName)) {
                    try {
                        final Class<?> writeConcernConstantClass = LoaderUtil.loadClass(writeConcernConstantClassName);
                        final Field field = writeConcernConstantClass.getField(writeConcernConstant);
                        writeConcern = (WriteConcern) field.get(null);
                    } catch (final Exception e) {
                        LOGGER.error("Write concern constant [{}.{}] not found, using default.",
                                writeConcernConstantClassName, writeConcernConstant);
                        writeConcern = DEFAULT_WRITE_CONCERN;
                    }
                } else {
                    writeConcern = WriteConcern.valueOf(writeConcernConstant);
                    if (writeConcern == null) {
                        LOGGER.warn("Write concern constant [{}] not found, using default.", writeConcernConstant);
                        writeConcern = DEFAULT_WRITE_CONCERN;
                    }
                }
            } else {
                writeConcern = DEFAULT_WRITE_CONCERN;
            }
            return writeConcern;
        }
    }
}
package com.gzq.lib_resource.mvvm.base; import android.content.Intent; import android.databinding.DataBindingUtil; import android.databinding.ViewDataBinding; import android.os.Bundle; import me.yokeyword.fragmentation.SupportActivity; /** * Created by gzq on 2017/6/15. */ public abstract class BaseActivity<V extends ViewDataBinding, VM extends BaseViewModel> extends SupportActivity { protected V binding; protected VM viewModel; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); //页面接受的参数方法 initParam(getIntent(), getIntent().getExtras()); //私有的初始化Databinding和ViewModel方法 initViewDataBinding(savedInstanceState); } @Override protected void onDestroy() { super.onDestroy(); //解除ViewModel生命周期感应 if (viewModel != null) { getLifecycle().removeObserver(viewModel); } if (binding != null) { binding.unbind(); } } private void initViewDataBinding(Bundle savedInstanceState) { //DataBindingUtil类需要在project的build中配置 dataBinding {enabled true }, 同步后会自动关联android.databinding包 binding = DataBindingUtil.setContentView(this, layoutId(savedInstanceState)); viewModel = setViewModel(binding); if (viewModel != null) { getLifecycle().addObserver(viewModel); } setOtherModel(binding); } public abstract void initParam(Intent intentArgument, Bundle bundleArgument); public abstract int layoutId(Bundle savedInstanceState); public abstract VM setViewModel(V binding); /** * 如果不止ViewModel和Controller这个两variable设置到binding中 * * @param binding */ public abstract void setOtherModel(V binding); }
package com.msun.thirdpartyPay.alipay.model.pay; import java.io.Serializable; import com.msun.thirdpartyPay.alipay.annotation.AlipayOptional; /** * 支付宝PC和WAP公共支付明细 * * @author zxc Apr 13, 2017 6:40:57 PM */ public class PayDetail implements Serializable { private static final long serialVersionUID = 5892926888312847503L; /** * 我方唯一订单号 {@link com.msun.thirdpartyPay.alipay.model.enums.AlipayField#OUT_TRADE_NO} */ protected String outTradeNo; /** * 商品名称 {@link com.msun.thirdpartyPay.alipay.model.enums.AlipayField#ORDER_NAME} */ protected String orderName; /** * 商品金额(元) {@link com.msun.thirdpartyPay.alipay.model.enums.AlipayField#TOTAL_FEE} */ protected String totalFee; /** * 支付宝后置通知url,若为空,则使用Alipay类中的notifyUrl {@link com.msun.thirdpartyPay.alipay.model.enums.AlipayField#NOTIFY_URL} */ @AlipayOptional protected String notifyUrl; /** * 支付宝前端跳转url,若为空,则使用Alipay类中的returnUrl {@link com.msun.thirdpartyPay.alipay.model.enums.AlipayField#RETURN_URL} */ @AlipayOptional protected String returnUrl; public PayDetail(String outTradeNo, String orderName, String totalFee) { this.outTradeNo = outTradeNo; this.orderName = orderName; this.totalFee = totalFee; } public String getOutTradeNo() { return outTradeNo; } public void setOutTradeNo(String outTradeNo) { this.outTradeNo = outTradeNo; } public String getOrderName() { return orderName; } public void setOrderName(String orderName) { this.orderName = orderName; } public String getTotalFee() { return totalFee; } public void setTotalFee(String totalFee) { this.totalFee = totalFee; } public String getNotifyUrl() { return notifyUrl; } public void setNotifyUrl(String notifyUrl) { this.notifyUrl = notifyUrl; } public String getReturnUrl() { return returnUrl; } public void setReturnUrl(String returnUrl) { this.returnUrl = returnUrl; } @Override public String toString() { return "PayFields{" + "outTradeNo='" + outTradeNo + '\'' + ", orderName='" + orderName + '\'' + ", totalFee='" + totalFee + '\'' + ", notifyUrl='" + notifyUrl + '\'' + ", 
returnUrl='" + returnUrl + '\'' + '}'; } }
/*
 * Copyright 2019 Anton Tananaev (anton@traccar.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.traccar.protocol;

import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import org.traccar.BaseFrameDecoder;
import org.traccar.BasePipelineFactory;

import java.nio.charset.StandardCharsets;

/**
 * Splits the inbound byte stream into Navis frames.  Two framings are supported:
 * NTCB (fixed 16-byte header carrying the payload length) and FLEX (two ASCII type
 * characters, e.g. "~A", followed by a type-dependent payload).
 */
public class NavisFrameDecoder extends BaseFrameDecoder {

    // NTCB framing: 16-byte header with a little-endian payload length at offset 12.
    private static final int NTCB_HEADER_LENGTH = 16;
    private static final int NTCB_LENGTH_OFFSET = 12;
    // FLEX framing: 2 ASCII type characters precede the payload.
    private static final int FLEX_HEADER_LENGTH = 2;

    // Per-record data size for FLEX frames; 0 means FLEX has not been negotiated,
    // in which case NTCB framing is assumed.
    private int flexDataSize;

    public void setFlexDataSize(int flexDataSize) {
        this.flexDataSize = flexDataSize;
    }

    @Override
    protected Object decode(
            ChannelHandlerContext ctx, Channel channel, ByteBuf buf) throws Exception {

        if (buf.getByte(buf.readerIndex()) == 0x7F) {
            return buf.readRetainedSlice(1); // keep alive
        }

        // Lazily pick up the FLEX record size negotiated by the protocol decoder in this pipeline.
        if (ctx != null && flexDataSize == 0) {
            NavisProtocolDecoder protocolDecoder =
                    BasePipelineFactory.getHandler(ctx.pipeline(), NavisProtocolDecoder.class);
            if (protocolDecoder != null) {
                flexDataSize = protocolDecoder.getFlexDataSize();
            }
        }

        if (flexDataSize > 0) {
            // FLEX framing: frame length depends on the 2-character type marker.
            if (buf.readableBytes() > FLEX_HEADER_LENGTH) {
                int length = 0;
                String type = buf.toString(buf.readerIndex(), 2, StandardCharsets.US_ASCII);
                switch (type) {
                    // FLEX 1.0
                    case "~A":
                        // Array: record count byte follows the header; +1 count byte, +1 checksum.
                        length = flexDataSize * buf.getByte(buf.readerIndex() + FLEX_HEADER_LENGTH) + 1 + 1;
                        break;
                    case "~T":
                        // Single record plus 4 extra bytes (presumably an event index — TODO confirm) and checksum.
                        length = flexDataSize + 4 + 1;
                        break;
                    case "~C":
                        length = flexDataSize + 1;
                        break;
                    // FLEX 2.0 (Extra packages)
                    case "~E":
                        // Variable-size sub-packets, each prefixed by a 2-byte big-endian length.
                        length++;
                        for (int i = 0; i < buf.getByte(buf.readerIndex() + FLEX_HEADER_LENGTH); i++) {
                            if (buf.readableBytes() > FLEX_HEADER_LENGTH + length + 1) {
                                // NOTE(review): offset is relative to the buffer start, not readerIndex —
                                // confirm this decoder always sees readerIndex() == 0 here.
                                length += buf.getUnsignedShort(length + FLEX_HEADER_LENGTH) + 2;
                            } else {
                                return null; // not enough data yet
                            }
                        }
                        length++;
                        break;
                    case "~X":
                        length = buf.getUnsignedShortLE(buf.readerIndex() + FLEX_HEADER_LENGTH) + 4 + 1;
                        break;
                    default:
                        return null; // unknown type marker; wait for more data
                }
                if (buf.readableBytes() >= FLEX_HEADER_LENGTH + length) {
                    // NOTE(review): returns the entire readable region rather than
                    // FLEX_HEADER_LENGTH + length — presumably a FLEX frame always spans the
                    // whole buffer delivered here; confirm against the protocol decoder.
                    return buf.readRetainedSlice(buf.readableBytes());
                }
            }
        } else {
            // NTCB framing: fixed header carries the payload length.
            if (buf.readableBytes() < NTCB_HEADER_LENGTH) {
                return null;
            }
            int length = NTCB_HEADER_LENGTH + buf.getUnsignedShortLE(buf.readerIndex() + NTCB_LENGTH_OFFSET);
            if (buf.readableBytes() >= length) {
                return buf.readRetainedSlice(length);
            }
        }

        return null; // incomplete frame; decoder will be called again with more data
    }

}
// Java Output Formatting"https://www.hackerrank.com/challenges/java-output-formatting/problem" import java.util.Scanner; public class Exercise4 { public static void main(String[] args) { Scanner sc = new Scanner(System.in); System.out.println("================================"); for(int i=0; i< 3; i++){ String s1 = sc.next(); int x = sc.nextInt(); //Complete this line System.out.printf("%-15s%03d%n", s1, x); } System.out.println("================================"); sc.close(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bpmscript.integration.spring; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import junit.framework.TestCase; import org.bpmscript.ICompletedResult; import org.bpmscript.IFailedResult; import org.bpmscript.benchmark.Benchmark; import org.bpmscript.benchmark.IBenchmarkCallback; import org.bpmscript.benchmark.IBenchmarkPrinter; import org.bpmscript.benchmark.IWaitForCallback; import org.bpmscript.exec.js.JavascriptProcessDefinition; import org.bpmscript.loanbroker.LoanRequest; import org.bpmscript.process.BpmScriptEngine; import org.bpmscript.process.IVersionedDefinitionManager; import org.bpmscript.process.listener.LoggingInstanceListener; import org.bpmscript.util.StreamService; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.integration.channel.ChannelRegistry; import org.springframework.integration.channel.MessageChannel; import org.springframework.integration.message.GenericMessage; /** * Ok, here's the loan broker... I wonder why it's so slow... 
 */
public class SpringLoanBrokerTest extends TestCase {

    @SuppressWarnings("unchecked")
    public void testReply() throws Exception {
        // Number of loan-broker conversations to drive through the engine.
        int total = 1;
        // Counted down once per completed process instance; the benchmark blocks on it below.
        final CountDownLatch latch = new CountDownLatch(total);
        final ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(
                "/org/bpmscript/integration/spring/spring.xml");
        try {
            final BpmScriptEngine engine = (BpmScriptEngine) context.getBean("engine");
            final IVersionedDefinitionManager processManager = (IVersionedDefinitionManager) context
                    .getBean("versionedDefinitionManager");
            // Grab whichever ChannelRegistry bean is defined (name unknown, so look up by type).
            final ChannelRegistry channelRegistry = (ChannelRegistry) ((Map) context
                    .getBeansOfType(ChannelRegistry.class)).values().iterator().next();
            // Register the loan-broker javascript process definition before sending any messages.
            processManager.createDefinition("id", new JavascriptProcessDefinition("loanBroker",
                    StreamService.DEFAULT_INSTANCE
                            .getResourceAsString("/org/bpmscript/integration/spring/loanbroker.js")));
            // Listener must be installed before the benchmark starts so no completion is missed.
            engine.setInstanceListener(new LoggingInstanceListener() {
                @Override
                public void instanceCompleted(String pid, ICompletedResult result) {
                    super.instanceCompleted(pid, result);
                    latch.countDown();
                }

                @Override
                public void instanceFailed(String pid, IFailedResult result) {
                    super.instanceFailed(pid, result);
                    // Fail the JUnit test immediately on any process failure.
                    fail(result.getThrowable().getMessage());
                }
            });
            IBenchmarkPrinter.STDOUT.print(new Benchmark().execute(total, new IBenchmarkCallback() {
                @SuppressWarnings("unchecked")
                public void execute(int count) throws Exception {
                    // One loan request per benchmark iteration, addressed to the loanBroker definition.
                    GenericMessage<Object[]> message = new GenericMessage<Object[]>(new Object[] { new LoanRequest(
                            "asdf", 1, 1000) });
                    message.getHeader().setAttribute("definitionName", "loanBroker");
                    message.getHeader().setAttribute("operation", "requestBestRate");
                    // Replies are routed to the recorder channel asserted on below.
                    message.getHeader().setReturnAddress("channel-recorder");
                    MessageChannel channel = channelRegistry.lookupChannel("channel-bpmscript-first");
                    channel.send(message);
                }
            }, new IWaitForCallback() {
                public void call() throws Exception {
                    // Block until every instance has completed (counted down by the listener above).
                    latch.await();
                }
            }, false));
            SpringRecorder springRecorder = (SpringRecorder) context.getBean("springRecorder");
            BlockingQueue<Object> messages = springRecorder.getMessages();
            // Each conversation must have produced a reply on the recorder channel.
            for (int i = 0; i < total; i++) {
                Object poll = messages.poll(1, TimeUnit.SECONDS);
                assertNotNull("should have got to " + total + " but got to " + i, poll);
            }
        } finally {
            context.destroy();
        }
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.redextest; import static org.fest.assertions.api.Assertions.assertThat; import com.facebook.annotations.OkToExtend; import org.junit.Test; @OkToExtend class Base { String foo() { return "Base"; } static Base getInstance() { return new Base(); } } @OkToExtend class SubOne extends Base { static Base getInstance() { return new SubOne(); } } @OkToExtend class SubTwo extends SubOne { @Override String foo() { return "SubTwo"; } static Base getInstance() { return new SubTwo(); } } class SubThree extends SubTwo { @Override String foo() { return "SubThree"; } static Base getInstance() { return new SubThree(); } } public class ResolveRefsTest { @Test public void testSimpleInvokeVirtual() { Base b = new Base(); assertThat(b.foo()).isEqualTo("Base"); SubOne s1 = new SubOne(); assertThat(s1.foo()).isEqualTo("Base"); SubTwo s2 = new SubTwo(); assertThat(s2.foo()).isEqualTo("SubTwo"); SubThree s3 = new SubThree(); assertThat(s3.foo()).isEqualTo("SubThree"); } @Test public void testFactoryBaseInvokeVirtual() { Base b = Base.getInstance(); assertThat(b.foo()).isEqualTo("Base"); b = SubOne.getInstance(); assertThat(b.foo()).isEqualTo("Base"); b = SubTwo.getInstance(); assertThat(b.foo()).isEqualTo("SubTwo"); b = SubThree.getInstance(); assertThat(b.foo()).isEqualTo("SubThree"); } @Test public void testFactoryCastInvokeVirtual() { SubOne s1 = (SubOne) SubOne.getInstance(); assertThat(s1.foo()).isEqualTo("Base"); SubTwo s2 = (SubTwo) SubTwo.getInstance(); assertThat(s2.foo()).isEqualTo("SubTwo"); SubTwo s3 = (SubThree) SubThree.getInstance(); assertThat(s3.foo()).isEqualTo("SubThree"); } }
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.errorpronechecks; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import javax.lang.model.element.Modifier; import com.google.auto.service.AutoService; import com.google.errorprone.BugPattern; import com.google.errorprone.VisitorState; import com.google.errorprone.bugpatterns.BugChecker; import com.google.errorprone.bugpatterns.BugChecker.ClassTreeMatcher; import com.google.errorprone.bugpatterns.BugChecker.MethodTreeMatcher; import com.google.errorprone.matchers.Description; import com.sun.source.tree.ClassTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.ModifiersTree; import com.sun.source.tree.VariableTree; @AutoService(BugChecker.class) @BugPattern( summary = "Method input parameters must be final.", severity = WARNING, linkType = BugPattern.LinkType.NONE) public class MethodInputParametersMustBeFinal extends BugChecker implements MethodTreeMatcher, ClassTreeMatcher { private boolean isAbstraction = false; @Override public Description matchClass(final ClassTree tree, final VisitorState state) { isAbstraction = isInterface(tree.getModifiers()) || isAnonymousClassInAbstraction(tree) || isEnumInAbstraction(tree); return Description.NO_MATCH; } @Override public Description matchMethod(final MethodTree tree, final VisitorState state) { final ModifiersTree mods = tree.getModifiers(); if (isAbstraction) 
{ if (isConcreteMethod(mods)) { return matchParameters(tree); } } else if (isNotAbstract(mods)) { return matchParameters(tree); } return Description.NO_MATCH; } private Description matchParameters(final MethodTree tree) { for (final VariableTree inputParameter : tree.getParameters()) { if (isMissingFinalModifier(inputParameter)) { return describeMatch(tree); } } return Description.NO_MATCH; } private boolean isMissingFinalModifier(final VariableTree inputParameter) { return !inputParameter.getModifiers().getFlags().contains(Modifier.FINAL); } private boolean isNotAbstract(final ModifiersTree mods) { return !mods.getFlags().contains(Modifier.ABSTRACT); } @SuppressWarnings("TreeToString") private boolean isInterface(final ModifiersTree mods) { return mods.toString().contains("interface"); } private boolean isConcreteMethod(final ModifiersTree mods) { return mods.getFlags().contains(Modifier.DEFAULT) || mods.getFlags().contains(Modifier.STATIC); } private boolean isAnonymousClassInAbstraction(final ClassTree tree) { return isAbstraction && isAnonymousClass(tree); } private boolean isAnonymousClass(final ClassTree tree) { return tree.getSimpleName().contentEquals(""); } private boolean isEnumInAbstraction(final ClassTree tree) { return isAbstraction && isEnum(tree); } @SuppressWarnings("TreeToString") private boolean isEnum(final ClassTree tree) { return tree.toString().contains("enum"); } }
package com.cjm721.overloaded.util;

import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.BufferBuilder;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.vertex.DefaultVertexFormats;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.lwjgl.opengl.GL11;

import java.util.List;

/**
 * Client-side helpers for rendering translucent "ghost" previews of block models
 * (e.g. placement previews). All methods are client-only.
 */
public class RenderUtil {

  /**
   * Tessellates all quads of {@code model} (per-face and general) into the shared buffer and
   * draws them, forcing the given alpha into each quad's color.
   */
  @SideOnly(Side.CLIENT)
  private static void renderModel(
      final IBakedModel model,
      final IBlockState state,
      final World worldObj,
      final BlockPos blockPos,
      final int alpha) {
    final Tessellator tessellator = Tessellator.getInstance();
    final BufferBuilder worldRenderer = tessellator.getBuffer();
    worldRenderer.begin(GL11.GL_QUADS, DefaultVertexFormats.BLOCK);
    try {
      // Per-face quads first, then the face-independent ("general") quads.
      for (final EnumFacing enumfacing : EnumFacing.values()) {
        renderQuads(alpha, worldRenderer, model.getQuads(null, enumfacing, 0), state, worldObj, blockPos);
      }
      renderQuads(alpha, worldRenderer, model.getQuads(null, null, 0), state, worldObj, blockPos);
      tessellator.draw();
    } catch (UnsupportedOperationException ignored) {
      // Some models refuse getQuads with a null state; skip drawing rather than crash.
      // NOTE(review): if this throws after quads were buffered, the buffer is left
      // un-drawn — presumably acceptable for a preview; confirm.
    }
  }

  /**
   * Emits each quad with its color: plain alpha|white for untinted quads, otherwise the
   * block-color multiplier combined with alpha.
   */
  @SideOnly(Side.CLIENT)
  private static void renderQuads(
      final int alpha,
      final BufferBuilder renderer,
      final List<BakedQuad> quads,
      final IBlockState state,
      final World worldObj,
      final BlockPos blockPos) {
    if (quads == null) {
      return;
    }
    for (BakedQuad bakedquad : quads) {
      // -1 tint index means "no tint": use opaque white RGB with the caller's alpha bits.
      final int color =
          bakedquad.getTintIndex() == -1
              ? alpha | 0xffffff
              : getTint(alpha, bakedquad.getTintIndex(), state, worldObj, blockPos);
      net.minecraftforge.client.model.pipeline.LightUtil.renderQuadColor(renderer, bakedquad, color);
    }
  }

  /** Combines the caller's alpha bits with the world's block-color multiplier (RGB). */
  @SideOnly(Side.CLIENT)
  private static int getTint(
      final int alpha,
      final int tintIndex,
      final IBlockState state,
      final World worldObj,
      final BlockPos blockPos) {
    return alpha
        | Minecraft.getMinecraft().getBlockColors().colorMultiplier(state, worldObj, blockPos, tintIndex);
  }

  /** Looks up the baked model for {@code itemStack} and renders it as a ghost. */
  @SideOnly(Side.CLIENT)
  public static void renderGhostModel(
      final ItemStack itemStack, final IBlockState state, final World worldObj, final BlockPos blockPos) {
    IBakedModel model =
        Minecraft.getMinecraft().getRenderItem().getItemModelMesher().getItemModel(itemStack);
    renderGhostModel(model, state, worldObj, blockPos);
  }

  /**
   * Renders {@code model} translucently at {@code blockPos}. Two-pass technique: first pass
   * with color writes disabled (depth pre-pass), second with color enabled, so overlapping
   * translucent faces do not double-blend.
   */
  @SideOnly(Side.CLIENT)
  public static void renderGhostModel(
      final IBakedModel model, final IBlockState state, final World worldObj, final BlockPos blockPos) {
    // 0xaa alpha in the high byte — roughly two-thirds opaque.
    final int alpha = 0xaa000000;
    GlStateManager.bindTexture(Minecraft.getMinecraft().getTextureMapBlocks().getGlTextureId());
    GlStateManager.color(1.0f, 1.0f, 1.0f, 1.0f);
    GlStateManager.enableBlend();
    GlStateManager.enableTexture2D();
    GlStateManager.blendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
    // Pass 1: depth only (all color channels masked off).
    GlStateManager.colorMask(false, false, false, false);
    renderModel(model, state, worldObj, blockPos, alpha);
    // Pass 2: color, testing against the depth laid down in pass 1.
    GlStateManager.colorMask(true, true, true, true);
    GlStateManager.depthFunc(GL11.GL_LEQUAL);
    renderModel(model, state, worldObj, blockPos, alpha);
    GlStateManager.disableBlend();
  }
}
/*
The MIT License (MIT)

Copyright (c) 2019 Pierre Lindenbaum

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

*/
package com.github.lindenb.jvarkit.htslib;

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;

import com.github.lindenb.jvarkit.jni.CPtr;

/**
 * Thin JNI wrapper around an htslib {@code htsFile*} handle.
 *
 * <p>The native pointer lives in the {@link CPtr} base class; {@link #close()} releases it
 * and is idempotent (the pointer is nulled after the first close).
 */
public class HtsFile extends CPtr implements Closeable {
	/**
	 * Opens the file at {@code s} with mode {@code m} (htslib mode string, e.g. "r").
	 *
	 * @throws IOException if the native open fails
	 */
	public HtsFile(final Path s,final String m) throws IOException {
		this(s.toString(),m);
		}
	/**
	 * Opens the file named {@code s} with htslib mode {@code m}.
	 *
	 * @throws IOException if the native handle comes back null
	 */
	public HtsFile(final String s,final String m) throws IOException {
		super(Htslib.bind_hstfile_open(s,m));
		if(isNull()) throw new IOException("Cannot open "+s);
		}
	/** Closes the native handle if still open; safe to call more than once. */
	@Override
	public void close() {
		if(!isNull()) Htslib.bind_hstfile_close(this.get());
		setNull();
		}
	/**
	 * Reads the next record up to {@code delim} into {@code ks}.
	 *
	 * @return true if a line was read, false on end of file
	 *         (native getline returns -1 at EOF — presumably other negative codes
	 *         also map to false; TODO confirm error handling in the native binding)
	 */
	public boolean readLine(char delim,final KString ks) throws IOException  {
		return Htslib.bind_hstfile_getline(get(),delim,ks.get())!=-1;
		}
	/** Releases the native handle before delegating to the base disposal. */
	@Override
	public void dispose() {
		close();
		super.dispose();
		}
	}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor.interceptor;

import org.apache.camel.ContextTestSupport;
import org.apache.camel.Endpoint;
import org.apache.camel.FailedToCreateRouteException;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.builder.AdviceWithRouteBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.reifier.RouteReifier;
import org.junit.Test;

/**
 * Tests advising a route to replace its input ("from") endpoint, by URI string,
 * by Endpoint instance, and with an invalid URI.
 */
public class AdviceWithReplaceFromTest extends ContextTestSupport {

    /** Replacing the route input by URI: messages sent to the new seda endpoint flow through. */
    @Test
    public void testReplaceFromUri() throws Exception {
        RouteReifier.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
            @Override
            public void configure() throws Exception {
                // replace the input in the route with a new endpoint uri
                replaceFromWith("seda:foo");
            }
        });

        getMockEndpoint("mock:result").expectedMessageCount(1);

        // has been replaced to a seda endpoint instead
        template.sendBody("seda:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** Same as above, but passing the resolved Endpoint instance instead of a URI. */
    @Test
    public void testReplaceFromEndpoint() throws Exception {
        final Endpoint endpoint = context.getEndpoint("seda:foo");

        RouteReifier.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
            @Override
            public void configure() throws Exception {
                // replace the input in the route with a new endpoint
                replaceFromWith(endpoint);
            }
        });

        getMockEndpoint("mock:result").expectedMessageCount(1);

        // has been replaced to a seda endpoint instead
        template.sendBody("seda:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** An unknown scheme must fail route creation with NoSuchEndpointException as the cause. */
    @Test
    public void testReplaceFromInvalidUri() throws Exception {
        try {
            RouteReifier.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
                @Override
                public void configure() throws Exception {
                    replaceFromWith("xxx:foo");
                }
            });
            fail("Should have thrown exception");
        } catch (FailedToCreateRouteException e) {
            assertIsInstanceOf(NoSuchEndpointException.class, e.getCause());
        }
    }

    /** Base route under advice: direct:start -> mock:result. */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("mock:result");
            }
        };
    }
}
package backtracking; import java.util.*; public class MISHugePruning { static int result = 0; static int bruteInvocations = 0; static BitSet[] used = new BitSet[256]; static boolean[] colored = new boolean[256]; static { for (int i = 0; i < used.length; i++) { used[i] = new BitSet(); } } static class FastPriorityQueue { int[] h; int n; public FastPriorityQueue(int initialSize) { h = new int[initialSize]; } public void add(int value) { if (n == h.length) { h = Arrays.copyOf(h, h.length * 2); } ++n; int k = n - 1; while (k > 0) { int parent = (k - 1) >> 1; int p = h[parent]; if (value >= p) { break; } h[k] = p; k = parent; } h[k] = value; } public int remove() { int res = h[0]; --n; percolateDown(0, h[n]); return res; } void percolateDown(int k, int value) { if (n == 0) { return; } int x = n >> 1; while (k < x) { int child = (k << 1) + 1; if (child < n - 1 && h[child] > h[child + 1]) { ++child; } if (value <= h[child]) { break; } h[k] = h[child]; k = child; } h[k] = value; } } public static int misUpperBound(BitSet[] g, BitSet[] rg, BitSet vertices) { FastPriorityQueue q = new FastPriorityQueue(vertices.cardinality()); for (int i = vertices.nextSetBit(0); i >= 0; i = vertices.nextSetBit(i + 1)) { used[i].clear(); colored[i] = false; q.add(i); } int colors = 0; for (int i = vertices.nextSetBit(0); i >= 0; i = vertices.nextSetBit(i + 1)) { int bestu; while (true) { bestu = (short) q.remove(); if (!colored[bestu]) break; } int c = used[bestu].nextClearBit(0); colors = Math.max(colors, c + 1); colored[bestu] = true; BitSet vvv = (BitSet) rg[bestu].clone(); vvv.and(vertices); for (int v = vvv.nextSetBit(0); v >= 0; v = vvv.nextSetBit(v + 1)) { if (!used[v].get(c)) { used[v].set(c); if (!colored[v]) q.add(v - (used[v].cardinality() << 16)); } } } return colors; } static void mis(BitSet[] g, BitSet[] rg, BitSet vertices, int cur) { ++bruteInvocations; result = Math.max(result, cur); int upperBound; upperBound = misUpperBound(g, rg, vertices); // upperBound = 
vertices.cardinality(); if (cur + upperBound <= result) return; int besti = -1; int bestd = -1; for (int i = vertices.nextSetBit(0); i >= 0; i = vertices.nextSetBit(i + 1)) { BitSet a = (BitSet) g[i].clone(); a.and(vertices); int deg = a.cardinality(); if (bestd < deg) { bestd = deg; besti = i; } } BitSet tmp = (BitSet) vertices.clone(); vertices.clear(besti); mis(g, rg, vertices, cur); vertices.andNot(g[besti]); mis(g, rg, vertices, cur + 1); vertices.clear(); vertices.or(tmp); } public static void main(String[] args) { int n = 200; BitSet[] g = new BitSet[n]; for (int i = 0; i < g.length; i++) { g[i] = new BitSet(n); } Random rnd = new Random(1); for (int i = 0; i < n * (n - 1) / 2 * 40 / 100; i++) { int u = rnd.nextInt(n - 1) + 1; int v = rnd.nextInt(u); g[u].set(v); g[v].set(u); } BitSet[] rg = new BitSet[n]; for (int i = 0; i < n; i++) { rg[i] = (BitSet) g[i].clone(); rg[i].flip(0, n); rg[i].clear(i); } BitSet vertices = new BitSet(n); vertices.set(0, n); long time = System.currentTimeMillis(); mis(g, rg, vertices, 0); System.out.println((System.currentTimeMillis() - time) + "ms"); System.out.println(bruteInvocations + " bruteInvocations"); System.out.println(result); } }
package org.usfirst.frc308.FRC2018308.commands;

import org.usfirst.frc308.FRC2018308.Robot;

import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.CommandGroup;

/**
 * Autonomous routine: drive diagonally toward the right-side target, raise the arm
 * to switch height, release the cube, then back away.
 *
 * Fix: the previous version overrode isFinished() to return false, which prevents a
 * CommandGroup from ever reporting completion — the group's own isFinished() (true once
 * all sequential commands are done) must be left in place. The empty lifecycle
 * overrides (initialize/execute/end/interrupted) were no-ops and are removed as well.
 */
public class DiagonalRightAutonomousCommand extends CommandGroup {

    public DiagonalRightAutonomousCommand() {
        // Units of the numeric arguments (inches? encoder ticks? degrees?) are defined
        // by the individual commands — TODO confirm against AutonomousDrive/TurnRight.
        addSequential(new AutonomousDrive(50, true));
        addSequential(new AutonomousTurnRight(45));
        addSequential(new AutonomousDrive(95, true));
        // Negative angle passed to TurnLeft — presumably its convention; verify.
        addSequential(new AutonomousTurnLeft(-35));
        addSequential(new AutonomousRaiseArmSwitch());
        addSequential(new AutonomousDrive(70, true));
        addSequential(new AutonomousOpenClaw());
        // Back away (second argument false reverses direction — TODO confirm).
        addSequential(new AutonomousDrive(70, false));
    }
}
package br.com.juno.test;

import static br.com.juno.integration.api.services.AuthorizationService.AUTHORIZATION_HEADER;
import static br.com.juno.integration.api.services.JunoApiManager.X_API_VERSION;
import static br.com.juno.integration.api.services.JunoApiManager.X_RESOURCE_TOKEN;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockserver.model.HttpRequest.request;
import static org.mockserver.model.HttpResponse.response;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

import com.fasterxml.jackson.core.JsonProcessingException;

import org.mockserver.client.MockServerClient;
import org.mockserver.model.Header;
import org.mockserver.model.HttpRequest;
import org.mockserver.model.MediaType;
import org.mockserver.model.Parameter;
import org.mockserver.model.ParameterBody;

import br.com.juno.integration.api.services.JunoApiManager;
import br.com.juno.integration.api.utils.JacksonUtils;

import kong.unirest.HttpMethod;

/**
 * Registers MockServer expectations for the Juno API endpoints used by the test suite.
 * Each expect* method stubs one endpoint, replying with a canned response loaded from a
 * mock fixture file under the "mockserver" resource directory.
 *
 * The trailing "//" comments are formatter guards keeping the fluent chains one call
 * per line.
 */
public final class MockServerManager {

    public MockServerManager(MockServerClient client) {
        this.mockServer = client;
        configureDefaultExpectations();
    }

    // Expectations every test needs, registered up front.
    private void configureDefaultExpectations() {
        expectAccessToken();
    }

    /** OAuth client-credentials token exchange (Basic auth, form-encoded body). */
    private void expectAccessToken() {
        mockServer.when( //
                request() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/oauth/token") //
                        .withBody(ParameterBody.params(new Parameter("grant_type", "client_credentials"))) //
                        .withContentType(MediaType.APPLICATION_FORM_URLENCODED) //
                        .withHeader(new Header(AUTHORIZATION_HEADER, FixtureHelper.BASIC_AUTHENTICATION)) //
        ) //
                .respond( //
                        response(getResource("oauth", "token", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectCreateCharge(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/charges") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("charges", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectCreditCardTokenization(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/credit-cards/tokenization") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("credit-cards", "tokenization", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // BALANCE

    public void expectBalance() {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.GET.name()) //
                        .withPath("/balance")) //
                .respond( //
                        response(getResource("balance", "GET.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // DIGITAL-ACCOUNTS

    public void expectCreateDigitalAccount(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/digital-accounts") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("digital-accounts", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectFindDigitalAccount() {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.GET.name()) //
                        .withPath("/digital-accounts")) //
                .respond( //
                        response(getResource("digital-accounts", "GET.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectUpdateDigitalAccount(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.PATCH.name()) //
                        .withPath("/digital-accounts") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("digital-accounts", "PATCH.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // PLAN

    public void expectCreatePlan(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/plans") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("plans", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectListPlans() {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.GET.name()) //
                        .withPath("/plans")) //
                .respond( //
                        response(getResource("plans", "GET.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // NOTE(review): GET with a JSON body, path without /{id}, and a "POST.mock" fixture
    // — looks copy-pasted from a POST expectation; verify against the find-plan caller.
    public void expectFindPlan(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.GET.name()) //
                        .withPath("/plans") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("plans", "id", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // NOTE(review): expectActivePlan and expectDeactivePlan are identical (same path
    // "/plans" and same activation fixture); the real endpoints are presumably
    // /plans/{id}/activation and /plans/{id}/deactivation — confirm.
    public void expectActivePlan(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/plans") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("plans", "id", "activation", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    public void expectDeactivePlan(Object object) {
        mockServer.when( //
                getRequestExpectation() //
                        .withMethod(HttpMethod.POST.name()) //
                        .withPath("/plans") //
                        .withBody(toJson(object))) //
                .respond( //
                        response(getResource("plans", "id", "activation", "POST.mock")) //
                                .withContentType(MediaType.APPLICATION_JSON) //
                                .withStatusCode(200)); //
    }

    // Base request matcher: JSON content type, API version header, bearer auth.
    private HttpRequest getRequestExpectationWithoutResourceToken() {
        return request() //
                .withContentType(MediaType.APPLICATION_JSON) //
                .withHeader(new Header(X_API_VERSION, "2")) //
                .withHeader(new Header(AUTHORIZATION_HEADER, FixtureHelper.DUMMY_BEARER_AUTHORIZATION)); //
    }

    // Base matcher plus the per-account resource token header.
    private HttpRequest getRequestExpectation() {
        return getRequestExpectationWithoutResourceToken() //
                .withHeader(new Header(X_RESOURCE_TOKEN, JunoApiManager.config().getResourceToken())); //
    }

    // Serializes the expected request body; fails the test on serialization errors.
    private String toJson(Object object) {
        try {
            return JacksonUtils.getObjectMapper().writeValueAsString(object);
        } catch (JsonProcessingException e) {
            fail("Failed to convert object to json", e);
        }
        return null;
    }

    // Loads a fixture from mockserver/<resources...> via FixtureHelper.
    private String getResource(String... resources) {
        List<String> resourcesList = new ArrayList<>();
        resourcesList.add("mockserver");
        Stream.of(resources).forEach(resourcesList::add);
        return FixtureHelper.getResource(resourcesList.toArray(new String[resourcesList.size()]));
    }

    private MockServerClient mockServer;
}
package symbolchat.symbolchat.widget.symbolButton;

import net.minecraft.client.gui.screen.Screen;

import symbolchat.symbolchat.SymbolSelectionPanel;

/**
 * Toolbar button (labeled with a smiley, U+263A) that toggles the visibility of the
 * symbol selection panel.
 */
public class OpenSymbolPanelButtonWidget extends SymbolButtonWidget {

    // Panel whose visibility this button toggles.
    protected SymbolSelectionPanel symbolSelectionPanel;

    public OpenSymbolPanelButtonWidget(Screen screen, int x, int y, SymbolSelectionPanel symbolSelectionPanel) {
        super(screen, x, y, "\u263a");
        this.symbolSelectionPanel = symbolSelectionPanel;
    }

    /** Toggles the panel open/closed. */
    @Override
    public void onClick(double mouseX, double mouseY) {
        symbolSelectionPanel.visible = !symbolSelectionPanel.visible;
    }

    /** Also renders as hovered while the panel is open, so the button looks "active". */
    @Override
    public boolean isHovered() {
        return super.isHovered() || symbolSelectionPanel.visible;
    }
}
package es.upm.miw.apaw_practice.adapters.rest.music;

import es.upm.miw.apaw_practice.adapters.rest.LexicalAnalyzer;
import es.upm.miw.apaw_practice.domain.models.music.Music;
import es.upm.miw.apaw_practice.domain.services.music.MusicService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.util.stream.Stream;

/**
 * REST adapter for Music: rename a music entity and search musics by singer name.
 */
@RestController
@RequestMapping(MusicResource.MUSICS)
public class MusicResource {

    static final String MUSICS = "/music/musics";
    static final String SEARCH = "/search";
    // NOTE(review): no leading slash — PUT maps to "{id}/name" relative to MUSICS,
    // which Spring resolves the same as "/{id}/name"; confirm this is intentional.
    static final String ID_ID = "{id}";
    static final String NAME = "/name";

    private final MusicService musicService;

    @Autowired
    public MusicResource(MusicService musicService) {
        this.musicService = musicService;
    }

    /** PUT /music/musics/{id}/name — updates the name of the music with the given id. */
    @PutMapping(ID_ID + NAME)
    public Music updateName(@PathVariable String id, @RequestBody NameDto nameDto) {
        return this.musicService.updateName(id, nameDto.getName());
    }

    /**
     * GET /music/musics/search?q=name:... — extracts the "name" field from the query
     * expression and returns the musics of that singer.
     */
    @GetMapping(SEARCH)
    public Stream<Music> findMusicBySingerName(@RequestParam String q){
        String name = new LexicalAnalyzer().extractWithAssure(q,"name");
        return this.musicService.findMusicBySingerName(name);
    }

}
package ru.betterend.util.sdf.operator;

/**
 * SDF operator that uniformly scales its wrapped distance field by a single factor.
 */
public class SDFScale extends SDFUnary {
	private float scale;

	/**
	 * Sets the uniform scale factor.
	 *
	 * @return this instance, for call chaining
	 */
	public SDFScale setScale(float scale) {
		this.scale = scale;
		return this;
	}

	@Override
	public float getDistance(float x, float y, float z) {
		// Sample the source field in unscaled coordinates, then rescale the
		// returned distance so the field stays metrically consistent.
		final float factor = this.scale;
		float sampled = source.getDistance(x / factor, y / factor, z / factor);
		return sampled * factor;
	}
}
/*
 * Copyright (c) 2002-2008 LWJGL Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'LWJGL' nor the names of
 *   its contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.lwjgl.util.generator;

// Fix: imports moved above the type's doc comment. Previously the Javadoc preceded the
// import statements, so the javadoc tool did not associate it with the annotation type.
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;

/**
 * This annotation implies that a method has its postfix stripped
 * according to a specified Buffer parameter.
 *
 * @author elias_naur &lt;elias_naur@users.sourceforge.net&gt;
 * @version $Revision$
 * $Id$
 */
@Target(ElementType.METHOD)
public @interface StripPostfix {
	/** The name of the parameter to deduce the postfix from. */
	String value();
	/** Extension qualifier, or "NULL" for none. */
	String extension() default "NULL";
	/** Whether the generated method name keeps a postfix at all. */
	boolean hasPostfix() default true;
	/** Explicit postfix override, or "NULL" to deduce it from {@link #value()}. */
	String postfix() default "NULL";
}
/*
 * MIT License
 *
 * Copyright (c) 2020 manager
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package pt.unl.fct.miei.usmanagement.manager.services.rulesystem.decision;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import pt.unl.fct.miei.usmanagement.manager.componenttypes.ComponentTypeEnum;
import pt.unl.fct.miei.usmanagement.manager.exceptions.EntityNotFoundException;
import pt.unl.fct.miei.usmanagement.manager.hosts.HostAddress;
import pt.unl.fct.miei.usmanagement.manager.monitoring.HostEvent;
import pt.unl.fct.miei.usmanagement.manager.monitoring.ServiceEvent;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decisions;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.HostDecision;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.HostDecisionValue;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.HostDecisionValues;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.HostDecisions;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.ServiceDecision;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.ServiceDecisionValue;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.ServiceDecisionValues;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.ServiceDecisions;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.rules.HostRule;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.rules.RuleDecisionEnum;
import pt.unl.fct.miei.usmanagement.manager.rulesystem.rules.ServiceRule;
import pt.unl.fct.miei.usmanagement.manager.services.communication.kafka.KafkaService;
import pt.unl.fct.miei.usmanagement.manager.services.fields.FieldsService;
import pt.unl.fct.miei.usmanagement.manager.services.rulesystem.rules.HostRulesService;
import pt.unl.fct.miei.usmanagement.manager.services.rulesystem.rules.ServiceRulesService;
import pt.unl.fct.miei.usmanagement.manager.util.EntityUtils;

import java.sql.Timestamp;
import java.time.Instant;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * CRUD and query service for rule-system decisions: the catalog of possible decisions
 * (per component type), plus the concrete service-level and host-level decisions taken
 * by the rule engine, together with the metric values that motivated them.
 *
 * Many references use the fully-qualified Decision name because the service class sits
 * in a package with the same trailing segment as the domain package.
 */
@Slf4j
@Service
public class DecisionsService {

	private final ServiceRulesService serviceRulesService;
	private final HostRulesService hostRulesService;

	private final Decisions decisions;
	private final ServiceDecisions serviceDecisions;
	private final HostDecisions hostDecisions;
	private final ServiceDecisionValues serviceDecisionValues;
	private final HostDecisionValues hostDecisionValues;
	private final FieldsService fieldsService;
	private final KafkaService kafkaService;

	public DecisionsService(ServiceRulesService serviceRulesService, HostRulesService hostRulesService,
							Decisions decisions, ServiceDecisions serviceDecisions, HostDecisions hostDecisions,
							ServiceDecisionValues serviceDecisionValues, HostDecisionValues hostDecisionValues,
							FieldsService fieldsService, KafkaService kafkaService) {
		this.serviceRulesService = serviceRulesService;
		this.hostRulesService = hostRulesService;
		this.decisions = decisions;
		this.serviceDecisions = serviceDecisions;
		this.hostDecisions = hostDecisions;
		this.serviceDecisionValues = serviceDecisionValues;
		this.hostDecisionValues = hostDecisionValues;
		this.fieldsService = fieldsService;
		this.kafkaService = kafkaService;
	}

	/** Returns every possible decision in the catalog. */
	@Transactional(readOnly = true)
	public List<pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision> getDecisions() {
		return decisions.findAll();
	}

	/** Looks a decision up by its enum name (case-insensitive). */
	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getDecision(String decisionName) {
		RuleDecisionEnum decision = RuleDecisionEnum.valueOf(decisionName.toUpperCase());
		return getDecision(decision);
	}

	/** @throws EntityNotFoundException if no decision exists for the given enum value */
	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getDecision(RuleDecisionEnum decision) {
		return decisions.findByRuleDecision(decision).orElseThrow(() ->
			new EntityNotFoundException(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision.class, "decision", decision.name()));
	}

	/** @throws EntityNotFoundException if no decision exists with the given id */
	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getDecision(Long id) {
		return decisions.findById(id).orElseThrow(() ->
			new EntityNotFoundException(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision.class, "id", id.toString()));
	}

	/**
	 * Persists a new decision (failing if one with the same id already exists) and
	 * broadcasts it over Kafka.
	 */
	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision addDecision(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision decision) {
		checkDecisionDoesntExist(decision);
		decision = saveDecision(decision);
		kafkaService.sendDecision(decision);
		return decision;
	}

	/** Saves the decision only if its id is not already present; existing wins. */
	public Decision addIfNotPresent(Decision decision) {
		Optional<Decision> decisionOptional = decisions.findById(decision.getId());
		return decisionOptional.orElseGet(() -> {
			decision.clearAssociations();
			return saveDecision(decision);
		});
	}

	/**
	 * Upserts a decision: when an entity with the same id exists, its event collections
	 * are merged with the incoming ones and valid scalar properties copied over;
	 * otherwise the incoming decision is saved as new.
	 */
	public Decision addOrUpdateDecision(Decision decision) {
		if (decision.getId() != null) {
			Optional<Decision> decisionOptional = decisions.findById(decision.getId());
			if (decisionOptional.isPresent()) {
				Decision existingDecision = decisionOptional.get();
				Set<ServiceEvent> serviceEvents = decision.getServiceEvents();
				if (serviceEvents != null) {
					Set<pt.unl.fct.miei.usmanagement.manager.monitoring.ServiceEvent> currentServiceEvents = existingDecision.getServiceEvents();
					if (currentServiceEvents == null) {
						existingDecision.setServiceEvents(new HashSet<>(serviceEvents));
					}
					else {
						// NOTE(review): retainAll followed by addAll leaves exactly the
						// incoming set's elements, preserving the existing collection
						// instance — presumably to keep the JPA-managed collection; confirm.
						currentServiceEvents.retainAll(serviceEvents);
						currentServiceEvents.addAll(serviceEvents);
					}
				}
				Set<HostEvent> hostEvents = decision.getHostEvents();
				if (hostEvents != null) {
					Set<HostEvent> currentHostEvents = existingDecision.getHostEvents();
					if (currentHostEvents == null) {
						existingDecision.setHostEvents(new HashSet<>(hostEvents));
					}
					else {
						currentHostEvents.retainAll(hostEvents);
						currentHostEvents.addAll(hostEvents);
					}
				}
				EntityUtils.copyValidProperties(decision, existingDecision);
				return saveDecision(existingDecision);
			}
		}
		return saveDecision(decision);
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision saveDecision(Decision decision) {
		log.info("Saving decision {}", ToStringBuilder.reflectionToString(decision));
		return decisions.save(decision);
	}

	public void deleteDecision(Long id) {
		decisions.deleteById(id);
	}

	/** Decisions applicable to services. */
	public List<pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision> getServicesPossibleDecisions() {
		return decisions.findByComponentTypeType(ComponentTypeEnum.SERVICE);
	}

	/** Decisions applicable to hosts. */
	public List<pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision> getHostsPossibleDecisions() {
		return decisions.findByComponentTypeType(ComponentTypeEnum.HOST);
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getServicePossibleDecision(String decisionName) {
		RuleDecisionEnum decision = RuleDecisionEnum.valueOf(decisionName.toUpperCase());
		return getServicePossibleDecision(decision);
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getServicePossibleDecision(RuleDecisionEnum decision) {
		return decisions.findByRuleDecisionAndComponentTypeType(decision, ComponentTypeEnum.SERVICE).orElseThrow(() ->
			new EntityNotFoundException(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision.class, "decisionName", decision.name()));
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getContainerPossibleDecision(String decisionName) {
		RuleDecisionEnum decision = RuleDecisionEnum.valueOf(decisionName.toUpperCase());
		return decisions.findByRuleDecisionAndComponentTypeType(decision, ComponentTypeEnum.CONTAINER).orElseThrow(() ->
			new EntityNotFoundException(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision.class, "decisionName", decisionName));
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getHostPossibleDecision(String decisionName) {
		RuleDecisionEnum decision = RuleDecisionEnum.valueOf(decisionName.toUpperCase());
		return getHostPossibleDecision(decision);
	}

	public pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision getHostPossibleDecision(RuleDecisionEnum decision) {
		return decisions.findByRuleDecisionAndComponentTypeType(decision, ComponentTypeEnum.HOST).orElseThrow(() ->
			new EntityNotFoundException(pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision.class, "decisionName", decision.name()));
	}

	/**
	 * Records a decision taken for a service container, resolving the triggering rule
	 * and timestamping it with the current instant.
	 */
	public ServiceDecision addServiceDecision(String containerId, String serviceName, String decisionName, long ruleId, String result) {
		ServiceRule rule = serviceRulesService.getRule(ruleId);
		Decision decision = getServicePossibleDecision(decisionName);
		Timestamp timestamp = Timestamp.from(Instant.now());
		ServiceDecision serviceDecision = ServiceDecision.builder()
			.containerId(containerId)
			.serviceName(serviceName)
			.result(result)
			.ruleName(rule.getName())
			.decision(decision)
			.timestamp(timestamp).build();
		return saveServiceDecision(serviceDecision);
	}

	public ServiceDecision saveServiceDecision(ServiceDecision serviceDecision) {
		log.info("Saving service decision: {}", ToStringBuilder.reflectionToString(serviceDecision));
		return serviceDecisions.save(serviceDecision);
	}

	/** Records a decision taken for a host (no timestamp set here, unlike services). */
	public HostDecision addHostDecision(HostAddress hostAddress, String decisionName, long ruleId) {
		HostRule rule = hostRulesService.getRule(ruleId);
		pt.unl.fct.miei.usmanagement.manager.rulesystem.decision.Decision decision = getHostPossibleDecision(decisionName);
		HostDecision hostDecision = HostDecision.builder().publicIpAddress(hostAddress.getPublicIpAddress())
			.privateIpAddress(hostAddress.getPrivateIpAddress()).ruleName(rule.getName()).decision(decision).build();
		return saveHostDecision(hostDecision);
	}

	public HostDecision saveHostDecision(HostDecision hostDecision) {
		log.info("Saving host decision: {}", ToStringBuilder.reflectionToString(hostDecision));
		return hostDecisions.save(hostDecision);
	}

	/**
	 * Persists the "*-effective-val" metric fields that motivated a service decision;
	 * the field name is the key with the "-effective-val" suffix stripped.
	 */
	public void addServiceDecisionValueFromFields(ServiceDecision serviceDecision, Map<String, Double> fields) {
		serviceDecisionValues.saveAll(
			fields.entrySet().stream()
				.filter(field -> field.getKey().contains("effective-val"))
				.map(field -> ServiceDecisionValue.builder()
					.serviceDecision(serviceDecision)
					.field(fieldsService.getField(field.getKey().split("-effective-val")[0]))
					.value(field.getValue())
					.build())
				.collect(Collectors.toList())
		);
	}

	/** Host-side counterpart of {@link #addServiceDecisionValueFromFields}. */
	public void addHostDecisionValueFromFields(HostDecision hostDecision, Map<String, Double> fields) {
		hostDecisionValues.saveAll(
			fields.entrySet().stream()
				.filter(field -> field.getKey().contains("effective-val"))
				.map(field -> HostDecisionValue.builder()
					.hostDecision(hostDecision)
					.field(fieldsService.getField(field.getKey().split("-effective-val")[0]))
					.value(field.getValue())
					.build())
				.collect(Collectors.toList())
		);
	}

	public List<ServiceDecision> getServiceDecisions() {
		return serviceDecisions.findAll();
	}

	public List<ServiceDecision> getServiceDecisions(String serviceName) {
		return serviceDecisions.findByServiceName(serviceName);
	}

	/** Prefix match so callers may pass the short container id. */
	public List<ServiceDecision> getContainerDecisions(String containerId) {
		return serviceDecisions.findByContainerIdStartingWith(containerId);
	}

	public List<HostDecision> getHostDecisions() {
		return hostDecisions.findAll();
	}

	public List<HostDecision> getHostDecisions(HostAddress hostAddress) {
		return hostDecisions.findByPublicIpAddressAndPrivateIpAddress(hostAddress.getPublicIpAddress(), hostAddress.getPrivateIpAddress());
	}

	// Guard for addDecision: id uniqueness enforced before hitting the database constraint.
	private void checkDecisionDoesntExist(Decision decision) {
		Long id = decision.getId();
		if (decisions.hasDecision(id)) {
			throw new DataIntegrityViolationException("Decision '" + id + "' already exists");
		}
	}

}
package ru.job4j.pool;

import ru.job4j.simpleblockingqueue.SimpleBlockingQueue;

/**
 * Worker thread for a simple thread pool: repeatedly drains {@link Runnable}
 * tasks from a shared queue and runs them, sleeping briefly between drain
 * passes. Stopped cooperatively via {@link #doStop()}, which sets the stop
 * flag and interrupts the thread.
 */
public class WorkerThread extends Thread {

    /** Shared task source; never reassigned after construction, hence final. */
    private final SimpleBlockingQueue<Runnable> taskQueue;

    /** Stop flag; all access is synchronized on this thread's monitor. */
    private boolean isStopped = false;

    /**
     * @param taskQueue queue this worker pulls tasks from (shared with producers)
     */
    public WorkerThread(SimpleBlockingQueue<Runnable> taskQueue) {
        this.taskQueue = taskQueue;
    }

    @Override
    public void run() {
        while (!isStopped()) {
            Runnable runnable;
            try {
                // Drain every task currently available, then back off briefly.
                // NOTE(review): assumes poll() returns null when the queue is
                // empty rather than blocking — confirm SimpleBlockingQueue's
                // contract.
                while ((runnable = taskQueue.poll()) != null) {
                    runnable.run();
                }
                Thread.sleep(1);
            } catch (InterruptedException e) {
                // Restore the interrupt status; the loop condition (isStopped)
                // decides whether to exit. NOTE(review): an external interrupt
                // without doStop() leaves the flag set and sleep() throwing on
                // every pass — acceptable only if doStop() is the sole
                // interrupter.
                Thread.currentThread().interrupt();
            }
        }
    }

    /** @return true once doStop() has been called. */
    private synchronized boolean isStopped() {
        return isStopped;
    }

    /** Requests shutdown: raises the stop flag, then interrupts any sleep. */
    public synchronized void doStop() {
        isStopped = true;
        this.interrupt();
    }
}
package cn.shishuihao.thirdparty.api.pay.weixin;

import cn.shishuihao.thirdparty.api.core.properties.AbstractApiProperties;
import cn.shishuihao.thirdparty.api.pay.PayApiProperties;
import cn.shishuihao.thirdparty.api.pay.weixin.sdk.domain.SignType;

import java.util.Objects;

/**
 * Configuration properties for the WeChat Pay channel.
 * Identity (see {@link #equals(Object)}) is the (appId, mchId) pair plus the
 * superclass state; the remaining fields are auxiliary settings.
 *
 * @author shishuihao
 * @version 1.0.0
 */
public class WxPayApiProperties extends AbstractApiProperties implements PayApiProperties {
    /**
     * Official (public) account ID, e.g. wx8888888888888888.
     * The appId assigned by WeChat Pay (for enterprise accounts the corpid is this appId).
     */
    private String appId;
    /**
     * Merchant ID, e.g. 1900000109. Assigned by WeChat Pay.
     */
    private String mchId;
    /**
     * Sub-merchant app ID, e.g. wx8888888888888888.
     * The APPID the contracted sub-merchant registered on the WeChat Open Platform.
     */
    private String subAppId;
    /**
     * Sub-merchant ID, e.g. 1900000109. The contracted sub-merchant's merchant number.
     */
    private String subMchId;
    /**
     * Device ID, e.g. 013467007045764.
     * Terminal device number (merchant-defined, e.g. a store number).
     */
    private String deviceInfo;
    /**
     * Signature type, e.g. HMAC-SHA256.
     * HMAC-SHA256 and MD5 are currently supported; defaults to MD5.
     */
    private SignType signType = SignType.MD5;
    /**
     * Merchant API key (signing secret).
     */
    private String key;

    public WxPayApiProperties() {
        // Bind these properties to the WeChat Pay channel.
        this.setChannelId(WxPayApiChannel.CHANNEL_ID);
    }

    /** The properties' identifier is the WeChat appId. */
    @Override
    public String id() {
        return appId;
    }

    /** Equality: same class, equal superclass state, and equal (appId, mchId). */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        WxPayApiProperties that = (WxPayApiProperties) o;
        return Objects.equals(appId, that.appId)
                && Objects.equals(mchId, that.mchId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), appId, mchId);
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getMchId() {
        return mchId;
    }

    public void setMchId(String mchId) {
        this.mchId = mchId;
    }

    public String getSubAppId() {
        return subAppId;
    }

    public void setSubAppId(String subAppId) {
        this.subAppId = subAppId;
    }

    public String getSubMchId() {
        return subMchId;
    }

    public void setSubMchId(String subMchId) {
        this.subMchId = subMchId;
    }

    public String getDeviceInfo() {
        return deviceInfo;
    }

    public void setDeviceInfo(String deviceInfo) {
        this.deviceInfo = deviceInfo;
    }

    public SignType getSignType() {
        return signType;
    }

    public void setSignType(SignType signType) {
        this.signType = signType;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }
}
package org.projectlombok.security.totpexample.servlets;

import javax.servlet.http.HttpServlet;

/**
 * Servlet stub: overrides nothing, so it inherits the default
 * {@link HttpServlet} behavior (HTTP errors for unimplemented methods).
 * NOTE(review): appears to be a placeholder for serving files — confirm
 * whether an implementation or removal is intended.
 */
public class FileServlet extends HttpServlet {
}
/*
 * Copyright (c) 2008-2016, GigaSpaces Technologies, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openspaces.archive;

import org.openspaces.core.GigaSpace;
import org.openspaces.events.DynamicEventTemplateProvider;
import org.openspaces.events.adapter.AnnotationDynamicEventTemplateProviderAdapter;
import org.openspaces.events.adapter.MethodDynamicEventTemplateProviderAdapter;
import org.springframework.transaction.PlatformTransactionManager;

/**
 * A configuration for {@link org.openspaces.archive.ArchivePollingContainer} using fluent API.
 * Each setter-style method delegates to the wrapped container and returns {@code this}
 * so calls can be chained; {@link #create()} finalizes and returns the container.
 *
 * <p>Sample usage of static template:
 * <pre>
 * UrlSpaceConfigurer urlSpaceConfigurerPrimary = new UrlSpaceConfigurer("/./space");
 * GigaSpace gigaSpace = new GigaSpaceConfigurer(urlSpaceConfigurerPrimary.space()).gigaSpace();
 * ArchivePollingContainer archiveContainer = new ArchivePollingContainerConfigurer(gigaSpace)
 *            .template(new TestMessage())
 *            .archiveHandler(new CasandraArchiveOperationHandler())
 *            .create();
 *
 * ...
 *
 * archiveContainer.destroy();
 * urlSpaceConfigurerPrimary.destroy();
 * </pre>
 *
 * @author Itai Frenkel
 * @since 9.1.1
 */
public class ArchivePollingContainerConfigurer {

    // The container under construction; configured by the fluent methods below.
    private ArchivePollingContainer archiveContainer;

    // Guards create() so the container is initialized only once.
    private boolean initialized = false;

    public ArchivePollingContainerConfigurer(GigaSpace gigaSpace) {
        archiveContainer = new ArchivePollingContainer();
        archiveContainer.setGigaSpace(gigaSpace);
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setBeanName(String)
     */
    public ArchivePollingContainerConfigurer name(String name) {
        archiveContainer.setBeanName(name);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setConcurrentConsumers(int)
     */
    public ArchivePollingContainerConfigurer concurrentConsumers(int concurrentConsumers) {
        archiveContainer.setConcurrentConsumers(concurrentConsumers);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setTemplate(Object)
     */
    public ArchivePollingContainerConfigurer template(Object template) {
        archiveContainer.setTemplate(template);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setMaxConcurrentConsumers(int)
     */
    public ArchivePollingContainerConfigurer maxConcurrentConsumers(int maxConcurrentConsumers) {
        archiveContainer.setMaxConcurrentConsumers(maxConcurrentConsumers);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setReceiveTimeout(long)
     */
    public ArchivePollingContainerConfigurer receiveTimeout(long receiveTimeout) {
        archiveContainer.setReceiveTimeout(receiveTimeout);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setPerformSnapshot(boolean)
     */
    public ArchivePollingContainerConfigurer performSnapshot(boolean performSnapshot) {
        archiveContainer.setPerformSnapshot(performSnapshot);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setRecoveryInterval(long)
     */
    public ArchivePollingContainerConfigurer recoveryInterval(long recoveryInterval) {
        archiveContainer.setRecoveryInterval(recoveryInterval);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setAutoStart(boolean)
     */
    public ArchivePollingContainerConfigurer autoStart(boolean autoStart) {
        archiveContainer.setAutoStart(autoStart);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setArchiveHandler(ArchiveOperationHandler)
     */
    public ArchivePollingContainerConfigurer archiveHandler(ArchiveOperationHandler archiveHandler) {
        archiveContainer.setArchiveHandler(archiveHandler);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setTransactionManager(org.springframework.transaction.PlatformTransactionManager)
     */
    public ArchivePollingContainerConfigurer transactionManager(PlatformTransactionManager transactionManager) {
        archiveContainer.setTransactionManager(transactionManager);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setTransactionName(String)
     */
    public ArchivePollingContainerConfigurer transactionName(String transactionName) {
        archiveContainer.setTransactionName(transactionName);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setTransactionTimeout(int)
     */
    public ArchivePollingContainerConfigurer transactionTimeout(int transactionTimeout) {
        archiveContainer.setTransactionTimeout(transactionTimeout);
        return this;
    }

    /**
     * @see org.openspaces.archive.ArchivePollingContainer#setTransactionIsolationLevel(int)
     */
    public ArchivePollingContainerConfigurer transactionIsolationLevel(int transactionIsolationLevel) {
        archiveContainer.setTransactionIsolationLevel(transactionIsolationLevel);
        return this;
    }

    /**
     * @see org.openspaces.events.polling.SimplePollingEventListenerContainer#setDynamicTemplate(Object)
     */
    public ArchivePollingContainerConfigurer dynamicTemplate(DynamicEventTemplateProvider templateProvider) {
        archiveContainer.setDynamicTemplate(templateProvider);
        return this;
    }

    /**
     * Wraps a named method of {@code templateProvider} as the dynamic template source.
     *
     * @see org.openspaces.events.adapter.MethodDynamicEventTemplateProviderAdapter
     */
    public ArchivePollingContainerConfigurer dynamicTemplateMethod(Object templateProvider, String methodName) {
        MethodDynamicEventTemplateProviderAdapter adapter = new MethodDynamicEventTemplateProviderAdapter();
        adapter.setDelegate(templateProvider);
        adapter.setMethodName(methodName);
        adapter.afterPropertiesSet();
        return dynamicTemplate(adapter);
    }

    /**
     * Wraps an annotation-marked method of {@code templateProvider} as the dynamic template source.
     *
     * @see org.openspaces.events.adapter.AnnotationDynamicEventTemplateProviderAdapter
     */
    public ArchivePollingContainerConfigurer dynamicTemplateAnnotation(Object templateProvider) {
        AnnotationDynamicEventTemplateProviderAdapter adapter = new AnnotationDynamicEventTemplateProviderAdapter();
        adapter.setDelegate(templateProvider);
        adapter.afterPropertiesSet();
        return dynamicTemplate(adapter);
    }

    /**
     * Finalizes configuration and returns the container. Idempotent: the
     * container is initialized only on the first call.
     */
    public ArchivePollingContainer create() {
        if (!initialized) {
            archiveContainer.setRegisterSpaceModeListener(true);
            archiveContainer.afterPropertiesSet();
            initialized = true;
        }
        return archiveContainer;
    }

    /**
     * @see ArchivePollingContainer#setBatchSize(int)
     */
    public ArchivePollingContainerConfigurer batchSize(int batchSize) {
        archiveContainer.setBatchSize(batchSize);
        return this;
    }

    /**
     * @see ArchivePollingContainer#setNonBlockingSleep(long)
     */
    public ArchivePollingContainerConfigurer nonBlockingSleep(long nonBlockingSleepMilliseconds) {
        archiveContainer.setNonBlockingSleep(nonBlockingSleepMilliseconds);
        return this;
    }

    /**
     * @see ArchivePollingContainer#setUseFifoGrouping(boolean)
     */
    public ArchivePollingContainerConfigurer useFifoGrouping(boolean useFifoGrouping) {
        archiveContainer.setUseFifoGrouping(useFifoGrouping);
        return this;
    }
}
package com.bx.erp.coupon;

import com.bx.erp.model.Commodity;
import com.bx.erp.model.Coupon;
import com.bx.erp.model.CouponScope;
import com.bx.erp.model.RetailTrade;
import com.bx.erp.utils.GeneralUtil;

import java.util.List;

/**
 * Pricing helper for applying a coupon to a retail trade.
 * Stateless utility: all behavior is in the static method.
 */
public class CouponCalculator {

    // Utility class: prevent instantiation (static methods only).
    private CouponCalculator() {
    }

    /**
     * Computes the amount payable for a retail trade after applying the
     * selected coupon, and records the discounted-away amount on
     * {@code rtSelling} via {@code setCouponAmount}.
     *
     * <p>Cash coupons subtract a fixed amount. Discount coupons multiply by a
     * discount factor; if the coupon has a commodity scope (listSlave1), only
     * the in-scope commodities' subtotal is discounted and out-of-scope
     * commodities are charged at full price.
     *
     * @param amountBeforeUsingCoupon total before the coupon is applied
     * @return the amount payable after the coupon
     */
    public static double calculateAmountUsingCoupon(Coupon couponSelected, double amountBeforeUsingCoupon,
                                                    List<Commodity> commListToSell, RetailTrade rtSelling) {
        double amountToPay = 0.00000000d;
        if (couponSelected.getType() == Coupon.EnumCouponCardType.ECCT_CASH.getIndex()) {
            // Cash coupon: flat reduction.
            amountToPay = GeneralUtil.sub(amountBeforeUsingCoupon, couponSelected.getReduceAmount());
            rtSelling.setCouponAmount(couponSelected.getReduceAmount());
        } else {
            // Discount coupon. If a commodity scope is specified, only the
            // total of in-scope commodities is discounted.
            if (couponSelected.getListSlave1() != null && couponSelected.getListSlave1().size() > 0) {
                // Sum the subtotals of commodities that fall inside the coupon's scope.
                double scopeCommodityTotal = 0.000000d;
                List<CouponScope> couponScopes = (List<CouponScope>) couponSelected.getListSlave1();
                for (int j = 0; j < commListToSell.size(); j++) {
                    Commodity commodity = commListToSell.get(j);
                    for (int k = 0; k < couponScopes.size(); k++) {
                        CouponScope couponScope = couponScopes.get(k);
                        if (commodity.getID() == couponScope.getCommodityID()) {
                            scopeCommodityTotal = GeneralUtil.sum(scopeCommodityTotal, commodity.getSubtotal());
                        }
                    }
                }
                // Total of commodities NOT covered by the discount.
                double commodityTotal = GeneralUtil.sub(amountBeforeUsingCoupon, scopeCommodityTotal);
                amountToPay = GeneralUtil.sum(commodityTotal, GeneralUtil.mul(scopeCommodityTotal, couponSelected.getDiscount()));
            } else {
                // No scope: discount applies to the full amount.
                amountToPay = GeneralUtil.mul(amountBeforeUsingCoupon, couponSelected.getDiscount());
            }
            rtSelling.setCouponAmount(GeneralUtil.sub(amountBeforeUsingCoupon, amountToPay));
        }
        return amountToPay;
    }
}
package littleaj.simpoll.api.repositories.inmem;

import littleaj.simpoll.api.exceptions.PollNotFoundException;
import littleaj.simpoll.api.repositories.PollRepository;
import littleaj.simpoll.model.Poll;
import littleaj.simpoll.model.PollId;
import littleaj.simpoll.model.PollResults;
import littleaj.simpoll.model.Result;
import littleaj.simpoll.model.Status;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Map-backed {@link PollRepository} for tests/local use. Polls and their
 * results live in separate maps keyed by {@link PollId}; results may be
 * absent until the first vote. Not thread-safe.
 */
public class InMemoryPollRepository implements PollRepository {

    private Map<PollId, Poll> pollsStore;
    private Map<PollId, PollResults> resultsStore;

    public InMemoryPollRepository() {
        this(new HashMap<>(), new HashMap<>());
    }

    public InMemoryPollRepository(Map<PollId, Poll> pollsStore, Map<PollId, PollResults> resultsStore) {
        this.pollsStore = pollsStore;
        this.resultsStore = resultsStore;
    }

    // Package-private test convenience. NOTE(review): the statuses argument is
    // currently ignored; kept for source compatibility with existing callers.
    InMemoryPollRepository(Collection<Poll> polls, Collection<PollResults> results, Collection<Status> statuses) {
        this(
            polls.stream().collect(Collectors.toMap(Poll::getId, Function.identity())),
            results.stream().collect(Collectors.toMap(PollResults::getPollId, Function.identity())));
    }

    /** @return a defensive snapshot of all stored polls */
    @Override
    public Collection<Poll> getAllPolls() {
        return new ArrayList<>(pollsStore.values());
    }

    @Override
    public boolean hasPollId(PollId id) {
        return pollsStore.containsKey(id);
    }

    @Override
    public void storePoll(Poll poll) {
        pollsStore.put(poll.getId(), poll);
    }

    /** @return the stored poll, or null when unknown */
    @Override
    public Poll loadPoll(PollId id) {
        return pollsStore.get(id);
    }

    /**
     * Updates the status of an existing poll.
     *
     * @throws PollNotFoundException when the poll id is unknown
     */
    @Override
    public void updateStatus(PollId id, Status status) {
        Poll poll = pollsStore.get(id);
        if (poll == null) {
            throw new PollNotFoundException();
        }
        poll.setStatus(status);
        pollsStore.put(poll.getId(), poll);
    }

    /**
     * @return the results for the poll, or null when no vote has been recorded yet
     * @throws PollNotFoundException when the poll id is unknown
     */
    @Override
    public PollResults getPollResults(PollId id) {
        if (!pollsStore.containsKey(id)) {
            throw new PollNotFoundException();
        }
        return resultsStore.get(id);
    }

    /**
     * Records one vote for {@code answer}, lazily creating the poll's result
     * set (one zeroed {@link Result} per declared answer) on the first vote.
     *
     * @throws PollNotFoundException    when the poll id is unknown
     * @throws IllegalArgumentException when {@code answer} is not one of the
     *                                  poll's declared answers (previously a
     *                                  bare NullPointerException)
     */
    @Override
    public void incrementResult(PollId pollId, String answer) {
        if (!pollsStore.containsKey(pollId)) {
            throw new PollNotFoundException();
        }
        PollResults results = resultsStore.get(pollId);
        if (results == null) {
            final Poll poll = pollsStore.get(pollId);
            results = new PollResults(pollId);
            for (String a : poll.getAnswers()) {
                Result r = new Result();
                r.setAnswer(a);
                results.putResult(r);
            }
        }
        final Result result = results.getResult(answer);
        if (result == null) {
            // Fix: fail with a descriptive exception instead of NPE below.
            throw new IllegalArgumentException("Unknown answer '" + answer + "' for poll " + pollId);
        }
        result.setVoteCount(result.getVoteCount() + 1);
        results.putResult(result);
        resultsStore.put(pollId, results);
    }

    /**
     * Removes the poll and any results it has.
     *
     * @throws PollNotFoundException when the poll id is unknown
     */
    @Override
    public void deletePoll(PollId id) {
        if (!pollsStore.containsKey(id)) {
            throw new PollNotFoundException();
        }
        resultsStore.remove(id);
        pollsStore.remove(id);
    }
}
import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 * Iterator over nothing: {@link #hasNext()} is always false and
 * {@link #next()} always throws {@link NoSuchElementException}.
 *
 * <p>Improvement: since the iterator is stateless and immutable, a single
 * shared instance can be handed out via {@link #instance()} (mirroring
 * {@code Collections.emptyIterator()}), avoiding an allocation per use.
 * The public constructor is kept for backward compatibility.
 */
public class EmptyIterator<T> implements Iterator<T> {

    // One shared, stateless instance; safe to cast to any element type.
    private static final EmptyIterator<Object> INSTANCE = new EmptyIterator<>();

    /**
     * @param <T> desired element type
     * @return a shared empty iterator (cast is safe: no element is ever produced)
     */
    @SuppressWarnings("unchecked")
    public static <T> EmptyIterator<T> instance() {
        return (EmptyIterator<T>) INSTANCE;
    }

    @Override
    public boolean hasNext() {
        return false;
    }

    @Override
    public T next() {
        throw new NoSuchElementException();
    }
}
package net.twasi.obsremotejava.requests.SaveReplayBuffer;

import net.twasi.obsremotejava.OBSCommunicator;
import net.twasi.obsremotejava.requests.BaseRequest;
import net.twasi.obsremotejava.requests.RequestType;

/**
 * OBS websocket request of type {@code SaveReplayBuffer}.
 * Construction registers the matching response class under this request's
 * message id so the communicator can dispatch the reply.
 */
public class SaveReplayBufferRequest extends BaseRequest {
    public SaveReplayBufferRequest(OBSCommunicator com) {
        super(RequestType.SaveReplayBuffer);
        // Map this message id to its response type for reply deserialization.
        com.messageTypes.put(getMessageId(), SaveReplayBufferResponse.class);
    }
}
package com.pajonc.coding.repo.pricing.model;

import java.util.Objects;

/**
 * A purchasable item identified by its name, optionally carrying a regular
 * price and a special (promotional) price. Identity — equals/hashCode — is
 * based solely on the item name.
 */
public class Product {

    private String item;
    private Price price;
    private SpecialPrice specialPrice;

    public Product(String item) {
        this.item = item;
    }

    /** Attaches the regular price and returns this product for chaining. */
    public Product buildPrice(Price price) {
        this.price = price;
        return this;
    }

    /** Attaches the promotional price and returns this product for chaining. */
    public Product buildSpecialPrice(SpecialPrice specialPrice) {
        this.specialPrice = specialPrice;
        return this;
    }

    public String getItem() {
        return item;
    }

    public Price getPrice() {
        return price;
    }

    public SpecialPrice getSpecialPrice() {
        return specialPrice;
    }

    /** Two products are equal iff they are the same class and share an item name. */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        return Objects.equals(item, ((Product) other).item);
    }

    @Override
    public int hashCode() {
        return Objects.hash(item);
    }
}
/*
 * Copyright 2016 (c) MuleSoft, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.raml.parser.builder;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.util.Map;

import org.junit.Test;
import org.raml.model.ParamType;
import org.raml.model.Raml;
import org.raml.model.Resource;

/**
 * Parses org/raml/params/uri-parameters.yaml and checks how nameless ("/")
 * resources and URI parameters are resolved.
 */
public class UriParametersTestCase extends AbstractRamlTestCase {

    public static final String RAML = "org/raml/params/uri-parameters.yaml";

    /** The fixture RAML must parse without validation errors. */
    @Test
    public void validate() {
        validateRamlNoErrors(RAML);
    }

    /** Nested "/" children keep relative URI "/" while full URIs accumulate slashes. */
    @Test
    public void namelessResources() {
        Raml raml = parseRaml(RAML);
        Map<String, Resource> namelessChildren = raml.getResource("/nameless-children").getResources();
        assertThat(namelessChildren.get("/").getRelativeUri(), is("/"));
        assertThat(namelessChildren.get("/").getUri(), is("/nameless-children/"));
        assertThat(namelessChildren.get("/").getResources().get("/").getRelativeUri(), is("/"));
        assertThat(namelessChildren.get("/").getResources().get("/").getUri(), is("/nameless-children//"));
        assertThat(raml.getResource("/nameless-children//").getRelativeUri(), is("/"));
    }

    /** Root-level "/" resources resolve lookups with leading/trailing slashes. */
    @Test
    public void namelessRootResources() {
        Raml raml = parseRaml(RAML);
        assertThat(raml.getResource("/").getRelativeUri(), is("/"));
        assertThat(raml.getResource("//named").getRelativeUri(), is("/named"));
        assertThat(raml.getResource("//named/").getRelativeUri(), is("/"));
        assertThat(raml.getResource("//named//").getRelativeUri(), is("/"));
        assertThat(raml.getResource("//named///named").getRelativeUri(), is("/named"));
    }

    /** A resource path that also appears in the base URI still resolves. */
    @Test
    public void resourceLikeBaseUriPath() {
        Raml raml = parseRaml(RAML);
        Resource resource = raml.getResource("/apis");
        assertThat(resource.getRelativeUri(), is("/apis"));
    }

    /** Child resources inherit (resolve) URI parameters declared by ancestors. */
    @Test
    public void parentUriTemplate() {
        Raml raml = parseRaml(RAML);
        Resource apiId = raml.getResource("/apis/{apiId}");
        assertThat(apiId.getUriParameters().size(), is(1));
        assertThat(apiId.getUriParameters().get("apiId").getType(), is(ParamType.STRING));
        Resource childId = raml.getResource("/apis/{apiId}/{childId}");
        assertThat(childId.getUriParameters().size(), is(1));
        assertThat(childId.getResolvedUriParameters().size(), is(2));
        assertThat(childId.getResolvedUriParameters().get("apiId").getType(), is(ParamType.STRING));
        assertThat(childId.getResolvedUriParameters().get("childId").getType(), is(ParamType.STRING));
    }
}
/*
 * Licensed to the University of California, Berkeley under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package tachyon.client.file.options;

import tachyon.annotation.PublicApi;
import tachyon.conf.TachyonConf;

/**
 * Method option for mounting a path.
 *
 * <p>Currently carries no configurable state; the builder/defaults structure
 * exists to keep the options API uniform and forward-compatible.
 */
@PublicApi
public final class MountOptions {
  /**
   * Builder for {@link MountOptions}.
   */
  public static class Builder implements OptionsBuilder<MountOptions> {
    /**
     * Creates a new builder for {@link MountOptions}.
     */
    public Builder() {}

    /**
     * Creates a new builder for {@link MountOptions}.
     *
     * @param conf a Tachyon configuration (unused — no options are configurable yet)
     */
    public Builder(TachyonConf conf) {}

    /**
     * Builds a new instance of {@link MountOptions}.
     *
     * @return a {@link MountOptions} instance
     */
    @Override
    public MountOptions build() {
      return new MountOptions(this);
    }
  }

  /**
   * @return the default {@link MountOptions}
   */
  public static MountOptions defaults() {
    return new Builder().build();
  }

  // Options are only constructed through the Builder.
  private MountOptions(MountOptions.Builder builder) {}
}
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.casemgmt.impl.audit;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.ServiceLoader;

import org.jbpm.casemgmt.api.audit.CaseFileData;
import org.jbpm.casemgmt.api.audit.CaseVariableIndexer;
import org.jbpm.casemgmt.api.event.CaseEvent;

/**
 * Represents logic behind mechanism to index case variables.
 * Supports custom indexers to be loaded dynamically via JDK ServiceLoader.
 *
 * Adds the default indexer ({@link StringCaseVariableIndexer}) as the last
 * indexer as it accepts all types.
 */
public class CaseIndexerManager {

    private static ServiceLoader<CaseVariableIndexer> caseVariableIndexers = ServiceLoader.load(CaseVariableIndexer.class);

    private static CaseIndexerManager INSTANCE;

    private List<CaseVariableIndexer> indexers = new ArrayList<>();

    private CaseIndexerManager() {
        for (CaseVariableIndexer indexer : caseVariableIndexers) {
            indexers.add(indexer);
        }
        // always add at the end the default one (accepts every type)
        indexers.add(new StringCaseVariableIndexer());
    }

    /**
     * Indexes the variable with the first indexer that accepts it, converting
     * the indexer output into {@link CaseFileDataLog} audit entries stamped
     * with the event's case id, definition id, user and current time.
     *
     * @return the audit entries, or null when no indexer produced a result
     *         (callers currently rely on the null return — do not change to an
     *         empty list without auditing call sites)
     */
    public List<CaseFileData> index(CaseEvent caseEvent, String variableName, Object variable) {
        for (CaseVariableIndexer indexer : indexers) {
            if (indexer.accept(variable)) {
                List<CaseFileData> indexed = indexer.index(variableName, variable);
                if (indexed != null) {
                    List<CaseFileData> dataItems = new ArrayList<>();
                    for (CaseFileData caseVariable : indexed) {
                        CaseFileDataLog caseFileDataLog = new CaseFileDataLog(caseEvent.getCaseId(),
                                caseEvent.getCaseFile().getDefinitionId(), caseVariable.getItemName());
                        caseFileDataLog.setItemType(caseVariable.getItemType());
                        caseFileDataLog.setItemValue(caseVariable.getItemValue());
                        caseFileDataLog.setLastModified(new Date());
                        caseFileDataLog.setLastModifiedBy(caseEvent.getUser());
                        dataItems.add(caseFileDataLog);
                    }
                    return dataItems;
                }
            }
        }
        return null;
    }

    /**
     * Lazily created singleton accessor.
     * Fix: synchronized to prevent the race where two threads each observe
     * INSTANCE == null and construct separate managers.
     */
    public static synchronized CaseIndexerManager get() {
        if (INSTANCE == null) {
            INSTANCE = new CaseIndexerManager();
        }
        return INSTANCE;
    }
}
package org.sut.cashmachine.dao.receipt;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import org.sut.cashmachine.model.order.ReceiptEntryModel;

/**
 * Spring Data JPA repository for receipt line entries.
 */
@Repository
public interface ReceiptEntryRepository extends JpaRepository<ReceiptEntryModel, Long> {

    /**
     * Derived query: the entry for the given product within the given receipt.
     * Per Spring Data conventions for a non-Optional return type, returns null
     * when no matching entry exists.
     */
    ReceiptEntryModel findByProductIdAndReceiptId(long productId, long receiptId);
}
package com.anggastudio.printama;

import android.bluetooth.BluetoothDevice;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.DialogFragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;

import java.util.ArrayList;
import java.util.Set;

/**
 * Dialog that lists bonded Bluetooth devices so the user can pick and save a
 * printer. The chosen device name is persisted via {@link Pref} and reported
 * through {@link Printama.OnConnectPrinter}.
 *
 * NOTE(review): setDeviceList must be called before the dialog is shown —
 * onViewCreated dereferences bondedDevices and would NPE after a framework
 * re-creation of the fragment (no state is saved in arguments).
 */
public class DeviceListFragment extends DialogFragment {

    // Callback invoked with the saved printer name when the user confirms.
    private Printama.OnConnectPrinter onConnectPrinter;
    // Devices to display; injected by the caller via setDeviceList.
    private Set<BluetoothDevice> bondedDevices;
    // Currently selected/saved printer name (null when none selected yet).
    private String mPrinterName;
    private Button saveButton;

    public DeviceListFragment() {
        // Required empty public constructor
    }

    public static DeviceListFragment newInstance() {
        DeviceListFragment fragment = new DeviceListFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_device_list, container, false);
    }

    public void setOnConnectPrinter(Printama.OnConnectPrinter onConnectPrinter) {
        this.onConnectPrinter = onConnectPrinter;
    }

    public void setDeviceList(Set<BluetoothDevice> bondedDevices) {
        this.bondedDevices = bondedDevices;
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        saveButton = view.findViewById(R.id.btn_save_printer);
        saveButton.setOnClickListener(v -> savePrinter());
        // Pre-select the previously saved printer, if any.
        mPrinterName = Pref.getString(Pref.SAVED_DEVICE);
        toggleSaveButton();
        RecyclerView rvDeviceList = view.findViewById(R.id.rv_device_list);
        rvDeviceList.setLayoutManager(new LinearLayoutManager(getContext()));
        ArrayList<BluetoothDevice> bluetoothDevices = new ArrayList<>(bondedDevices);
        DeviceListAdapter adapter = new DeviceListAdapter(bluetoothDevices, mPrinterName);
        rvDeviceList.setAdapter(adapter);
        // Track the user's selection and refresh the save button state.
        adapter.setOnConnectPrinter(printerName -> {
            this.mPrinterName = printerName;
            toggleSaveButton();
        });
    }

    // Green when a printer is selected, gray otherwise.
    // NOTE(review): getContext() can be null if called while detached — only
    // invoked from attached callbacks today; confirm before reusing elsewhere.
    private void toggleSaveButton() {
        if (mPrinterName != null) {
            saveButton.setBackgroundColor(ContextCompat.getColor(getContext(), R.color.colorGreen));
        } else {
            saveButton.setBackgroundColor(ContextCompat.getColor(getContext(), R.color.colorGray5));
        }
    }

    // Persists the selection, notifies the listener, and closes the dialog.
    private void savePrinter() {
        Pref.setString(Pref.SAVED_DEVICE, mPrinterName);
        if (onConnectPrinter != null) {
            onConnectPrinter.onConnectPrinter(mPrinterName);
        }
        dismiss();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.syncope.wa.starter.mapping;

import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.syncope.common.lib.to.OIDCRPClientAppTO;
import org.apache.syncope.common.lib.types.OIDCGrantType;
import org.apache.syncope.common.lib.types.OIDCResponseType;
import org.apache.syncope.common.lib.wa.WAClientApp;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.oidc.claims.OidcAddressScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcCustomScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcEmailScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcPhoneScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcProfileScopeAttributeReleasePolicy;
import org.apereo.cas.services.ChainingAttributeReleasePolicy;
import org.apereo.cas.services.OidcRegisteredService;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.RegisteredServiceAccessStrategy;
import org.apereo.cas.services.RegisteredServiceAttributeReleasePolicy;
import org.apereo.cas.services.RegisteredServiceAuthenticationPolicy;
import org.apereo.cas.services.ReturnMappedAttributeReleasePolicy;
import org.apereo.cas.util.spring.ApplicationContextProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;

/**
 * Maps a Syncope OIDC Relying Party client-app definition onto a CAS
 * {@link OidcRegisteredService}, wiring in the supplied authentication/access/
 * attribute-release policies and registering any custom claims with CAS
 * discovery under the "syncope" scope.
 */
@ClientAppMapFor(clientAppClass = OIDCRPClientAppTO.class)
public class OIDCRPClientAppTOMapper extends AbstractClientAppMapper {

    private static final Logger LOG = LoggerFactory.getLogger(OIDCRPClientAppTOMapper.class);

    // OIDC scope name under which Syncope-specific custom claims are released.
    private static final String CUSTOM_SCOPE = "syncope";

    @Override
    public RegisteredService map(
            final WAClientApp clientApp,
            final RegisteredServiceAuthenticationPolicy authenticationPolicy,
            final RegisteredServiceAccessStrategy accessStrategy,
            final RegisteredServiceAttributeReleasePolicy attributeReleasePolicy) {

        OIDCRPClientAppTO rp = OIDCRPClientAppTO.class.cast(clientApp.getClientAppTO());
        OidcRegisteredService service = new OidcRegisteredService();
        setCommon(service, rp);

        // CAS matches the service id against redirect URIs; join them with '|'.
        service.setServiceId(rp.getRedirectUris().stream().
                filter(Objects::nonNull).
                collect(Collectors.joining("|")));
        service.setClientId(rp.getClientId());
        service.setClientSecret(rp.getClientSecret());
        service.setSignIdToken(rp.isSignIdToken());
        if (!service.isSignIdToken()) {
            // Unsigned id tokens must advertise the "none" algorithm.
            service.setIdTokenSigningAlg("none");
        }
        service.setJwtAccessToken(rp.isJwtAccessToken());
        service.setSupportedGrantTypes(rp.getSupportedGrantTypes().stream().
                map(OIDCGrantType::name).collect(Collectors.toCollection(HashSet::new)));
        service.setSupportedResponseTypes(rp.getSupportedResponseTypes().stream().
                map(OIDCResponseType::getExternalForm).collect(Collectors.toCollection(HashSet::new)));
        if (rp.getSubjectType() != null) {
            service.setSubjectType(rp.getSubjectType().name());
        }
        service.setLogoutUrl(rp.getLogoutUri());

        setPolicies(service, authenticationPolicy, accessStrategy, attributeReleasePolicy);
        if (attributeReleasePolicy != null) {
            // Build a chain: mapped release attrs first (unless the supplied
            // policy already is one), then the standard OIDC scope policies.
            ChainingAttributeReleasePolicy chain = new ChainingAttributeReleasePolicy();
            if (attributeReleasePolicy instanceof ReturnMappedAttributeReleasePolicy) {
                chain.addPolicy(attributeReleasePolicy);
            } else {
                chain.addPolicy(new ReturnMappedAttributeReleasePolicy(clientApp.getReleaseAttrs()));
                chain.addPolicy(attributeReleasePolicy);
            }
            chain.addPolicy(new OidcProfileScopeAttributeReleasePolicy());
            chain.addPolicy(new OidcEmailScopeAttributeReleasePolicy());
            chain.addPolicy(new OidcAddressScopeAttributeReleasePolicy());
            chain.addPolicy(new OidcPhoneScopeAttributeReleasePolicy());

            // Anything not covered by the standard scopes is a custom claim.
            Set<String> customClaims = clientApp.getReleaseAttrs().values().stream().
                    map(Objects::toString).collect(Collectors.toCollection(HashSet::new));
            customClaims.removeAll(OidcProfileScopeAttributeReleasePolicy.ALLOWED_CLAIMS);
            customClaims.removeAll(OidcEmailScopeAttributeReleasePolicy.ALLOWED_CLAIMS);
            customClaims.removeAll(OidcAddressScopeAttributeReleasePolicy.ALLOWED_CLAIMS);
            customClaims.removeAll(OidcPhoneScopeAttributeReleasePolicy.ALLOWED_CLAIMS);
            if (!customClaims.isEmpty()) {
                ApplicationContext ctx = ApplicationContextProvider.getApplicationContext();
                if (ctx == null) {
                    LOG.warn("Could not locate the application context to add custom claims {}", customClaims);
                } else {
                    // Register the custom claims with CAS OIDC discovery,
                    // mutating the live CasConfigurationProperties bean.
                    CasConfigurationProperties properties = ctx.getBean(CasConfigurationProperties.class);
                    List<String> supportedClaims = properties.getAuthn().getOidc().getDiscovery().getClaims();
                    if (!supportedClaims.containsAll(customClaims)) {
                        properties.getAuthn().getOidc().getDiscovery().setClaims(
                                Stream.concat(supportedClaims.stream(), customClaims.stream()).
                                        distinct().collect(Collectors.toList()));
                    }
                    chain.addPolicy(new OidcCustomScopeAttributeReleasePolicy(
                            CUSTOM_SCOPE, customClaims.stream().collect(Collectors.toList())));
                }
            }
            service.setAttributeReleasePolicy(chain);
        }

        return service;
    }
}
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.ext.ui.spherical;

import static com.google.android.exoplayer2.util.GlUtil.checkGlError;

import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.video.spherical.Projection;
import java.nio.FloatBuffer;

/**
 * Utility class to render spherical meshes for video or images. Call {@link #init()} on the GL
 * thread when ready.
 */
/* package */ final class ProjectionRenderer {

  /**
   * Returns whether {@code projection} is supported. At least it should have left mesh and there
   * should be only one sub mesh per mesh.
   */
  public static boolean isSupported(Projection projection) {
    Projection.Mesh leftMesh = projection.leftMesh;
    Projection.Mesh rightMesh = projection.rightMesh;
    // Both eye meshes must consist of exactly one sub mesh that samples the video texture.
    return leftMesh.getSubMeshCount() == 1
        && leftMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID
        && rightMesh.getSubMeshCount() == 1
        && rightMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID;
  }

  // Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data.
  private static final String[] VERTEX_SHADER_CODE =
      new String[] {
        "uniform mat4 uMvpMatrix;",
        "uniform mat3 uTexMatrix;",
        "attribute vec4 aPosition;",
        "attribute vec2 aTexCoords;",
        "varying vec2 vTexCoords;",

        // Standard transformation.
        "void main() {",
        "  gl_Position = uMvpMatrix * aPosition;",
        "  vTexCoords = (uTexMatrix * vec3(aTexCoords, 1)).xy;",
        "}"
      };
  private static final String[] FRAGMENT_SHADER_CODE =
      new String[] {
        // This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.
        "#extension GL_OES_EGL_image_external : require",
        "precision mediump float;",

        // Standard texture rendering shader.
        "uniform samplerExternalOES uTexture;",
        "varying vec2 vTexCoords;",
        "void main() {",
        "  gl_FragColor = texture2D(uTexture, vTexCoords);",
        "}"
      };

  // Texture transform matrices. 3x3 matrices applied to the 2D texture coordinates (in homogeneous
  // form) by the vertex shader. Each one negates and offsets the V axis (flipping the texture
  // vertically); the TOP/BOTTOM and LEFT/RIGHT variants additionally scale/offset the coordinates
  // so that only one half of a packed stereo frame is sampled for the current eye.
  private static final float[] TEX_MATRIX_WHOLE = {
    1.0f, 0.0f, 0.0f,
    0.0f, -1.0f, 0.0f,
    0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_TOP = {
    1.0f, 0.0f, 0.0f,
    0.0f, -0.5f, 0.0f,
    0.0f, 0.5f, 1.0f
  };
  private static final float[] TEX_MATRIX_BOTTOM = {
    1.0f, 0.0f, 0.0f,
    0.0f, -0.5f, 0.0f,
    0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_LEFT = {
    0.5f, 0.0f, 0.0f,
    0.0f, -1.0f, 0.0f,
    0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_RIGHT = {
    0.5f, 0.0f, 0.0f,
    0.0f, -1.0f, 0.0f,
    0.5f, 1.0f, 1.0f
  };

  private int stereoMode;
  @Nullable private MeshData leftMeshData;
  @Nullable private MeshData rightMeshData;

  // Program related GL items. These are only valid if program != 0.
  private int program;
  private int mvpMatrixHandle;
  private int uTexMatrixHandle;
  private int positionHandle;
  private int texCoordsHandle;
  private int textureHandle;

  /**
   * Sets a {@link Projection} to be used.
   *
   * @param projection Contains the projection data to be rendered.
   * @see #isSupported(Projection)
   */
  public void setProjection(Projection projection) {
    // Unsupported projections are silently ignored; draw() will then be a no-op.
    if (!isSupported(projection)) {
      return;
    }
    stereoMode = projection.stereoMode;
    leftMeshData = new MeshData(projection.leftMesh.getSubMesh(0));
    // singleMesh content reuses the same mesh data for both eyes.
    rightMeshData =
        projection.singleMesh ? leftMeshData : new MeshData(projection.rightMesh.getSubMesh(0));
  }

  /** Initializes of the GL components. Must be called on the GL thread before {@link #draw}. */
  /* package */ void init() {
    program = GlUtil.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE);
    // Cache uniform/attribute locations so draw() doesn't have to look them up per frame.
    mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix");
    uTexMatrixHandle = GLES20.glGetUniformLocation(program, "uTexMatrix");
    positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
    texCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords");
    textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
  }

  /**
   * Renders the mesh. If the projection hasn't been set, does nothing. This must be called on the
   * GL thread.
   *
   * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh.
   * @param mvpMatrix The Model View Projection matrix.
   * @param rightEye Whether the right eye view should be drawn. If {@code false}, the left eye view
   *     is drawn.
   */
  /* package */ void draw(int textureId, float[] mvpMatrix, boolean rightEye) {
    MeshData meshData = rightEye ? rightMeshData : leftMeshData;
    if (meshData == null) {
      return;
    }

    // Configure shader.
    GLES20.glUseProgram(program);
    checkGlError();

    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glEnableVertexAttribArray(texCoordsHandle);
    checkGlError();

    // Pick the texture transform matching the stereo packing and the eye being drawn.
    float[] texMatrix;
    if (stereoMode == C.STEREO_MODE_TOP_BOTTOM) {
      texMatrix = rightEye ? TEX_MATRIX_BOTTOM : TEX_MATRIX_TOP;
    } else if (stereoMode == C.STEREO_MODE_LEFT_RIGHT) {
      texMatrix = rightEye ? TEX_MATRIX_RIGHT : TEX_MATRIX_LEFT;
    } else {
      texMatrix = TEX_MATRIX_WHOLE;
    }
    GLES20.glUniformMatrix3fv(uTexMatrixHandle, 1, false, texMatrix, 0);

    GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    GLES20.glUniform1i(textureHandle, 0);
    checkGlError();

    // Load position data.
    GLES20.glVertexAttribPointer(
        positionHandle,
        Projection.POSITION_COORDS_PER_VERTEX,
        GLES20.GL_FLOAT,
        false,
        Projection.POSITION_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT,
        meshData.vertexBuffer);
    checkGlError();

    // Load texture data.
    GLES20.glVertexAttribPointer(
        texCoordsHandle,
        Projection.TEXTURE_COORDS_PER_VERTEX,
        GLES20.GL_FLOAT,
        false,
        Projection.TEXTURE_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT,
        meshData.textureBuffer);
    checkGlError();

    // Render.
    GLES20.glDrawArrays(meshData.drawMode, 0, meshData.vertexCount);
    checkGlError();

    GLES20.glDisableVertexAttribArray(positionHandle);
    GLES20.glDisableVertexAttribArray(texCoordsHandle);
  }

  /** Cleans up the GL resources. */
  /* package */ void shutdown() {
    if (program != 0) {
      GLES20.glDeleteProgram(program);
    }
  }

  /** Per-eye vertex/texture buffers plus the GLES primitive type to draw them with. */
  private static class MeshData {
    private final int vertexCount;
    private final FloatBuffer vertexBuffer;
    private final FloatBuffer textureBuffer;
    @Projection.DrawMode private final int drawMode;

    public MeshData(Projection.SubMesh subMesh) {
      vertexCount = subMesh.getVertexCount();
      vertexBuffer = GlUtil.createBuffer(subMesh.vertices);
      textureBuffer = GlUtil.createBuffer(subMesh.textureCoords);

      // Map the projection's draw mode onto the equivalent GLES primitive type.
      switch (subMesh.mode) {
        case Projection.DRAW_MODE_TRIANGLES_STRIP:
          drawMode = GLES20.GL_TRIANGLE_STRIP;
          break;
        case Projection.DRAW_MODE_TRIANGLES_FAN:
          drawMode = GLES20.GL_TRIANGLE_FAN;
          break;
        case Projection.DRAW_MODE_TRIANGLES:
        default:
          drawMode = GLES20.GL_TRIANGLES;
          break;
      }
    }
  }
}
package com.vik.covid19vik;

import com.google.gson.Gson;

import java.util.LinkedList;
import java.util.Queue;

/**
 * Hand-rolled parser that converts a Johns Hopkins-style time-series CSV (header row of
 * "Province/State,Country/Region,Lat,Long,&lt;date&gt;,&lt;date&gt;,..." followed by one row per
 * country/province) into a {@link CountriesGlobal} object and serializes it to JSON with Gson.
 *
 * <p>The parser walks the raw CSV string with a single {@code cursor} index; the exact order of
 * the cursor increments is load-bearing, so be careful when modifying any of the loops below.
 */
class CountriesGlobalDataParse {

    // use time series pull methods to get data (in controller)
    /**
     * Parses the raw CSV into a CountriesGlobal, augments it with per-day new-case counts, and
     * returns the result as a JSON string.
     *
     * @param status status string stored verbatim on the result object
     * @param data   the full CSV payload (header row + one row per country/province)
     * @return JSON serialization of the parsed data, including computed new-case lists
     */
    static String parseDataToJSON(String status, String data) {
        CountriesGlobal countryWithDates = new CountriesGlobal();
        LinkedList<CountriesGlobal.Country> countries = new LinkedList<>();

        // set status
        countryWithDates.setStatus(status);

        // string of data points delineated by commas; first row are labels, second row and onward are data points
        // System.out.println(data);

        // loop over data until a given string equals "Long"
        // labelMaker accumulates characters of the current comma-delimited token.
        Queue<Character> labelMaker = new LinkedList<>();
        int cursor = 0;
        // labels delineated by a comma
        // when string equals "Long", skip that string and begin storing dates as strings
        int lengthOfCSV = data.length();
        System.out.println("length of CSV = " + lengthOfCSV);
        // Skip the fixed leading header columns; "Long" is the last non-date column, so once it is
        // consumed the cursor sits on the first date label.
        while (true) {
            while (data.charAt(cursor) != ',') {
                labelMaker.add(data.charAt(cursor));
                cursor++;
            }
            // Drain the queued characters into the completed label token.
            StringBuilder label = new StringBuilder();
            while (labelMaker.peek() != null) {
                label.append(labelMaker.poll());
            }
            cursor++; // step past the delimiting comma
            if (label.toString().equals("Long")) {
                break;
            }
        }

        // store dates as array of strings and set dates
        LinkedList<String> dates = new LinkedList<>();
        while (true) {
            while (data.charAt(cursor) != ',') {
                labelMaker.add(data.charAt(cursor));
                cursor++;
            }
            StringBuilder date = new StringBuilder();
            while (labelMaker.peek() != null) {
                date.append(labelMaker.poll());
            }
            String newDate = date.toString();
            // The last date label runs into the row's trailing newline; strip it and stop.
            if (newDate.contains("\n")) {
                newDate = newDate.replace("\n", "");
                dates.add(newDate);
                // System.out.println(dates);
                break;
            }
            dates.add(newDate);
            cursor++;
        }
        // set dates
        countryWithDates.setDates(dates);

        // instantiate data of each country and store in array
        // Each iteration parses one data row: province, country, lat, lon, then the case counts.
        do {
            CountriesGlobal.Country countryInfo = new CountriesGlobal.Country();

            // adjust counter for new country, or if end of file break
            if (data.charAt(cursor) == '\n') {
                cursor++;
            }
            if (cursor == lengthOfCSV) {
                break;
            }

            // set province/state
            if (data.charAt(cursor) == ',') {
                // Empty first column: no province/state for this row.
                countryInfo.setProvinceOrState("");
                // System.out.println("province/state = empty");
            } else {
                StringBuilder provinceOrState = new StringBuilder();
                if (data.charAt(cursor) == '\"') {
                    // Quoted field: read up to the closing quote (may contain commas).
                    cursor++;
                    while (data.charAt(cursor) != '\"') {
                        provinceOrState.append(data.charAt(cursor));
                        cursor++;
                    }
                    cursor++; // skip closing quote
                } else {
                    while (data.charAt(cursor) != ',') {
                        provinceOrState.append(data.charAt(cursor));
                        cursor++;
                    }
                }
                countryInfo.setProvinceOrState(provinceOrState.toString());
                // System.out.println("province/state = " + provinceOrState.toString());
            }
            cursor++;

            // set country/region (same quoted/unquoted handling as province/state)
            if (data.charAt(cursor) == ',') {
                countryInfo.setCountryOrRegion("");
                // System.out.println("country/region = empty");
            } else {
                StringBuilder countryRegion = new StringBuilder();
                if (data.charAt(cursor) == '\"') {
                    cursor++;
                    while (data.charAt(cursor) != '\"') {
                        countryRegion.append(data.charAt(cursor));
                        cursor++;
                    }
                    cursor++;
                } else {
                    while (data.charAt(cursor) != ',') {
                        countryRegion.append(data.charAt(cursor));
                        cursor++;
                    }
                }
                countryInfo.setCountryOrRegion(countryRegion.toString());
                // System.out.println("country/region = " + countryRegion.toString());
            }
            cursor++;

            // set lat/long
            StringBuilder lat = new StringBuilder();
            while (data.charAt(cursor) != ',') {
                lat.append(data.charAt(cursor));
                cursor++;
            }
            float l = Float.parseFloat(lat.toString());
            countryInfo.setLat(l);
            // System.out.println(l);
            cursor++;
            StringBuilder lon = new StringBuilder();
            while (data.charAt(cursor) != ',') {
                lon.append(data.charAt(cursor));
                cursor++;
            }
            l = Float.parseFloat(lon.toString());
            // System.out.println(l);
            countryInfo.setLon(l);
            cursor++;

            // set case data
            LinkedList<Integer> timeSeriesCases = new LinkedList<>();
            // loop until all data points added
            // A data point ends at a comma, at the row's newline, or at end of input. Note the
            // short-circuit: cursor == lengthOfCSV is checked first so charAt is never called
            // past the end of the string.
            while (true) {
                StringBuilder noOfCasesB = new StringBuilder();
                while (data.charAt(cursor) != ',') {
                    noOfCasesB.append(data.charAt(cursor));
                    cursor++;
                    if (cursor == lengthOfCSV || data.charAt(cursor) == '\n') {
                        break;
                    }
                }
                String noOfCasesS = noOfCasesB.toString();
                int noOfCases = Integer.parseInt(noOfCasesS);
                timeSeriesCases.add(noOfCases);
                if (cursor == lengthOfCSV || data.charAt(cursor) == '\n') {
                    break;
                }
                cursor++;
            }
            // System.out.println(timeSeriesCases);
            countryInfo.setTotalCases(timeSeriesCases);

            // set empty list for new cases; being added later
            LinkedList<Integer> timeSeriesNewCases = new LinkedList<>();
            countryInfo.setNewCases(timeSeriesNewCases);

            // add to list of countries
            countries.add(countryInfo);
            countryWithDates.setCountries(countries);
        } while (cursor < lengthOfCSV);

        // add in new case data and parse to json
        CountriesGlobal countryWithDatesNewCases = addNewCaseList(countryWithDates);
        Gson gson = new Gson();
        return gson.toJson(countryWithDatesNewCases);
    }

    /**
     * Convenience wrapper: parses the CSV to JSON and immediately deserializes it back into a
     * {@link CountriesGlobal} instance.
     */
    static CountriesGlobal fromJSON(String status, String data) {
        String json = parseDataToJSON(status, data);
        Gson gson = new Gson();
        return gson.fromJson(json, CountriesGlobal.class);
    }

    /**
     * Fills each country's new-case list from its cumulative totals: day 0 gets 0, every later
     * day gets the difference between that day's total and the previous day's total. Mutates and
     * returns the argument.
     */
    private static CountriesGlobal addNewCaseList(CountriesGlobal withoutNewCases) {
        for (CountriesGlobal.Country country : withoutNewCases.getCountries()) {
            // for every country, get total case data, iterate through each, and subtract the difference between the current day's cases and cases from previous day
            // to get new cases for current day
            // add this difference as a new item in the new cases list
            LinkedList<Integer> totalCases = country.getTotalCases();
            LinkedList<Integer> newCases = country.getNewCases();
            for (int i = 0; i < totalCases.size(); i++) {
                if (i == 0) {
                    newCases.add(0);
                } else {
                    Integer newCase = totalCases.get(i) - totalCases.get(i - 1);
                    newCases.add(newCase);
                }
            }
            country.setNewCases(newCases);
        }
        return withoutNewCases;
    }
}
package br.com.webApp.servletpt1.controler; import java.util.Date; import java.util.Random; public class Nome { private String nome; private int id; Date data; public Nome (String nome, Date data){ this.nome = nome; this.data = data; Random random =new Random(); id = random.nextInt(1000); } public String getNome() { return nome; } public Date getData(){ return this.data; } public int getId() { return id; } }
package uk.sparkydiscordbot.api.event.server.member; import net.dv8tion.jda.api.JDA; import net.dv8tion.jda.api.entities.Guild; import net.dv8tion.jda.api.entities.User; import org.jetbrains.annotations.NotNull; public class MemberLeaveEvent extends MemberEvent { public MemberLeaveEvent(@NotNull(value = "guild cannot be null") final Guild server, @NotNull(value = "member cannot be null") final User user, @NotNull(value = "member cannot be null") final JDA jda) { super(null, user, server, jda); } }
/*
 * This file to You under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.org.ukfederation.uaattribute.resolver;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.opensaml.xml.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import uk.org.ukfederation.uaattribute.authn.UserAgentPrincipal;

import edu.internet2.middleware.shibboleth.common.attribute.BaseAttribute;
import edu.internet2.middleware.shibboleth.common.attribute.provider.BasicAttribute;
import edu.internet2.middleware.shibboleth.common.attribute.resolver.AttributeResolutionException;
import edu.internet2.middleware.shibboleth.common.attribute.resolver.provider.ShibbolethResolutionContext;
import edu.internet2.middleware.shibboleth.common.attribute.resolver.provider.dataConnector.BaseDataConnector;
import edu.internet2.middleware.shibboleth.common.session.Session;
import edu.internet2.middleware.shibboleth.idp.util.IPRange;

/**
 * A data connector that generates certain attributes/values based on the IP address of the user's user agent at the
 * time of authentication.
 */
public class UserAgentAttributeMapDataConnector extends BaseDataConnector {

    /** Class logger. */
    private final Logger log = LoggerFactory.getLogger(UserAgentAttributeMapDataConnector.class);

    /** Map from IP ranges to the attribute name/value pairs that they trigger. */
    private List<Pair<IPRange, Pair<String, String>>> attributeMappings;

    /**
     * Sets the mappings from IP ranges to attributes/values.
     *
     * @param mappings mappings from IP ranges to attributes/values
     */
    public void setAttributeMappings(List<Pair<IPRange, Pair<String, String>>> mappings) {
        attributeMappings = mappings;
    }

    /** {@inheritDoc} */
    public Map<String, BaseAttribute> resolve(ShibbolethResolutionContext resolutionContext)
            throws AttributeResolutionException {
        final UserAgentPrincipal uaPrincipal = getUserAgentPrincipal(resolutionContext);
        if (uaPrincipal == null) {
            return Collections.emptyMap();
        }

        byte[] uaAddress = uaPrincipal.getUserAgentAddress();

        // An address may fall in several configured ranges; every matching range contributes
        // its attribute/value pair to the result.
        HashMap<String, BaseAttribute> mappedAttributes = new HashMap<String, BaseAttribute>();
        for (Pair<IPRange, Pair<String, String>> mapping : attributeMappings) {
            if (mapping.getFirst().contains(uaAddress)) {
                addAttributeValue(mapping.getSecond(), mappedAttributes);
            }
        }

        return mappedAttributes;
    }

    /**
     * Extracts the {@link UserAgentPrincipal} from the given resolution context.
     *
     * @param resolutionContext current resolution context
     *
     * @return the extract principal or null if no such principal is associated with the current user
     */
    private UserAgentPrincipal getUserAgentPrincipal(ShibbolethResolutionContext resolutionContext) {
        final Session userSession = resolutionContext.getAttributeRequestContext().getUserSession();
        if (userSession == null) {
            log.debug("No user session available, unable to extract user agent information");
            return null;
        }

        final Set<UserAgentPrincipal> userAgentPrincipals =
                userSession.getSubject().getPrincipals(UserAgentPrincipal.class);
        if (userAgentPrincipals == null || userAgentPrincipals.isEmpty()) {
            log.debug("No user agent information associated with user session");
            return null;
        }

        if (userAgentPrincipals.size() > 1) {
            log.debug("Multiple user agent principals found, only the first will be used.");
        }

        return userAgentPrincipals.iterator().next();
    }

    /**
     * Adds the specified value for the specified attribute to the {@link BasicAttribute} found in the already mapped
     * attributes. If no {@link BasicAttribute} with the given ID exists, it is created and added to the mapped
     * attributes. Duplicate values for the same attribute are skipped.
     *
     * @param attributeDescriptor name/value pair of the attribute
     * @param mappedAttributes currently mapped attributes
     */
    private void addAttributeValue(Pair<String, String> attributeDescriptor,
            HashMap<String, BaseAttribute> mappedAttributes) {
        BaseAttribute attribute = mappedAttributes.get(attributeDescriptor.getFirst());
        if (attribute == null) {
            attribute = new BasicAttribute<String>(attributeDescriptor.getFirst());
            mappedAttributes.put(attributeDescriptor.getFirst(), attribute);
        }

        if (!attribute.getValues().contains(attributeDescriptor.getSecond())) {
            attribute.getValues().add(attributeDescriptor.getSecond());
        }
    }

    /** {@inheritDoc} */
    public void validate() throws AttributeResolutionException {
        // No-op: this connector has no configuration to validate beyond the injected mappings.
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.servicecomb.saga.alpha.server;

import static org.apache.servicecomb.saga.alpha.core.TaskStatus.DONE;
import static org.apache.servicecomb.saga.alpha.core.TaskStatus.NEW;
import static org.apache.servicecomb.saga.alpha.core.TaskStatus.PENDING;

import java.lang.invoke.MethodHandles;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import javax.transaction.Transactional;

import org.apache.servicecomb.saga.alpha.core.Command;
import org.apache.servicecomb.saga.alpha.core.CommandRepository;
import org.apache.servicecomb.saga.alpha.core.TxEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import kamon.annotation.EnableKamon;
import kamon.annotation.Segment;

/**
 * JPA-backed {@link CommandRepository} that persists compensation commands derived from
 * transaction events and tracks their lifecycle (NEW -> PENDING -> DONE).
 */
@EnableKamon
public class SpringCommandRepository implements CommandRepository {
  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  private final TxEventEnvelopeRepository eventRepository;
  private final CommandEntityRepository commandRepository;

  SpringCommandRepository(TxEventEnvelopeRepository eventRepository, CommandEntityRepository commandRepository) {
    this.eventRepository = eventRepository;
    this.commandRepository = commandRepository;
  }

  /**
   * Derives one compensation command per local transaction that started but was never
   * compensated, and persists each. A failure to save one command is logged (with the cause)
   * and does not stop the remaining commands from being saved.
   */
  @Override
  @Segment(name = "saveCompensationCommands", category = "application", library = "kamon")
  public void saveCompensationCommands(String globalTxId) {
    List<TxEvent> events = eventRepository
        .findStartedEventsWithMatchingEndedButNotCompensatedEvents(globalTxId);

    // LinkedHashMap preserves event order; only the first event per localTxId wins.
    Map<String, Command> commands = new LinkedHashMap<>();

    for (TxEvent event : events) {
      commands.computeIfAbsent(event.localTxId(), k -> new Command(event));
    }

    for (Command command : commands.values()) {
      LOG.info("Saving compensation command {}", command);
      try {
        commandRepository.save(command);
        // Only log success when the save actually succeeded (previously this was logged
        // unconditionally, even after a failure).
        LOG.info("Saved compensation command {}", command);
      } catch (Exception e) {
        // Attach the exception so the failure cause is not lost.
        LOG.warn("Failed to save some command {}", command, e);
      }
    }
  }

  /** Marks the command for the given global/local transaction pair as DONE. */
  @Override
  @Segment(name = "markCommandAsDone", category = "application", library = "kamon")
  public void markCommandAsDone(String globalTxId, String localTxId) {
    commandRepository.updateStatusByGlobalTxIdAndLocalTxId(DONE.name(), globalTxId, localTxId);
  }

  /** Returns the commands for the given global transaction that are still in the NEW state. */
  @Override
  @Segment(name = "findUncompletedCommands", category = "application", library = "kamon")
  public List<Command> findUncompletedCommands(String globalTxId) {
    return commandRepository.findByGlobalTxIdAndStatus(globalTxId, NEW.name());
  }

  /**
   * Atomically claims the next batch of commands to compensate: fetches the newest command per
   * global transaction with no pending sibling, then flips each from NEW to PENDING within the
   * same transaction so concurrent callers do not pick up the same command.
   */
  @Transactional
  @Override
  @Segment(name = "findFirstCommandToCompensate", category = "application", library = "kamon")
  public List<Command> findFirstCommandToCompensate() {
    List<Command> commands = commandRepository
        .findFirstGroupByGlobalTxIdWithoutPendingOrderByIdDesc();

    commands.forEach(command ->
        commandRepository.updateStatusByGlobalTxIdAndLocalTxId(
            NEW.name(),
            PENDING.name(),
            command.globalTxId(),
            command.localTxId()));

    return commands;
  }
}
package net.viap.viapcraft.mixin; import net.viap.viapcraft.Viapcraft; import net.minecraft.client.gui.screen.TitleScreen; import net.viap.viapcraft.Viapcraft; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; @Mixin(TitleScreen.class) public class ViapcraftMixin { @Inject(at = @At("HEAD"), method = "init()V") private void init(CallbackInfo info) { Viapcraft.LOGGER.info("This line is printed by an example mod mixin!"); } }
package com.purplesky.coldweather.gson; import com.google.gson.annotations.SerializedName; public class Basic { public class Update{ @SerializedName("loc") public String updateName; } @SerializedName("city") public String cityName; @SerializedName("id") public String weatherId; public Update update; }
/* Copyright 2015 Akexorcist Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.akexorcist.googledirection; import com.akexorcist.googledirection.request.GeocodeLatLngRequest; /** * Created by Akexorcist on 11/29/15 AD. */ public class GoogleGeocode { public static GeocodeLatLngRequest withServerKey(String apiKey) { return new GeocodeLatLngRequest(apiKey); } }
/*
 * Copyright 2018-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.gemfire.config.support;

import java.util.Optional;
import java.util.function.Predicate;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.lucene.LuceneIndex;
import org.apache.geode.cache.query.Index;

import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.core.type.MethodMetadata;
import org.springframework.data.gemfire.GenericRegionFactoryBean;
import org.springframework.data.gemfire.LocalRegionFactoryBean;
import org.springframework.data.gemfire.PartitionedRegionFactoryBean;
import org.springframework.data.gemfire.ReplicatedRegionFactoryBean;
import org.springframework.data.gemfire.client.ClientCacheFactoryBean;
import org.springframework.data.gemfire.client.ClientRegionFactoryBean;
import org.springframework.data.gemfire.client.PoolFactoryBean;
import org.springframework.data.gemfire.util.SpringUtils;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * The {@link AbstractDependencyStructuringBeanFactoryPostProcessor} class is a Spring {@link BeanFactoryPostProcessor}
 * post processing the Spring {@link BeanFactory} to help ensure that the dependencies between different Apache Geode
 * or Pivotal GemFire objects (e.g. {@link Region} and a {@link LuceneIndex} or an OQL {@link Index}) have been
 * properly declared in order to the lifecycle of those components are upheld according to Apache Geode
 * or Pivotal GemFire requirements/rules.
 *
 * @author John Blum
 * @see org.springframework.beans.factory.BeanFactory
 * @see org.springframework.beans.factory.config.BeanDefinition
 * @see org.springframework.beans.factory.config.BeanFactoryPostProcessor
 * @since 2.1.0
 */
@SuppressWarnings("unused")
public abstract class AbstractDependencyStructuringBeanFactoryPostProcessor implements BeanFactoryPostProcessor {

	/**
	 * Adds the given bean names to the {@literal depends-on} list of the given {@link BeanDefinition},
	 * delegating to {@link SpringUtils#addDependsOn(BeanDefinition, String...)}.
	 *
	 * @param beanDefinition {@link BeanDefinition} to modify.
	 * @param beanNames names of the beans this bean definition should depend on.
	 * @return the given {@link BeanDefinition}.
	 */
	protected BeanDefinition addDependsOn(BeanDefinition beanDefinition, String... beanNames) {
		return SpringUtils.addDependsOn(beanDefinition, beanNames);
	}

	/**
	 * Returns the value of the named property from the given {@link BeanDefinition},
	 * if that property was declared.
	 *
	 * @param beanDefinition {@link BeanDefinition} to inspect.
	 * @param propertyName name of the property to look up.
	 * @return an {@link Optional} containing the property value if present.
	 */
	protected Optional<Object> getPropertyValue(BeanDefinition beanDefinition, String propertyName) {
		return SpringUtils.getPropertyValue(beanDefinition, propertyName);
	}

	/**
	 * Determines whether the given {@link BeanDefinition} defines a bean of the given {@link Class} type.
	 *
	 * @param beanDefinition {@link BeanDefinition} to evaluate.
	 * @param type expected {@link Class} type of the bean; must not be {@literal null}.
	 * @return {@literal true} if the bean definition's type matches.
	 */
	protected boolean isBeanDefinitionOfType(BeanDefinition beanDefinition, Class<?> type) {

		Assert.notNull(type, "Class type must not be null");

		return isBeanDefinitionOfType(beanDefinition, typeName -> type.getName().equals(typeName));
	}

	/**
	 * Determines whether the given {@link BeanDefinition} defines a bean of the given type name.
	 *
	 * @param beanDefinition {@link BeanDefinition} to evaluate.
	 * @param typeName fully-qualified class name to match; {@literal null} is compared as the string "null".
	 * @return {@literal true} if the bean definition's type name matches.
	 */
	protected boolean isBeanDefinitionOfType(BeanDefinition beanDefinition, String typeName) {
		return isBeanDefinitionOfType(beanDefinition,
			typeNameArgument -> String.valueOf(typeName).equals(typeNameArgument));
	}

	/**
	 * Determines whether the given {@link BeanDefinition} defines a bean whose type name satisfies
	 * the given {@link Predicate}.
	 *
	 * <p>The declared bean class name is tested first; if the definition declares no (non-empty)
	 * bean class name, the return type of the definition's {@code @Bean} factory method is tested
	 * instead (only available when the definition is an {@link AnnotatedBeanDefinition}).
	 *
	 * @param beanDefinition {@link BeanDefinition} to evaluate.
	 * @param typeFilter {@link Predicate} applied to the resolved type name.
	 * @return {@literal true} if the resolved type name satisfies the filter; {@literal false} if
	 * no type name could be resolved.
	 */
	protected boolean isBeanDefinitionOfType(BeanDefinition beanDefinition, Predicate<String> typeFilter) {

		return Optional.of(beanDefinition)
			.map(it -> beanDefinition.getBeanClassName())
			.filter(StringUtils::hasText)
			.map(typeFilter::test)
			.orElseGet(() -> Optional.ofNullable(beanDefinition.getFactoryMethodName())
				.filter(StringUtils::hasText)
				.filter(it -> beanDefinition instanceof AnnotatedBeanDefinition)
				.map(it -> ((AnnotatedBeanDefinition) beanDefinition).getFactoryMethodMetadata())
				.map(MethodMetadata::getReturnTypeName)
				.map(typeFilter::test)
				.orElse(false)
			);
	}

	/** Returns {@literal true} if the given bean definition defines a {@link ClientCacheFactoryBean}. */
	protected boolean isClientCacheBean(BeanDefinition beanDefinition) {
		return isBeanDefinitionOfType(beanDefinition, ClientCacheFactoryBean.class);
	}

	/** Returns {@literal true} if the given bean definition defines a {@link ClientRegionFactoryBean}. */
	protected boolean isClientRegionBean(BeanDefinition beanDefinition) {
		return isBeanDefinitionOfType(beanDefinition, ClientRegionFactoryBean.class);
	}

	/** Returns {@literal true} if the given bean definition defines a {@link PoolFactoryBean}. */
	protected boolean isPoolBean(BeanDefinition beanDefinition) {
		return isBeanDefinitionOfType(beanDefinition, PoolFactoryBean.class);
	}

	/**
	 * Returns a {@link Predicate} matching the type name of any of the SDG Region factory bean
	 * types (generic, client, local, partitioned or replicated).
	 *
	 * @return a composed {@link Predicate} over fully-qualified Region factory bean class names.
	 */
	protected Predicate<String> isRegionBeanType() {

		Predicate<String> genericRegionBeanType =
			typeName -> GenericRegionFactoryBean.class.getName().equals(typeName);

		return genericRegionBeanType.or(typeName -> ClientRegionFactoryBean.class.getName().equals(typeName))
			.or(typeName -> LocalRegionFactoryBean.class.getName().equals(typeName))
			.or(typeName -> PartitionedRegionFactoryBean.class.getName().equals(typeName))
			.or(typeName -> ReplicatedRegionFactoryBean.class.getName().equals(typeName));
	}
}
package au.edu.aaf.shibext.config; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.mock.jndi.SimpleNamingContextBuilder; import javax.naming.NamingException; /** * Configuration for tests requiring an in-memory database. */ @Configuration public class EmbeddedDataSourceConfig { /** * Used for lookups in JNDI context. */ public static final String DATASOURCE_ID = "jdbc/DS_idp_admin"; /** * Creates an in-memory database and registers the datasource in JNDI. * * @return EmbeddedDatabase instance * @throws NamingException */ @Bean(destroyMethod = "shutdown") public EmbeddedDatabase dataSource() throws NamingException { EmbeddedDatabase dataSource = new EmbeddedDatabaseBuilder() .setType(EmbeddedDatabaseType.H2) .addScript("file:db/schema.sql") .addScript("seed.sql") .build(); SimpleNamingContextBuilder builder = new SimpleNamingContextBuilder(); builder.bind(DATASOURCE_ID, dataSource); builder.activate(); return dataSource; } }
package cd4017be.rs_ctr2.part;

import static cd4017be.lib.network.Sync.ALL;
import static cd4017be.lib.network.Sync.Type.Enum;
import static cd4017be.rs_ctr2.Content.led;
import static cd4017be.rs_ctr2.util.Utils.heldColor;
import static cd4017be.rs_ctr2.util.Utils.serverAction;

import com.mojang.blaze3d.matrix.MatrixStack;

import cd4017be.api.grid.IDynamicPart;
import cd4017be.api.grid.port.ISignalReceiver;
import cd4017be.lib.network.Sync;
import cd4017be.lib.part.OrientedPart;
import cd4017be.lib.render.GridModels;
import cd4017be.lib.util.Orientation;
import cd4017be.rs_ctr2.Main;
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.DyeColor;
import net.minecraft.item.Item;
import net.minecraft.network.PacketBuffer;
import net.minecraft.util.*;
import net.minecraft.util.math.BlockRayTraceResult;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;

/**
 * A grid part that lights up when its signal input is non-zero. Its color can be changed by
 * right-clicking with a dye; both the on/off state and the color are synced to all clients.
 *
 * @author CD4017BE
 */
public class LED extends OrientedPart implements IDynamicPart, ISignalReceiver {

	// Whether the LED is currently lit (input signal != 0); synced to all clients.
	@Sync(to = ALL) public boolean state;
	// Display color, set via dye interaction; synced to all clients.
	@Sync(to = ALL, type = Enum) public DyeColor color = DyeColor.RED;

	public LED() {
		super(1);
	}

	@Override
	public void set(int pos, Orientation orient) {
		super.set(pos, orient);
		// Single-cell part: bounds collapse to its own position, with one signal input port.
		setBounds(pos, pos);
		setPort(0, pos, Direction.NORTH, ISignalReceiver.TYPE_ID);
	}

	@Override
	public Item item() {
		return led;
	}

	@Override
	public byte getLayer() {
		return L_INNER;
	}

	@Override
	public ActionResultType onInteract(PlayerEntity player, Hand hand, BlockRayTraceResult hit, int pos) {
		if (hand == null) return super.onInteract(player, hand, hit, pos);
		// If the player holds a dye, recolor the LED (applied server side); otherwise pass.
		DyeColor color = heldColor(player, hand);
		return color != null ? serverAction(player, ()-> {
			this.color = color;
			host.onPartChange();
		}) : ActionResultType.PASS;
	}

	@Override
	public Object getHandler(int port) {
		return this;
	}

	@Override
	public void setHandler(int port, Object handler) {}

	@Override
	public boolean isMaster(int port) {
		return false;
	}

	@Override
	public void updateInput(int value, int rec) {
		// Assigns the new on/off state inline; the XOR with the old value is true exactly when
		// the state changed, in which case the host display is refreshed.
		if (state ^ (state = value != 0) && host != null)
			host.updateDisplay();
	}

	public static final ResourceLocation LED = Main.rl("part/led");

	@Override
	@OnlyIn(Dist.CLIENT)
	protected ResourceLocation model() {
		return Switch.BASE;
	}

	@Override
	@OnlyIn(Dist.CLIENT)
	public void render(
		MatrixStack ms, IRenderTypeBuffer rtb, int light, int overlay,
		float t, long opaque
	) {
		// Skip rendering when this part's cells are fully occluded by opaque neighbors.
		if ((bounds & ~opaque) == 0) return;
		ms.pushPose();
		transform(ms);
		// Dye color with forced opaque alpha. When off, the RGB channels are shifted/masked
		// (>> 2 & 0xff3f3f3f) to render a dimmed version of the same color.
		int color = this.color.getTextColor() | 0xff000000;
		GridModels.draw(
			LED, ms.last(), rtb.getBuffer(RenderType.solid()),
			state ? color : color >> 2 & 0xff3f3f3f,
			// When lit, 0xf0 overrides the world light level (glow effect — TODO confirm
			// against the lightmap convention); otherwise ambient light is used.
			state ? 0xf0 : light, overlay
		);
		ms.popPose();
	}

	@Override
	public void readSync(PacketBuffer pkt) {
		state = pkt.readBoolean();
	}

	@Override
	public void writeSync(PacketBuffer pkt, boolean init) {
		pkt.writeBoolean(state);
	}
}
package com.kotakotik.createautomated.compat.kubejs;

import com.jozufozu.flywheel.core.PartialModel;
import com.kotakotik.createautomated.CreateAutomated;
import com.kotakotik.createautomated.compat.kubejs.item.drillHead.item.DrillHeadBuilderJS;
import com.kotakotik.createautomated.compat.kubejs.item.drillHead.item.DrillHeadItemJS;
import com.kotakotik.createautomated.compat.kubejs.item.drillHead.item.DrillHeadRegistryEventJS;
import com.kotakotik.createautomated.compat.kubejs.item.drillHead.partial.DrillPartialRegistryEventJS;
import com.kotakotik.createautomated.compat.kubejs.recipe.ExtractingJS;
import com.kotakotik.createautomated.compat.kubejs.recipe.PickingJS;
import dev.latvian.kubejs.script.ScriptType;
import net.minecraft.item.Item;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.fml.DeferredWorkQueue;
import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext;

import java.util.LinkedHashMap;
import java.util.Map;

import static dev.latvian.kubejs.recipe.RegisterRecipeHandlersEvent.EVENT;

/**
 * KubeJS integration entry point: registers the mod's recipe handlers,
 * collects script-defined drill heads/partials, and registers them with Forge.
 */
public class CAKubeJS {

    /** Drill head builders collected from startup scripts, keyed by item id. */
    public static final Map<ResourceLocation, DrillHeadBuilderJS> DRILL_HEADS = new LinkedHashMap<>();
    /** Drill partial model locations collected from client scripts. */
    public static final Map<ResourceLocation, ResourceLocation> DRILL_PARTIALS = new LinkedHashMap<>();
    /** Partial models built from {@link #DRILL_PARTIALS}, keyed by id. */
    public static final Map<ResourceLocation, PartialModel> BUILD_DRILL_PARTIALS = new LinkedHashMap<>();

    public CAKubeJS() {
        // expose the custom recipe types to KubeJS scripts
        EVENT.register(event -> {
            event.register(CreateAutomated.asResource("extracting"), ExtractingJS::new);
            event.register(CreateAutomated.asResource("picking"), PickingJS::new);
        });
        // hook item registration so script-declared drill heads become real items
        FMLJavaModLoadingContext.get().getModEventBus().addGenericListener(Item.class, this::registerDrillHeads);
        // fire the registry events once mod loading allows deferred work
        DeferredWorkQueue.runLater(() -> {
            new DrillHeadRegistryEventJS().post(ScriptType.STARTUP, "item.registry.drillhead");
            new DrillPartialRegistryEventJS().post(ScriptType.CLIENT, "partial.registry.drillhead");
        });
    }

    /**
     * Creates one {@link DrillHeadItemJS} per collected builder and registers it
     * under the builder's id.
     */
    public void registerDrillHeads(final RegistryEvent.Register<Item> event) {
        for (Map.Entry<ResourceLocation, DrillHeadBuilderJS> entry : DRILL_HEADS.entrySet()) {
            DrillHeadItemJS drillHead = new DrillHeadItemJS(entry.getValue());
            drillHead.setRegistryName(entry.getKey());
            event.getRegistry().register(drillHead);
        }
    }
}
package us.ihmc.footstepPlanning.testTools; import static org.junit.Assert.assertTrue; import javax.vecmath.Point2d; import javax.vecmath.Point3d; import javax.vecmath.Vector3d; import us.ihmc.footstepPlanning.AnytimeFootstepPlanner; import us.ihmc.footstepPlanning.FootstepPlan; import us.ihmc.footstepPlanning.FootstepPlanner; import us.ihmc.footstepPlanning.FootstepPlannerGoal; import us.ihmc.footstepPlanning.FootstepPlannerGoalType; import us.ihmc.footstepPlanning.FootstepPlanningResult; import us.ihmc.footstepPlanning.SimpleFootstep; import us.ihmc.graphicsDescription.Graphics3DObject; import us.ihmc.graphicsDescription.appearance.AppearanceDefinition; import us.ihmc.graphicsDescription.appearance.YoAppearance; import us.ihmc.graphicsDescription.yoGraphics.YoGraphicPolygon; import us.ihmc.graphicsDescription.yoGraphics.YoGraphicPosition; import us.ihmc.graphicsDescription.yoGraphics.YoGraphicVector; import us.ihmc.graphicsDescription.yoGraphics.YoGraphicsListRegistry; import us.ihmc.robotics.dataStructures.registry.YoVariableRegistry; import us.ihmc.robotics.geometry.ConvexPolygon2d; import us.ihmc.robotics.geometry.ConvexPolygonTools; import us.ihmc.robotics.geometry.FramePose; import us.ihmc.robotics.geometry.FrameVector; import us.ihmc.robotics.geometry.PlanarRegionsList; import us.ihmc.robotics.geometry.RigidBodyTransform; import us.ihmc.robotics.math.frames.YoFrameConvexPolygon2d; import us.ihmc.robotics.math.frames.YoFramePoint; import us.ihmc.robotics.math.frames.YoFramePose; import us.ihmc.robotics.math.frames.YoFrameVector; import us.ihmc.robotics.referenceFrames.PoseReferenceFrame; import us.ihmc.robotics.referenceFrames.ReferenceFrame; import us.ihmc.robotics.robotSide.RobotSide; import us.ihmc.robotics.robotSide.SideDependentList; import us.ihmc.robotics.time.ExecutionTimer; import us.ihmc.simulationconstructionset.Robot; import us.ihmc.simulationconstructionset.SimulationConstructionSet; import us.ihmc.tools.io.printing.PrintTools; import 
us.ihmc.tools.thread.ThreadTools; public class PlanningTestTools { private static final ReferenceFrame worldFrame = ReferenceFrame.getWorldFrame(); private static final AppearanceDefinition[] appearances = { YoAppearance.White(), YoAppearance.Grey(), YoAppearance.DarkGray() }; public static ConvexPolygon2d createDefaultFootPolygon() { double footLength = 0.2; double footWidth = 0.1; ConvexPolygon2d footPolygon = new ConvexPolygon2d(); footPolygon.addVertex(footLength / 2.0, footWidth / 2.0); footPolygon.addVertex(footLength / 2.0, -footWidth / 2.0); footPolygon.addVertex(-footLength / 2.0, footWidth / 2.0); footPolygon.addVertex(-footLength / 2.0, -footWidth / 2.0); footPolygon.update(); return footPolygon; } public static SideDependentList<ConvexPolygon2d> createDefaultFootPolygons() { SideDependentList<ConvexPolygon2d> footPolygons = new SideDependentList<>(); for (RobotSide side : RobotSide.values) footPolygons.put(side, PlanningTestTools.createDefaultFootPolygon()); return footPolygons; } public static void visualizeAndSleep(PlanarRegionsList planarRegionsList, FootstepPlan footseps, FramePose goalPose) { visualizeAndSleep(planarRegionsList, footseps, goalPose, null, null); } public static void visualizeAndSleep(PlanarRegionsList planarRegionsList, FootstepPlan footseps) { visualizeAndSleep(planarRegionsList, footseps, null, null, null); } public static void visualizeAndSleep(PlanarRegionsList planarRegionsList, FootstepPlan footseps, FramePose goalPose, YoVariableRegistry registry, YoGraphicsListRegistry graphicsListRegistry) { SimulationConstructionSet scs = new SimulationConstructionSet(new Robot("Dummy")); if (registry != null) scs.addYoVariableRegistry(registry); if (graphicsListRegistry != null) scs.addYoGraphicsListRegistry(graphicsListRegistry, true); Graphics3DObject graphics3DObject = new Graphics3DObject(); graphics3DObject.addCoordinateSystem(0.3); if (planarRegionsList != null) graphics3DObject.addPlanarRegionsList(planarRegionsList, appearances); 
scs.addStaticLinkGraphics(graphics3DObject); YoVariableRegistry vizRegistry = new YoVariableRegistry("FootstepPlanningResult"); YoGraphicsListRegistry vizGraphicsListRegistry = new YoGraphicsListRegistry(); if (goalPose != null) addGoalViz(goalPose, vizRegistry, vizGraphicsListRegistry); if (footseps != null) { YoFrameConvexPolygon2d yoDefaultFootPolygon = new YoFrameConvexPolygon2d("DefaultFootPolygon", worldFrame, 4, vizRegistry); yoDefaultFootPolygon.setConvexPolygon2d(createDefaultFootPolygon()); int numberOfSteps = footseps.getNumberOfSteps(); for (int i = 0; i < numberOfSteps; i++) { SimpleFootstep footstep = footseps.getFootstep(i); FramePose footstepPose = new FramePose(); footstep.getSoleFramePose(footstepPose); AppearanceDefinition appearance = footstep.getRobotSide() == RobotSide.RIGHT ? YoAppearance.Green() : YoAppearance.Red(); YoFramePose yoFootstepPose = new YoFramePose("footPose" + i, worldFrame, vizRegistry); yoFootstepPose.set(footstepPose); if (!footstep.hasFoothold()) { YoGraphicPolygon footstepViz = new YoGraphicPolygon("footstep" + i, yoDefaultFootPolygon, yoFootstepPose, 1.0, appearance); vizGraphicsListRegistry.registerYoGraphic("viz", footstepViz); } else { YoGraphicPolygon fullFootstepViz = new YoGraphicPolygon("fullFootstep" + i, yoDefaultFootPolygon, yoFootstepPose, 1.0, YoAppearance.Glass(0.7)); vizGraphicsListRegistry.registerYoGraphic("viz", fullFootstepViz); ConvexPolygon2d foothold = new ConvexPolygon2d(); footstep.getFoothold(foothold); ConvexPolygonTools.limitVerticesConservative(foothold, 4); YoFrameConvexPolygon2d yoFoothold = new YoFrameConvexPolygon2d("Foothold" + i, worldFrame, 4, vizRegistry); yoFoothold.setConvexPolygon2d(foothold); YoGraphicPolygon footstepViz = new YoGraphicPolygon("footstep" + i, yoFoothold, yoFootstepPose, 1.0, appearance); vizGraphicsListRegistry.registerYoGraphic("viz", footstepViz); } } } scs.addYoVariableRegistry(vizRegistry); scs.addYoGraphicsListRegistry(vizGraphicsListRegistry, true); 
scs.startOnAThread(); ThreadTools.sleepForever(); } public static void addGoalViz(FramePose goalPose, YoVariableRegistry registry, YoGraphicsListRegistry graphicsListRegistry) { YoFramePoint yoGoal = new YoFramePoint("GoalPosition", worldFrame, registry); yoGoal.set(goalPose.getFramePointCopy()); graphicsListRegistry.registerYoGraphic("viz", new YoGraphicPosition("GoalViz", yoGoal, 0.05, YoAppearance.White())); PoseReferenceFrame goalFrame = new PoseReferenceFrame("GoalFrame", goalPose); FrameVector goalOrientation = new FrameVector(goalFrame, 0.5, 0.0, 0.0); goalOrientation.changeFrame(worldFrame); YoFrameVector yoGoalOrientation = new YoFrameVector("GoalVector", worldFrame, registry); yoGoalOrientation.set(goalOrientation); graphicsListRegistry.registerYoGraphic("vizOrientation", new YoGraphicVector("GoalOrientationViz", yoGoal, yoGoalOrientation, 1.0, YoAppearance.White())); } public static FootstepPlan runPlanner(FootstepPlanner planner, FramePose initialStanceFootPose, RobotSide initialStanceSide, FramePose goalPose, PlanarRegionsList planarRegionsList) { return runPlanner(planner, initialStanceFootPose, initialStanceSide, goalPose, planarRegionsList, true); } public static FootstepPlan runPlanner(FootstepPlanner planner, FramePose initialStanceFootPose, RobotSide initialStanceSide, FramePose goalPose, PlanarRegionsList planarRegionsList, boolean assertPlannerReturnedResult) { FootstepPlannerGoal goal = new FootstepPlannerGoal(); goal.setFootstepPlannerGoalType(FootstepPlannerGoalType.POSE_BETWEEN_FEET); goal.setGoalPoseBetweenFeet(goalPose); goal.setXYGoal(new Point2d(goalPose.getX(), goalPose.getY()), 0.5); return runPlanner(planner, initialStanceFootPose, initialStanceSide, goal, planarRegionsList, assertPlannerReturnedResult); } public static FootstepPlan runPlanner(FootstepPlanner planner, FramePose initialStanceFootPose, RobotSide initialStanceSide, FootstepPlannerGoal goal, PlanarRegionsList planarRegionsList, boolean assertPlannerReturnedResult) { 
planner.setInitialStanceFoot(initialStanceFootPose, initialStanceSide); planner.setGoal(goal); planner.setPlanarRegions(planarRegionsList); ExecutionTimer timer = new ExecutionTimer("Timer", 0.0, new YoVariableRegistry("Timer")); timer.startMeasurement(); FootstepPlanningResult result = planner.plan(); timer.stopMeasurement(); PrintTools.info("Planning took " + timer.getCurrentTime().getDoubleValue() + "s"); FootstepPlan footstepPlan = planner.getPlan(); if (assertPlannerReturnedResult) assertTrue("Planner was not able to provide valid result.", result.validForExecution()); return footstepPlan; } public static void configureAnytimePlannerRunnable(final AnytimeFootstepPlanner planner, FramePose initialStanceFootPose, RobotSide initialStanceSide, FramePose goalPose, PlanarRegionsList planarRegionsList) { FootstepPlannerGoal goal = new FootstepPlannerGoal(); goal.setFootstepPlannerGoalType(FootstepPlannerGoalType.POSE_BETWEEN_FEET); goal.setGoalPoseBetweenFeet(goalPose); planner.setInitialStanceFoot(initialStanceFootPose, initialStanceSide); planner.setGoal(goal); planner.setPlanarRegions(planarRegionsList); } public static boolean isGoalNextToLastStep(FramePose goalPose, FootstepPlan footstepPlan) { return isGoalNextToLastStep(goalPose, footstepPlan, 0.5); } public static boolean isGoalNextToLastStep(FramePose goalPose, FootstepPlan footstepPlan, double epsilon) { int steps = footstepPlan.getNumberOfSteps(); if (steps < 1) throw new RuntimeException("Did not get enough footsteps to check if goal is within feet."); SimpleFootstep footstep = footstepPlan.getFootstep(steps - 1); FramePose stepPose = new FramePose(); footstep.getSoleFramePose(stepPose); RobotSide stepSide = footstep.getRobotSide(); double midFeetOffset = stepSide.negateIfLeftSide(0.125); Vector3d goalOffset = new Vector3d(0.0, midFeetOffset , 0.0); RigidBodyTransform soleToWorld = new RigidBodyTransform(); stepPose.getRigidBodyTransform(soleToWorld); soleToWorld.transform(goalOffset); FramePose 
achievedGoal = new FramePose(stepPose); Point3d goalPosition = new Point3d(); achievedGoal.getPosition(goalPosition); goalPosition.add(goalOffset); achievedGoal.setPosition(goalPosition); if (achievedGoal.epsilonEquals(goalPose, epsilon)) return true; else return false; } }
<%#
 Copyright 2013-2018 the original author or authors from the JHipster project.

 This file is part of the JHipster project, see https://jhipster.github.io/
 for more information.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-%>
package <%=packageName%>.security.oauth2;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import javax.servlet.http.Cookie;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Test the CookieCollection.
 *
 * @see CookieCollection
 */
public class CookieCollectionTest {
    public static final String COOKIE_NAME = "chocolate";
    public static final String COOKIE_VALUE = "yummy";
    public static final String BROWNIE_NAME = "brownie";

    // fixture cookies recreated before each test
    private Cookie cookie;
    private Cookie cupsCookie;
    private Cookie brownieCookie;

    @Before
    public void setUp() throws Exception {
        cookie = new Cookie(COOKIE_NAME, COOKIE_VALUE);
        cupsCookie = new Cookie("cups", "delicious");
        brownieCookie = new Cookie(BROWNIE_NAME, "mmh");
    }

    @After
    public void tearDown() throws Exception {
        // nothing to clean up; fixtures are plain objects
    }

    @Test
    public void size() throws Exception {
        CookieCollection cookies = new CookieCollection();
        Assert.assertEquals(0, cookies.size());
        cookies.add(cookie);
        Assert.assertEquals(1, cookies.size());
    }

    @Test
    public void isEmpty() throws Exception {
        CookieCollection cookies = new CookieCollection();
        Assert.assertTrue(cookies.isEmpty());
        cookies.add(cookie);
        Assert.assertFalse(cookies.isEmpty());
    }

    // contains() is expected to match both by Cookie object and by cookie name
    @Test
    public void contains() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        Assert.assertTrue(cookies.contains(cookie));
        Assert.assertTrue(cookies.contains(COOKIE_NAME));
        Assert.assertFalse(cookies.contains("yuck"));
    }

    @Test
    public void iterator() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        Iterator<Cookie> it = cookies.iterator();
        Assert.assertTrue(it.hasNext());
        Assert.assertEquals(cookie, it.next());
        Assert.assertFalse(it.hasNext());
    }

    @Test
    public void toArray() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        Cookie[] array = cookies.toArray();
        Assert.assertEquals(cookies.size(), array.length);
        Assert.assertEquals(cookie, array[0]);
    }

    // toArray(T[]) variant with a pre-sized destination array
    @Test
    public void toArray1() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        Cookie[] array = new Cookie[cookies.size()];
        cookies.toArray(array);
        Assert.assertEquals(cookies.size(), array.length);
        Assert.assertEquals(cookie, array[0]);
    }

    @Test
    public void add() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        Cookie newCookie = new Cookie(BROWNIE_NAME, "mmh");
        cookies.add(newCookie);
        Assert.assertEquals(2, cookies.size());
        Assert.assertTrue(cookies.contains(newCookie));
        Assert.assertTrue(cookies.contains(BROWNIE_NAME));
    }

    // adding a cookie with an existing name replaces the old cookie
    @Test
    public void addAgain() {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        Cookie white = new Cookie(COOKIE_NAME, "white");
        boolean modified = cookies.add(white);
        Assert.assertTrue(modified);
        Assert.assertEquals(white, cookies.get(COOKIE_NAME));
        Assert.assertTrue(cookies.contains(white));
        Assert.assertFalse(cookies.contains(cookie));
        Assert.assertTrue(cookies.contains(COOKIE_NAME));
    }

    @Test
    public void get() {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        Cookie c = cookies.get(COOKIE_NAME);
        Assert.assertNotNull(c);
        Assert.assertEquals(cookie, c);
    }

    @Test
    public void remove() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        cookies.remove(cookie);
        Assert.assertEquals(2, cookies.size());
        Assert.assertFalse(cookies.contains(cookie));
        Assert.assertFalse(cookies.contains(COOKIE_NAME));
        Assert.assertTrue(cookies.contains(brownieCookie));
        Assert.assertTrue(cookies.contains(BROWNIE_NAME));
    }

    // containsAll accepts mixed collections of Cookie objects or names
    @Test
    public void containsAll() throws Exception {
        List<Cookie> content = Arrays.asList(cookie, brownieCookie);
        CookieCollection cookies = new CookieCollection(content);
        Assert.assertTrue(cookies.containsAll(content));
        Assert.assertTrue(cookies.containsAll(Collections.singletonList(cookie)));
        Assert.assertFalse(cookies.containsAll(Arrays.asList(cookie, brownieCookie, cupsCookie)));
        Assert.assertTrue(cookies.containsAll(Arrays.asList(COOKIE_NAME, BROWNIE_NAME)));
    }

    @Test
    @SuppressWarnings("unchecked")
    public void addAll() throws Exception {
        CookieCollection cookies = new CookieCollection();
        List<Cookie> content = Arrays.asList(cookie, brownieCookie, cupsCookie);
        boolean modified = cookies.addAll(content);
        Assert.assertTrue(modified);
        Assert.assertEquals(3, cookies.size());
        Assert.assertTrue(cookies.containsAll(content));
        Assert.assertFalse(cookies.addAll(Collections.EMPTY_LIST));
    }

    @Test
    public void removeAll() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        boolean modified = cookies.removeAll(Arrays.asList(brownieCookie, cupsCookie));
        Assert.assertTrue(modified);
        Assert.assertEquals(1, cookies.size());
        Assert.assertFalse(cookies.contains(brownieCookie));
        Assert.assertFalse(cookies.contains(cupsCookie));
        Assert.assertFalse(cookies.removeAll(Collections.EMPTY_LIST));
    }

    // removeAll also accepts cookie names instead of Cookie objects
    @Test
    public void removeAllByName() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        boolean modified = cookies.removeAll(Arrays.asList(COOKIE_NAME, BROWNIE_NAME));
        Assert.assertTrue(modified);
        Assert.assertEquals(1, cookies.size());
        Assert.assertFalse(cookies.contains(brownieCookie));
        Assert.assertFalse(cookies.contains(cookie));
        Assert.assertFalse(cookies.removeAll(Collections.EMPTY_LIST));
    }

    @Test
    public void retainAll() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie, brownieCookie, cupsCookie);
        List<Cookie> content = Arrays.asList(cookie, brownieCookie);
        boolean modified = cookies.retainAll(content);
        Assert.assertTrue(modified);
        Assert.assertEquals(2, cookies.size());
        Assert.assertTrue(cookies.containsAll(content));
        Assert.assertFalse(cookies.contains(cupsCookie));
        Assert.assertFalse(cookies.retainAll(content));
    }

    @Test
    public void clear() throws Exception {
        CookieCollection cookies = new CookieCollection(cookie);
        cookies.clear();
        Assert.assertTrue(cookies.isEmpty());
    }
}
/******************************************************************************* * Copyright 2013-2019 Qaprosoft (http://www.qaprosoft.com). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.automation.carina.demo.db.mappers; import com.automation.carina.demo.db.models.UserPreference; public interface UserPreferenceMapper { void create(UserPreference userPreference); UserPreference findById(Long id); }
/*
 * Copyright 2016-2020 The OpenZipkin Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package zipkin2.collector.sqs;

import com.amazonaws.AbortedException;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.model.DeleteMessageBatchRequestEntry;
import com.amazonaws.services.sqs.model.DeleteMessageBatchResult;
import com.amazonaws.services.sqs.model.Message;
import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
import com.amazonaws.util.Base64;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
import zipkin2.Callback;
import zipkin2.CheckResult;
import zipkin2.Component;
import zipkin2.collector.Collector;
import zipkin2.collector.CollectorMetrics;

/**
 * Long-polls an SQS queue for span payloads, forwards them to the {@link Collector},
 * and batch-deletes messages that were accepted (or that can never be accepted).
 * One instance is run per polling thread until the owning {@code SQSCollector} closes.
 */
final class SQSSpanProcessor extends Component implements Runnable {

  private static final Logger logger = Logger.getLogger(SQSSpanProcessor.class.getName());
  private static final Charset UTF_8 = Charset.forName("UTF-8");
  // receive-failure backoff: starts at DEFAULT_BACKOFF ms, doubles per failure, capped at MAX_BACKOFF ms
  private static final long DEFAULT_BACKOFF = 100;
  private static final long MAX_BACKOFF = 30000;

  final AmazonSQS client;
  final Collector collector;
  final CollectorMetrics metrics;
  final String queueUrl;
  /** Last health status, reported via {@link #check()}. */
  final AtomicReference<CheckResult> status = new AtomicReference<>(CheckResult.OK);
  /** Shared shutdown flag owned by the SQSCollector. */
  final AtomicBoolean closed;
  final ReceiveMessageRequest request;
  long failureBackoff = DEFAULT_BACKOFF;

  SQSSpanProcessor(SQSCollector sqsCollector) {
    client = sqsCollector.client();
    collector = sqsCollector.collector;
    metrics = sqsCollector.metrics;
    queueUrl = sqsCollector.queueUrl;
    closed = sqsCollector.closed;
    request =
        new ReceiveMessageRequest(queueUrl)
            .withWaitTimeSeconds(sqsCollector.waitTimeSeconds)
            .withMaxNumberOfMessages(sqsCollector.maxNumberOfMessages);
  }

  @Override
  public CheckResult check() {
    return status.get();
  }

  @Override
  public void close() {
    // the collector owns closing of its resources so noop here
  }

  /** Poll loop: receive, process, and back off exponentially on receive failures. */
  @Override
  public void run() {
    while (!closed.get()) {
      try {
        process(client.receiveMessage(request).getMessages());
        status.lazySet(CheckResult.OK);
        failureBackoff = DEFAULT_BACKOFF; // reset backoff after a successful receive
      } catch (AbortedException ae) {
        // thrown when the client is shut down mid-receive; the closed flag ends the loop
        status.lazySet(CheckResult.failed(ae));
      } catch (Exception e) {
        logger.log(Level.WARNING, "sqs receive failed", e);
        status.lazySet(CheckResult.failed(e));

        // backoff on failures to avoid pinging SQS in a tight loop if there are failures.
        try {
          Thread.sleep(failureBackoff);
        } catch (InterruptedException ie) {
          // restore the interrupt status so the owner can observe the interruption
          Thread.currentThread().interrupt();
        } finally {
          // fixed: was Math.max, which skipped straight to MAX_BACKOFF and then grew
          // without bound; Math.min doubles the delay up to the MAX_BACKOFF cap.
          failureBackoff = Math.min(failureBackoff * 2, MAX_BACKOFF);
        }
      }
    }
  }

  /**
   * Decodes each message body (plain JSON, or base64-encoded thrift/json), submits it to
   * the collector, and batch-deletes messages that were consumed or are unrecoverable.
   */
  private void process(final List<Message> messages) {
    if (messages.isEmpty()) return;

    final List<DeleteMessageBatchRequestEntry> toDelete = new ArrayList<>();
    int count = 0;
    for (Message message : messages) {
      final String deleteId = String.valueOf(count++);
      try {
        String stringBody = message.getBody();
        if (stringBody.isEmpty() || stringBody.equals("[]")) continue;
        // allow plain-text json, but permit base64 encoded thrift or json
        byte[] serialized =
            stringBody.charAt(0) == '[' ? stringBody.getBytes(UTF_8) : Base64.decode(stringBody);
        metrics.incrementMessages();
        metrics.incrementBytes(serialized.length);
        collector.acceptSpans(
            serialized,
            new Callback<Void>() {
              @Override
              public void onSuccess(Void value) {
                toDelete.add(
                    new DeleteMessageBatchRequestEntry(deleteId, message.getReceiptHandle()));
              }

              @Override
              public void onError(Throwable t) {
                logger.log(Level.WARNING, "collector accept failed", t);
                // for cases that are not recoverable just discard the message,
                // otherwise ignore so processing can be retried.
                if (t instanceof IllegalArgumentException) {
                  toDelete.add(
                      new DeleteMessageBatchRequestEntry(deleteId, message.getReceiptHandle()));
                }
              }
            });
      } catch (RuntimeException | Error e) {
        // malformed payloads will never decode, so delete rather than retry forever
        logger.log(Level.WARNING, "message decoding failed", e);
        toDelete.add(new DeleteMessageBatchRequestEntry(deleteId, message.getReceiptHandle()));
      }
    }

    if (!toDelete.isEmpty()) {
      delete(toDelete);
    }
  }

  private DeleteMessageBatchResult delete(List<DeleteMessageBatchRequestEntry> entries) {
    return client.deleteMessageBatch(queueUrl, entries);
  }

  @Override
  public final String toString() {
    return "SQSSpanProcessor{queueUrl=" + queueUrl + "}";
  }
}
package com.mapbox.rctmgl.components.mapview; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.PointF; import android.graphics.RectF; import android.location.Location; import android.os.Handler; import android.support.annotation.NonNull; import android.text.LoginFilter; import android.util.DisplayMetrics; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.view.MotionEvent; import com.facebook.react.bridge.ActivityEventListener; import com.facebook.react.bridge.LifecycleEventListener; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.WritableArray; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.WritableNativeArray; import com.facebook.react.bridge.WritableNativeMap; import com.mapbox.mapboxsdk.annotations.Marker; import com.mapbox.mapboxsdk.annotations.MarkerView; import com.mapbox.mapboxsdk.annotations.MarkerViewManager; import com.mapbox.mapboxsdk.camera.CameraPosition; import com.mapbox.mapboxsdk.camera.CameraUpdate; import com.mapbox.mapboxsdk.camera.CameraUpdateFactory; import com.mapbox.mapboxsdk.geometry.LatLng; import com.mapbox.mapboxsdk.geometry.VisibleRegion; import com.mapbox.mapboxsdk.maps.MapView; import com.mapbox.mapboxsdk.maps.MapboxMap; import com.mapbox.mapboxsdk.maps.MapboxMapOptions; import com.mapbox.mapboxsdk.maps.OnMapReadyCallback; import com.mapbox.mapboxsdk.maps.UiSettings; import com.mapbox.mapboxsdk.plugins.localization.LocalizationPlugin; import com.mapbox.mapboxsdk.plugins.locationlayer.LocationLayerMode; import com.mapbox.mapboxsdk.plugins.locationlayer.LocationLayerPlugin; import com.mapbox.mapboxsdk.storage.FileSource; import com.mapbox.mapboxsdk.style.layers.Layer; import com.mapbox.mapboxsdk.style.layers.Property; import 
com.mapbox.mapboxsdk.style.layers.PropertyFactory; import com.mapbox.rctmgl.components.AbstractMapFeature; import com.mapbox.rctmgl.components.annotation.RCTMGLCallout; import com.mapbox.rctmgl.components.annotation.RCTMGLCalloutAdapter; import com.mapbox.rctmgl.components.annotation.RCTMGLPointAnnotation; import com.mapbox.rctmgl.components.annotation.RCTMGLPointAnnotationAdapter; import com.mapbox.rctmgl.components.camera.CameraStop; import com.mapbox.rctmgl.components.camera.CameraUpdateQueue; import com.mapbox.rctmgl.components.mapview.helpers.CameraChangeTracker; import com.mapbox.rctmgl.components.styles.light.RCTMGLLight; import com.mapbox.rctmgl.components.styles.sources.RCTSource; import com.mapbox.rctmgl.events.AndroidCallbackEvent; import com.mapbox.rctmgl.events.IEvent; import com.mapbox.rctmgl.events.MapChangeEvent; import com.mapbox.rctmgl.events.MapClickEvent; import com.mapbox.rctmgl.events.MapUserTrackingModeEvent; import com.mapbox.rctmgl.events.constants.EventKeys; import com.mapbox.rctmgl.events.constants.EventTypes; import com.mapbox.rctmgl.location.LocationManager; import com.mapbox.rctmgl.location.UserLocation; import com.mapbox.rctmgl.location.UserLocationLayerConstants; import com.mapbox.rctmgl.location.UserLocationVerticalAlignment; import com.mapbox.rctmgl.location.UserTrackingMode; import com.mapbox.rctmgl.location.UserTrackingState; import com.mapbox.rctmgl.utils.BitmapUtils; import com.mapbox.rctmgl.utils.FilterParser; import com.mapbox.rctmgl.utils.GeoJSONUtils; import com.mapbox.rctmgl.utils.GeoViewport; import com.mapbox.rctmgl.utils.SimpleEventCallback; import com.mapbox.services.android.telemetry.permissions.PermissionsManager; import com.mapbox.services.commons.geojson.Feature; import com.mapbox.services.commons.geojson.FeatureCollection; import com.mapbox.services.commons.geojson.Point; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import 
javax.annotation.Nullable; /** * Created by nickitaliano on 8/18/17. */ @SuppressWarnings({"MissingPermission"}) public class RCTMGLMapView extends MapView implements OnMapReadyCallback, MapboxMap.OnMapClickListener, MapboxMap.OnMapLongClickListener, MapView.OnMapChangedListener, MapboxMap.OnMarkerViewClickListener { public static final String LOG_TAG = RCTMGLMapView.class.getSimpleName(); public static final int USER_LOCATION_CAMERA_MOVE_DURATION = 1000; private RCTMGLMapViewManager mManager; private Context mContext; private Handler mHandler; private LifecycleEventListener mLifeCycleListener; private boolean mPaused; private boolean mDestroyed; private List<AbstractMapFeature> mFeatures; private List<AbstractMapFeature> mQueuedFeatures; private Map<String, RCTMGLPointAnnotation> mPointAnnotations; private Map<String, RCTSource> mSources; private CameraUpdateQueue mCameraUpdateQueue; private CameraChangeTracker mCameraChangeTracker = new CameraChangeTracker(); private Map<Integer, ReadableArray> mPreRenderMethodMap = new HashMap<>(); private MapboxMap mMap; private LocationManager mLocationManger; private UserLocation mUserLocation; private LocationLayerPlugin mLocationLayer; private LocalizationPlugin mLocalizationPlugin; private String mStyleURL; private boolean mAnimated; private boolean mLocalizeLabels; private Boolean mScrollEnabled; private Boolean mPitchEnabled; private Boolean mRotateEnabled; private Boolean mAttributionEnabled; private Boolean mLogoEnabled; private Boolean mCompassEnabled; private Boolean mZoomEnabled; private boolean mShowUserLocation; private long mActiveMarkerID = -1; private int mUserTrackingMode; private int mUserTrackingState = UserTrackingState.POSSIBLE; private int mUserLocationVerticalAlignment = UserLocationVerticalAlignment.CENTER; private double mHeading; private double mPitch; private double mZoomLevel; private Double mMinZoomLevel; private Double mMaxZoomLevel; private ReadableArray mInsets; private Point mCenterCoordinate; 
    // Counts ACTION_MOVE events since the last ACTION_DOWN; used to suppress
    // region-will-change events while the user is actively dragging.
    private int mChangeDelimiterSuppressionDepth;

    // Receives device location fixes; updates the location layer and, when tracking, the camera.
    private LocationManager.OnUserLocationChange mLocationChangeListener = new LocationManager.OnUserLocationChange() {
        @Override
        public void onLocationChange(Location nextLocation) {
            if (mMap == null || mLocationLayer == null || !mShowUserLocation) {
                return;
            }

            float distToNextLocation = mUserLocation.getDistance(nextLocation);
            mLocationLayer.onLocationChanged(nextLocation);
            mUserLocation.setCurrentLocation(nextLocation);

            // Move the camera on the first fix (POSSIBLE) or whenever the user actually moved.
            if (mUserTrackingState == UserTrackingState.POSSIBLE || distToNextLocation > 0.0f) {
                updateUserLocation(true);
            }
            sendUserLocationUpdateEvent(nextLocation);
        }
    };

    public RCTMGLMapView(Context context, RCTMGLMapViewManager manager, MapboxMapOptions options) {
        super(context, options);

        mContext = context;
        // Drive the MapView lifecycle manually since React Native does not call these for us.
        onCreate(null);
        onStart();
        onResume();
        getMapAsync(this);

        mManager = manager;
        mCameraUpdateQueue = new CameraUpdateQueue();
        mUserLocation = new UserLocation();
        mLocationManger = new LocationManager(context);
        mLocationManger.setOnLocationChangeListener(mLocationChangeListener);

        mSources = new HashMap<>();
        mPointAnnotations = new HashMap<>();
        mQueuedFeatures = new ArrayList<>();
        mFeatures = new ArrayList<>();

        mHandler = new Handler();

        setLifecycleListeners();
        addOnMapChangedListener(this);
    }

    @Override
    public void onResume() {
        super.onResume();
        mPaused = false;
    }

    @Override
    public void onPause() {
        super.onPause();
        mPaused = true;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mDestroyed = true;
    }

    @Override
    public void onWindowFocusChanged(boolean hasWindowFocus) {
        super.onWindowFocusChanged(hasWindowFocus);
        if (mLocationLayer == null) {
            return;
        }
        // Pause/resume the location layer with window focus to save battery.
        if (hasWindowFocus) {
            mLocationLayer.onStart();
        } else {
            mLocationLayer.onStop();
        }
    }

    // True after the view has been detached once; lets us restart the MapView on re-attach.
    private boolean mDetached = false;

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (mDetached) {
            onStart();
            onResume();
            getMapAsync(this);
            mDetached = false;
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        mDetached = true;
    }

    /** Records a manager command to be replayed once the map has fully rendered. */
    public void enqueuePreRenderMapMethod(Integer methodID, @Nullable ReadableArray args) {
        mPreRenderMethodMap.put(methodID, args);
    }

    /**
     * Registers a React child as a map feature. Sources and annotations are also
     * indexed by ID; unknown ViewGroups are recursed into. Features added before
     * the map is ready are queued and flushed in onMapReady.
     */
    public void addFeature(View childView, int childPosition) {
        AbstractMapFeature feature = null;

        if (childView instanceof RCTSource) {
            RCTSource source = (RCTSource) childView;
            mSources.put(source.getID(), source);
            feature = (AbstractMapFeature) childView;
        } else if (childView instanceof RCTMGLLight) {
            feature = (AbstractMapFeature) childView;
        } else if (childView instanceof RCTMGLPointAnnotation) {
            RCTMGLPointAnnotation annotation = (RCTMGLPointAnnotation) childView;
            mPointAnnotations.put(annotation.getID(), annotation);
            feature = (AbstractMapFeature) childView;
        } else {
            ViewGroup children = (ViewGroup) childView;
            for (int i = 0; i < children.getChildCount(); i++) {
                addFeature(children.getChildAt(i), childPosition);
            }
        }

        if (feature != null) {
            if (mMap != null) {
                feature.addToMap(this);
                mFeatures.add(childPosition, feature);
            } else {
                mQueuedFeatures.add(childPosition, feature);
            }
        }
    }

    /** Removes the feature at the given child position from the map and from the relevant indexes. */
    public void removeFeature(int childPosition) {
        AbstractMapFeature feature;

        if (mQueuedFeatures != null && mQueuedFeatures.size() > 0) {
            feature = mQueuedFeatures.get(childPosition);
        } else {
            feature = mFeatures.get(childPosition);
        }

        if (feature == null) {
            return;
        }

        if (feature instanceof RCTSource) {
            RCTSource source = (RCTSource) feature;
            mSources.remove(source.getID());
        } else if (feature instanceof RCTMGLPointAnnotation) {
            RCTMGLPointAnnotation annotation = (RCTMGLPointAnnotation) feature;
            // Clear the selection if the removed annotation was the selected one.
            if (annotation.getMapboxID() == mActiveMarkerID) {
                mActiveMarkerID = -1;
            }
            mPointAnnotations.remove(annotation.getID());
        }

        feature.removeFromMap(this);

        if (mQueuedFeatures != null && mQueuedFeatures.size() > 0) {
            mQueuedFeatures.remove(feature);
        } else {
            mFeatures.remove(feature);
        }
    }

    /** Total number of features, queued plus added. */
    public int getFeatureCount() {
        int totalCount = 0;
        if (mQueuedFeatures != null) {
            totalCount = mQueuedFeatures.size();
        }
        totalCount += mFeatures.size();
        return totalCount;
    }

    public AbstractMapFeature getFeatureAt(int i) {
        if (mQueuedFeatures != null && mQueuedFeatures.size() > 0) {
            return mQueuedFeatures.get(i);
        }
        return mFeatures.get(i);
    }

    /** Tears down listeners, location tracking, and the MapView lifecycle. Safe to call once. */
    public synchronized void dispose() {
        if (mDestroyed) {
            return;
        }

        ReactContext reactContext = (ReactContext) mContext;
        reactContext.removeLifecycleEventListener(mLifeCycleListener);

        if(mLocationLayer != null){
            mLocationLayer.onStop();
        }
        mLocationManger.dispose();

        if (!mPaused) {
            onPause();
        }
        onStop();
        onDestroy();
    }

    /** Looks up an annotation by its React-side string ID, or null. */
    public RCTMGLPointAnnotation getPointAnnotationByID(String annotationID) {
        if (annotationID == null) {
            return null;
        }
        for (String key : mPointAnnotations.keySet()) {
            RCTMGLPointAnnotation annotation = mPointAnnotations.get(key);
            if (annotation != null && annotationID.equals(annotation.getID())) {
                return annotation;
            }
        }
        return null;
    }

    /** Looks up an annotation by its Mapbox marker ID, or null. */
    public RCTMGLPointAnnotation getPointAnnotationByMarkerID(long markerID) {
        for (String key : mPointAnnotations.keySet()) {
            RCTMGLPointAnnotation annotation = mPointAnnotations.get(key);
            if (annotation != null && markerID == annotation.getMapboxID()) {
                return annotation;
            }
        }
        return null;
    }

    public MapboxMap getMapboxMap() {
        return mMap;
    }

    //region Map Callbacks

    /**
     * Map is ready: wire adapters/listeners, apply props that arrived before
     * readiness, flush queued camera updates and features.
     */
    @Override
    public void onMapReady(final MapboxMap mapboxMap) {
        mMap = mapboxMap;
        reflow(); // the internal widgets(compass, attribution, etc) need this to position themselves correctly

        final MarkerViewManager markerViewManager = mMap.getMarkerViewManager();
        markerViewManager.addMarkerViewAdapter(new RCTMGLPointAnnotationAdapter(this, mContext));
        markerViewManager.setOnMarkerViewClickListener(this);

        mMap.setInfoWindowAdapter(new RCTMGLCalloutAdapter(this));
        mMap.setOnMapClickListener(this);
        mMap.setOnMapLongClickListener(this);

        // in case props were set before the map was ready lets set them
        updateInsets();
        updateUISettings();
        setMinMaxZoomLevels();

        if (mShowUserLocation) {
            enableLocation();
        }

        if (mCenterCoordinate != null && mUserTrackingMode == UserTrackingMode.NONE) {
            mMap.moveCamera(CameraUpdateFactory.newCameraPosition(buildCamera()), new MapboxMap.CancelableCallback() {
                @Override
                public void onCancel() {
                    sendRegionChangeEvent(false);
                }

                @Override
                public void onFinish() {
                    sendRegionChangeEvent(false);
                }
            });
        }

        if (!mCameraUpdateQueue.isEmpty()) {
            mCameraUpdateQueue.execute(mMap);
        }

        // Flush features that were added before the map existed; the queue is
        // nulled so later code paths fall through to mFeatures.
        if (mQueuedFeatures != null && mQueuedFeatures.size() > 0) {
            for (int i = 0; i < mQueuedFeatures.size(); i++) {
                AbstractMapFeature feature = mQueuedFeatures.get(i);
                feature.addToMap(this);
                mFeatures.add(feature);
            }
            mQueuedFeatures = null;
        }

        if (mPointAnnotations.size() > 0) {
            markerViewManager.invalidateViewMarkersInVisibleRegion();
        }

        final RCTMGLMapView self = this;

        mMap.addOnCameraIdleListener(new MapboxMap.OnCameraIdleListener() {
            long lastTimestamp = System.currentTimeMillis();
            boolean lastAnimated = false; // Workaround for the event called twice

            @Override
            public void onCameraIdle() {
                if (mPointAnnotations.size() > 0) {
                    markerViewManager.invalidateViewMarkersInVisibleRegion();
                }

                long curTimestamp = System.currentTimeMillis();
                boolean curAnimated = mCameraChangeTracker.isAnimated();

                // Debounce duplicate idle events (< 500ms apart, same animation flag).
                if (curTimestamp - lastTimestamp < 500 && curAnimated == lastAnimated) {
                    // Even if we don't send the change event, we need to reset the reason.
                    // This happens when setCamera is called multiple times in quick succession:
                    // this method short circuits, and without the reset the next user gesture
                    // would be misclassified as NOT a user interaction.
                    mCameraChangeTracker.setReason(-1);
                    return;
                }
                sendRegionChangeEvent(curAnimated);
                lastTimestamp = curTimestamp;
                lastAnimated = curAnimated;
            }
        });

        mMap.addOnCameraMoveStartedListener(new MapboxMap.OnCameraMoveStartedListener() {
            @Override
            public void onCameraMoveStarted(int reason) {
                // Only record the first reason of a gesture sequence.
                if (mCameraChangeTracker.isEmpty()) {
                    mCameraChangeTracker.setReason(reason);
                }
            }
        });

        // Any manual scroll or fling cancels user-location tracking.
        mMap.setOnScrollListener(new MapboxMap.OnScrollListener() {
            @Override
            public void onScroll() {
                if (mUserLocation.getTrackingMode() != UserTrackingMode.NONE) {
                    updateUserTrackingMode(UserTrackingMode.NONE);
                }
            }
        });

        mMap.setOnFlingListener(new MapboxMap.OnFlingListener() {
            @Override
            public void onFling() {
                if (mUserLocation.getTrackingMode() != UserTrackingMode.NONE) {
                    updateUserTrackingMode(UserTrackingMode.NONE);
                }
            }
        });

        mMap.addOnCameraMoveListener(new MapboxMap.OnCameraMoveListener() {
            double lastMapRotation = getMapRotation();

            @Override
            public void onCameraMove() {
                int userTrackingMode = mUserLocation.getTrackingMode();
                boolean isFollowWithCourseOrHeading = userTrackingMode == UserTrackingMode.FollowWithCourse || userTrackingMode == UserTrackingMode.FollowWithHeading;

                if (!isFollowWithCourseOrHeading) {
                    // NOTE(review): getRotation() is android.view.View's rotation, while
                    // lastMapRotation is otherwise fed from getMapRotation() (camera bearing).
                    // This looks like it should be getMapRotation() — confirm before changing.
                    lastMapRotation = getRotation();
                    return;
                }

                // If the user rotated the map while in a heading/course-follow mode,
                // downgrade to plain FOLLOW so we stop fighting the gesture.
                double currentMapRotation = getMapRotation();
                if (lastMapRotation != currentMapRotation && mCameraChangeTracker.isUserInteraction()) {
                    updateUserTrackingMode(UserTrackingMode.FOLLOW);
                }

                lastMapRotation = currentMapRotation;
            }
        });

        mLocalizationPlugin = new LocalizationPlugin(this, mMap);
        if (mLocalizeLabels) {
            try {
                mLocalizationPlugin.matchMapLanguageWithDeviceDefault();
            } catch (Exception e) {
                final String localeString = Locale.getDefault().toString();
                Log.w(LOG_TAG, String.format("Could not find matching locale for %s", localeString));
            }
        }
    }

    /** Forces a re-measure/re-layout pass; needed for the map's internal widgets to position. */
    public void reflow() {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                measure(
                        View.MeasureSpec.makeMeasureSpec(getMeasuredWidth(), View.MeasureSpec.EXACTLY),
                        View.MeasureSpec.makeMeasureSpec(getMeasuredHeight(), View.MeasureSpec.EXACTLY));
                layout(getLeft(), getTop(), getRight(), getBottom());
            }
        });
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        boolean result = super.onTouchEvent(ev);
        int eventAction = ev.getAction();

        // Track drag progress; only a sustained drag (> 2 moves) suppresses
        // region-will-change delimiters (see isSuppressingChangeDelimiters).
        if (eventAction == MotionEvent.ACTION_DOWN) {
            mChangeDelimiterSuppressionDepth = 0;
        } else if (eventAction == MotionEvent.ACTION_MOVE) {
            mChangeDelimiterSuppressionDepth++;
        } else if (eventAction == MotionEvent.ACTION_CANCEL) {
            mChangeDelimiterSuppressionDepth = 0;
        } else if (eventAction == MotionEvent.ACTION_UP) {
            mChangeDelimiterSuppressionDepth = 0;
        }

        // Keep parent views (e.g. ScrollView) from stealing the gesture while panning.
        if (result && mScrollEnabled) {
            requestDisallowInterceptTouchEvent(true);
        }

        return result;
    }

    private boolean isSuppressingChangeDelimiters() {
        return mChangeDelimiterSuppressionDepth > 2;
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        // Skip layout while paused; the GL surface may be gone.
        if (!mPaused) {
            super.onLayout(changed, left, top, right, bottom);
        }
    }

    /**
     * Tap handling: first deselect any active annotation, then hit-test touchable
     * sources (using their configured hitboxes), otherwise emit a plain map click.
     */
    @Override
    public void onMapClick(@NonNull LatLng point) {
        boolean isEventCaptured = false;

        if (mActiveMarkerID != -1) {
            for (String key : mPointAnnotations.keySet()) {
                RCTMGLPointAnnotation annotation = mPointAnnotations.get(key);
                if (mActiveMarkerID == annotation.getMapboxID()) {
                    isEventCaptured = deselectAnnotation(annotation);
                }
            }
        }

        if (isEventCaptured) {
            return;
        }

        PointF screenPoint = mMap.getProjection().toScreenLocation(point);
        List<RCTSource> touchableSources = getAllTouchableSources();

        Map<String, Feature> hits = new HashMap<>();
        List<RCTSource> hitTouchableSources = new ArrayList<>();
        for (RCTSource touchableSource : touchableSources) {
            Map<String, Double> hitbox = touchableSource.getTouchHitbox();
            if (hitbox == null) {
                continue;
            }

            float halfWidth = hitbox.get("width").floatValue() / 2.0f;
            float halfHeight = hitbox.get("height").floatValue() / 2.0f;

            RectF hitboxF = new RectF();
            hitboxF.set(
                    screenPoint.x - halfWidth,
                    screenPoint.y - halfHeight,
                    screenPoint.x + halfWidth,
                    screenPoint.y + halfHeight);

            List<Feature> features = mMap.queryRenderedFeatures(hitboxF, touchableSource.getLayerIDs());
            if (features.size() > 0) {
                hits.put(touchableSource.getID(), features.get(0));
                hitTouchableSources.add(touchableSource);
            }
        }

        if (hits.size() > 0) {
            // Multiple sources hit: deliver only to the visually topmost one.
            RCTSource source = getTouchableSourceWithHighestZIndex(hitTouchableSources);
            if (source != null && source.hasPressListener()) {
                source.onPress(hits.get(source.getID()));
                return;
            }
        }

        MapClickEvent event = new MapClickEvent(this, point, screenPoint);
        mManager.handleEvent(event);
    }

    @Override
    public void onMapLongClick(@NonNull LatLng point) {
        PointF screenPoint = mMap.getProjection().toScreenLocation(point);
        MapClickEvent event = new MapClickEvent(this, point, screenPoint, EventTypes.MAP_LONG_CLICK);
        mManager.handleEvent(event);
    }

    /** Marker tap: deselect the previously active annotation, select the tapped one. */
    @Override
    public boolean onMarkerClick(@NonNull Marker marker, @NonNull View view, @NonNull MapboxMap.MarkerViewAdapter adapter) {
        final long selectedMarkerID = marker.getId();

        RCTMGLPointAnnotation activeAnnotation = null;
        RCTMGLPointAnnotation nextActiveAnnotation = null;

        for (String key : mPointAnnotations.keySet()) {
            RCTMGLPointAnnotation annotation = mPointAnnotations.get(key);
            final long curMarkerID = annotation.getMapboxID();
            if (selectedMarkerID == curMarkerID) {
                nextActiveAnnotation = annotation;
            } else if (mActiveMarkerID == curMarkerID) {
                activeAnnotation = annotation;
            }
        }

        if (activeAnnotation != null) {
            deselectAnnotation(activeAnnotation);
        }

        if (nextActiveAnnotation != null) {
            selectAnnotation(nextActiveAnnotation);
        }

        return true;
    }

    /** Selects an annotation, showing its callout if one is attached. No-op if already active. */
    public void selectAnnotation(RCTMGLPointAnnotation annotation) {
        final long id = annotation.getMapboxID();

        if (id != mActiveMarkerID) {
            final MarkerView markerView = annotation.getMarker();
            mMap.selectMarker(markerView);
            annotation.onSelect(true);
            mActiveMarkerID = id;

            RCTMGLCallout calloutView = annotation.getCalloutView();
            if (!markerView.isInfoWindowShown() && calloutView != null) {
                markerView.showInfoWindow(mMap, this);
            }
        }
    }

    /**
     * Deselects an annotation, hiding its callout.
     * @return true if a callout was open (i.e. the tap was "captured" by the deselect).
     */
    public boolean deselectAnnotation(RCTMGLPointAnnotation annotation) {
        MarkerView markerView = annotation.getMarker();
        RCTMGLCallout calloutView = annotation.getCalloutView();

        if (calloutView != null) {
            markerView.hideInfoWindow();
        }

        mMap.deselectMarker(markerView);
        mActiveMarkerID = -1;
        annotation.onDeselect();

        return calloutView != null;
    }

    /** Translates raw MapView change codes into JS-facing MapChangeEvents. */
    @Override
    public void onMapChanged(int changed) {
        IEvent event = null;

        switch (changed) {
            case REGION_WILL_CHANGE:
                if (!isSuppressingChangeDelimiters()) {
                    event = new MapChangeEvent(this, makeRegionPayload(false), EventTypes.REGION_WILL_CHANGE);
                }
                break;
            case REGION_WILL_CHANGE_ANIMATED:
                if (!isSuppressingChangeDelimiters()) {
                    event = new MapChangeEvent(this, makeRegionPayload(true), EventTypes.REGION_WILL_CHANGE);
                }
                break;
            case REGION_IS_CHANGING:
                event = new MapChangeEvent(this, EventTypes.REGION_IS_CHANGING);
                break;
            case REGION_DID_CHANGE:
                // Region-did-change is emitted from the debounced camera-idle listener;
                // here we only record whether the change was animated.
                mCameraChangeTracker.setRegionChangeAnimated(false);
                break;
            case REGION_DID_CHANGE_ANIMATED:
                mCameraChangeTracker.setRegionChangeAnimated(true);
                break;
            case WILL_START_LOADING_MAP:
                event = new MapChangeEvent(this, EventTypes.WILL_START_LOADING_MAP);
                break;
            case DID_FAIL_LOADING_MAP:
                event = new MapChangeEvent(this, EventTypes.DID_FAIL_LOADING_MAP);
                break;
            case DID_FINISH_LOADING_MAP:
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_LOADING_MAP);
                break;
            case WILL_START_RENDERING_FRAME:
                event = new MapChangeEvent(this, EventTypes.WILL_START_RENDERING_FRAME);
                break;
            case DID_FINISH_RENDERING_FRAME:
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_RENDERING_FRAME);
                break;
            case DID_FINISH_RENDERING_FRAME_FULLY_RENDERED:
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_RENDERING_FRAME_FULLY);
                break;
            case WILL_START_RENDERING_MAP:
                event = new MapChangeEvent(this, EventTypes.WILL_START_RENDERING_MAP);
                break;
            case DID_FINISH_RENDERING_MAP:
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_RENDERING_MAP);
                break;
            case DID_FINISH_RENDERING_MAP_FULLY_RENDERED:
                // Replay any manager commands that arrived before the map was fully rendered.
                if (mPreRenderMethodMap.size() > 0) {
                    for (Integer methodID : mPreRenderMethodMap.keySet()) {
                        mManager.receiveCommand(this, methodID, mPreRenderMethodMap.get(methodID));
                    }
                    mPreRenderMethodMap.clear();
                }
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_RENDERING_MAP_FULLY);
                break;
            case DID_FINISH_LOADING_STYLE:
                event = new MapChangeEvent(this, EventTypes.DID_FINISH_LOADING_STYLE);
                break;
        }

        if (event != null) {
            mManager.handleEvent(event);
        }
    }

    //endregion

    //region Property getter/setters

    /** Prop: style URL. Sources must be detached before a style swap and re-attached after. */
    public void setReactStyleURL(String styleURL) {
        mStyleURL = styleURL;

        if (mMap != null) {
            removeAllSourcesFromMap();
            mMap.setStyle(styleURL, new MapboxMap.OnStyleLoadedListener() {
                @Override
                public void onStyleLoaded(String style) {
                    addAllSourcesToMap();
                }
            });
        }
    }

    public void setReactAnimated(boolean animated) {
        mAnimated = animated;
        updateCameraPositionIfNeeded(false);
    }

    public void setReactContentInset(ReadableArray array) {
        mInsets = array;
        updateInsets();
    }

    public void setLocalizeLabels(boolean localizeLabels) {
        mLocalizeLabels = localizeLabels;
    }

    public void setReactZoomEnabled(boolean zoomEnabled) {
        mZoomEnabled = zoomEnabled;
        updateUISettings();
    }

    public void setReactScrollEnabled(boolean scrollEnabled) {
        mScrollEnabled = scrollEnabled;
        updateUISettings();
    }

    public void setReactPitchEnabled(boolean pitchEnabled) {
        mPitchEnabled = pitchEnabled;
        updateUISettings();
    }

    public void setReactRotateEnabled(boolean rotateEnabled) {
        mRotateEnabled = rotateEnabled;
        updateUISettings();
    }

    public void setReactLogoEnabled(boolean logoEnabled) {
        mLogoEnabled = logoEnabled;
        updateUISettings();
    }

    public void setReactCompassEnabled(boolean compassEnabled) {
        mCompassEnabled = compassEnabled;
        updateUISettings();
    }

    public void setReactAttributionEnabled(boolean attributionEnabled) {
        mAttributionEnabled = attributionEnabled;
        updateUISettings();
    }

    public void setReactHeading(double heading) {
        mHeading = heading;
        updateCameraPositionIfNeeded(false);
    }

    public void setReactPitch(double pitch) {
        mPitch = pitch;
        updateCameraPositionIfNeeded(false);
    }

    public void setReactZoomLevel(double zoomLevel) {
        mZoomLevel = zoomLevel;
        updateCameraPositionIfNeeded(false);
    }

    public void setReactMinZoomLevel(double minZoomLevel) {
        mMinZoomLevel = minZoomLevel;
        setMinMaxZoomLevels();
    }

    public void setReactMaxZoomLevel(double maxZoomLevel) {
        mMaxZoomLevel = maxZoomLevel;
        setMinMaxZoomLevels();
    }

    public void setReactCenterCoordinate(Point centerCoordinate) {
        mCenterCoordinate = centerCoordinate;
        updateCameraPositionIfNeeded(true);
    }

    /** Prop: show the user-location puck. Turning it off also cancels any tracking mode. */
    public void setReactShowUserLocation(boolean showUserLocation) {
        mShowUserLocation = showUserLocation;

        if (mMap != null) {
            if (mLocationManger.isActive() && !mShowUserLocation) {
                mLocationManger.disable();

                if (mLocationLayer != null) {
                    int trackingMode = mUserLocation.getTrackingMode();
                    if (trackingMode != UserTrackingMode.NONE) {
                        mUserLocation.setTrackingMode(UserTrackingMode.NONE);
                        updateUserTrackingMode(UserTrackingMode.NONE);
                    }
                    updateLocationLayer();
                }
            } else {
                enableLocation();
            }
        }
    }

    /** Prop: user tracking mode. Any follow mode implicitly turns the puck on. */
    public void setReactUserTrackingMode(int userTrackingMode) {
        int oldTrackingMode = mUserTrackingMode;
        mUserTrackingMode = userTrackingMode;
        updateUserTrackingMode(userTrackingMode);

        switch (mUserTrackingMode) {
            case UserTrackingMode.NONE:
                mUserTrackingState = UserTrackingState.POSSIBLE;
                break;
            case UserTrackingMode.FOLLOW:
            case UserTrackingMode.FollowWithCourse:
            case UserTrackingMode.FollowWithHeading:
                // Re-arm the "significant move" state machine when tracking restarts.
                if (oldTrackingMode == UserTrackingMode.NONE) {
                    mUserTrackingState = UserTrackingState.POSSIBLE;
                }
                mShowUserLocation = true;
                break;
        }

        if (mMap != null) {
            updateUserLocation(false);
            updateLocationLayer();
        }
    }

    public void setReactUserLocationVerticalAlignment(int userLocationVerticalAlignment) {
        mUserLocationVerticalAlignment = userLocationVerticalAlignment;
        if (mMap != null && mUserLocation.getTrackingMode() != UserTrackingMode.NONE) {
            updateUserLocation(false);
        }
    }
    //endregion

    //region Methods

    /**
     * JS command: run a camera update (single stop, or a "stops" array animated in
     * sequence). The JS callback fires when the whole sequence finishes or cancels.
     */
    public void setCamera(String callbackID, ReadableMap args) {
        IEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        final SimpleEventCallback callback = new SimpleEventCallback(mManager, event);

        // remove any current camera updates
        mCameraUpdateQueue.flush();

        if (args.hasKey("stops")) {
            ReadableArray stops = args.getArray("stops");

            for (int i = 0; i < stops.size(); i++) {
                CameraStop stop = CameraStop.fromReadableMap(mContext, stops.getMap(i), null);
                mCameraUpdateQueue.offer(stop);
            }

            mCameraUpdateQueue.setOnCompleteAllListener(new CameraUpdateQueue.OnCompleteAllListener() {
                @Override
                public void onCompleteAll() {
                    callback.onFinish();
                    // NOTE(review): 3 appears to be an SDK-defined camera-move reason code
                    // (programmatic animation); confirm against CameraChangeTracker's constants.
                    mCameraChangeTracker.setReason(3);
                }
            });
        } else {
            CameraStop stop = CameraStop.fromReadableMap(mContext, args, new MapboxMap.CancelableCallback() {
                @Override
                public void onCancel() {
                    callback.onCancel();
                    // NOTE(review): 1 vs 3 here look like raw reason codes — confirm meaning.
                    mCameraChangeTracker.setReason(1);
                }

                @Override
                public void onFinish() {
                    callback.onFinish();
                    mCameraChangeTracker.setReason(3);
                }
            });
            mCameraUpdateQueue.offer(stop);
        }

        // if map is already ready start executing on the queue
        if (mMap != null) {
            mCameraUpdateQueue.execute(mMap);
        }
    }

    /** JS command: query rendered features at a screen point, answered via callback event. */
    public void queryRenderedFeaturesAtPoint(String callbackID, PointF point, FilterParser.FilterList filter, List<String> layerIDs) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        List<Feature> features = mMap.queryRenderedFeatures(point, FilterParser.parse(filter), layerIDs.toArray(new String[layerIDs.size()]));

        WritableMap payload = new WritableNativeMap();
        payload.putString("data", FeatureCollection.fromFeatures(features).toJson());
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /** JS command: report the current zoom level. */
    public void getZoom(String callbackID) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        CameraPosition position = mMap.getCameraPosition();

        WritableMap payload = new WritableNativeMap();
        payload.putDouble("zoom", position.zoom);
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /** JS command: query rendered features inside a screen rectangle. */
    public void queryRenderedFeaturesInRect(String callbackID, RectF rect, FilterParser.FilterList filter, List<String> layerIDs) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        List<Feature> features = mMap.queryRenderedFeatures(rect, FilterParser.parse(filter), layerIDs.toArray(new String[layerIDs.size()]));

        WritableMap payload = new WritableNativeMap();
        payload.putString("data", FeatureCollection.fromFeatures(features).toJson());
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /** JS command: report the currently visible lat/lng bounds. */
    public void getVisibleBounds(String callbackID) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        VisibleRegion region = mMap.getProjection().getVisibleRegion();

        WritableMap payload = new WritableNativeMap();
        payload.putArray("visibleBounds", GeoJSONUtils.fromLatLngBounds(region.latLngBounds));
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /** JS command: project a map coordinate to a screen point ([x, y]). */
    public void getPointInView(String callbackID, LatLng mapCoordinate) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        PointF pointInView = mMap.getProjection().toScreenLocation(mapCoordinate);

        WritableMap payload = new WritableNativeMap();
        WritableArray array = new WritableNativeArray();
        array.pushDouble(pointInView.x);
        array.pushDouble(pointInView.y);
        payload.putArray("pointInView", array);
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /** JS command: unproject a screen point to a map coordinate ([lng, lat]). */
    public void getCoordinateFromView(String callbackID, PointF pointInView) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        LatLng mapCoordinate = mMap.getProjection().fromScreenLocation(pointInView);

        WritableMap payload = new WritableNativeMap();
        WritableArray array = new WritableNativeArray();
        array.pushDouble(mapCoordinate.getLongitude());
        array.pushDouble(mapCoordinate.getLatitude());
        payload.putArray("coordinateFromView", array);
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    /**
     * JS command: snapshot the map. Returns a temp-file URI when writeToDisk is
     * true, otherwise a base64 data URI. Must only be called after the map rendered.
     */
    public void takeSnap(final String callbackID, final boolean writeToDisk) {
        final AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);

        if (mMap == null) {
            throw new Error("takeSnap should only be called after the map has rendered");
        }

        mMap.snapshot(new MapboxMap.SnapshotReadyCallback() {
            @Override
            public void onSnapshotReady(Bitmap snapshot) {
                WritableMap payload = new WritableNativeMap();
                String uri = writeToDisk ? BitmapUtils.createTempFile(mContext, snapshot) : BitmapUtils.createBase64(snapshot);
                payload.putString("uri", uri);
                event.setPayload(payload);
                mManager.handleEvent(event);
            }
        });
    }

    /** JS command: report the camera's center coordinate ([lng, lat]). */
    public void getCenter(String callbackID) {
        AndroidCallbackEvent event = new AndroidCallbackEvent(this, callbackID, EventKeys.MAP_ANDROID_CALLBACK);
        LatLng center = mMap.getCameraPosition().target;

        WritableArray array = new WritableNativeArray();
        array.pushDouble(center.getLongitude());
        array.pushDouble(center.getLatitude());
        WritableMap payload = new WritableNativeMap();
        payload.putArray("center", array);
        event.setPayload(payload);

        mManager.handleEvent(event);
    }

    public void init() {
        setStyleUrl(mStyleURL);

        // very important, this will make sure that mapbox-gl-native initializes the gl surface
        // https://github.com/mapbox/react-native-mapbox-gl/issues/955
        getViewTreeObserver().dispatchOnGlobalLayout();
    }

    public boolean isDestroyed(){
        return mDestroyed;
    }

    /** Re-applies heading/pitch/zoom (and optionally center) from the current React props. */
    private void updateCameraPositionIfNeeded(boolean shouldUpdateTarget) {
        if (mMap != null) {
            CameraPosition prevPosition = mMap.getCameraPosition();
            CameraUpdate cameraUpdate = CameraUpdateFactory.newCameraPosition(buildCamera(prevPosition, shouldUpdateTarget));

            if (mAnimated) {
                mMap.easeCamera(cameraUpdate);
            } else {
                mMap.moveCamera(cameraUpdate);
            }
        }
    }

    private CameraPosition buildCamera() {
        return buildCamera(null, true);
    }
    /**
     * Builds a CameraPosition from the React props, optionally retargeting to
     * mCenterCoordinate; other fields of previousPosition are carried over.
     */
    private CameraPosition buildCamera(CameraPosition previousPosition, boolean shouldUpdateTarget) {
        CameraPosition.Builder builder = new CameraPosition.Builder(previousPosition)
                .bearing(mHeading)
                .tilt(mPitch)
                .zoom(mZoomLevel);

        if (shouldUpdateTarget) {
            builder.target(GeoJSONUtils.toLatLng(mCenterCoordinate));
        }

        return builder.build();
    }

    /**
     * Pushes gesture/UI props into the map's UiSettings. Null props (never set
     * from JS) are skipped, and settings are only written when they changed.
     */
    private void updateUISettings() {
        if (mMap == null) {
            return;
        }
        // Gesture settings
        UiSettings uiSettings = mMap.getUiSettings();

        if (mScrollEnabled != null && uiSettings.isScrollGesturesEnabled() != mScrollEnabled) {
            uiSettings.setScrollGesturesEnabled(mScrollEnabled);
        }

        if (mPitchEnabled != null && uiSettings.isTiltGesturesEnabled() != mPitchEnabled) {
            uiSettings.setTiltGesturesEnabled(mPitchEnabled);
        }

        if (mRotateEnabled != null && uiSettings.isRotateGesturesEnabled() != mRotateEnabled) {
            uiSettings.setRotateGesturesEnabled(mRotateEnabled);
        }

        if (mAttributionEnabled != null && uiSettings.isAttributionEnabled() != mAttributionEnabled) {
            uiSettings.setAttributionEnabled(mAttributionEnabled);
        }

        if (mLogoEnabled != null && uiSettings.isLogoEnabled() != mLogoEnabled) {
            uiSettings.setLogoEnabled(mLogoEnabled);
        }

        if (mCompassEnabled != null && uiSettings.isCompassEnabled() != mCompassEnabled) {
            uiSettings.setCompassEnabled(mCompassEnabled);
        }

        if (mZoomEnabled != null && uiSettings.isZoomGesturesEnabled() != mZoomEnabled) {
            uiSettings.setZoomGesturesEnabled(mZoomEnabled);
        }
    }

    /**
     * Applies the contentInset prop as map padding. Accepts CSS-style shorthand:
     * [top, right, bottom, left], [vertical, horizontal], or [all].
     */
    private void updateInsets() {
        if (mMap == null || mInsets == null) {
            return;
        }

        final DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
        int top = 0, right = 0, bottom = 0, left = 0;

        if (mInsets.size() == 4) {
            top = mInsets.getInt(0);
            right = mInsets.getInt(1);
            bottom = mInsets.getInt(2);
            left = mInsets.getInt(3);
        } else if (mInsets.size() == 2) {
            top = mInsets.getInt(0);
            right = mInsets.getInt(1);
            bottom = top;
            left = right;
        } else if (mInsets.size() == 1) {
            top = mInsets.getInt(0);
            right = top;
            bottom = top;
            left = top;
        }

        // NOTE(review): scaledDensity includes the user's font-scale preference;
        // density is the usual dp->px factor for layout insets — confirm intent.
        mMap.setPadding(
                Float.valueOf(left * metrics.scaledDensity).intValue(),
                Float.valueOf(top * metrics.scaledDensity).intValue(),
                Float.valueOf(right * metrics.scaledDensity).intValue(),
                Float.valueOf(bottom * metrics.scaledDensity).intValue());
    }

    private void setMinMaxZoomLevels() {
        if (mMap == null) {
            return;
        }

        if (mMinZoomLevel != null) {
            mMap.setMinZoomPreference(mMinZoomLevel);
        }

        if (mMaxZoomLevel != null) {
            mMap.setMaxZoomPreference(mMaxZoomLevel);
        }
    }

    /** Ties location updates and MapView pause/resume to the React host lifecycle. */
    private void setLifecycleListeners() {
        final ReactContext reactContext = (ReactContext) mContext;

        mLifeCycleListener = new LifecycleEventListener() {
            @Override
            public void onHostResume() {
                if (mShowUserLocation && !mLocationManger.isActive()) {
                    mLocationManger.enable();
                }
                onResume();
            }

            @Override
            public void onHostPause() {
                if (mLocationManger.isActive()) {
                    mLocationManger.disable();
                }
                onPause();
            }

            @Override
            public void onHostDestroy() {
                dispose();
            }
        };

        reactContext.addLifecycleEventListener(mLifeCycleListener);
    }

    /**
     * Starts location updates and the location layer, seeding with the last known
     * fix. Silently no-ops when location permission has not been granted.
     */
    private void enableLocation() {
        if (!PermissionsManager.areLocationPermissionsGranted(mContext)) {
            return;
        }

        if (!mLocationManger.isActive()) {
            mLocationManger.enable();
        }

        updateLocationLayer();

        Location lastKnownLocation = mLocationManger.getLastKnownLocation();
        if (lastKnownLocation != null) {
            mLocationChangeListener.onLocationChange(lastKnownLocation);

            // Give the camera time to settle before reporting the region.
            postDelayed(new Runnable() {
                @Override
                public void run() {
                    sendRegionChangeEvent(false);
                }
            }, 200);
        }
    }

    /** Lazily creates the LocationLayerPlugin and syncs its mode with the tracking props. */
    private void updateLocationLayer() {
        if (mLocationLayer == null) {
            mLocationLayer = new LocationLayerPlugin(this, mMap, mLocationManger.getEngine());
        }

        int userLayerMode = UserTrackingMode.getMapLayerMode(mUserLocation.getTrackingMode(), mShowUserLocation);
        if (userLayerMode != mLocationLayer.getLocationLayerMode()) {
            mLocationLayer.setLocationLayerEnabled(userLayerMode);

            // Hide the accuracy halo; it is not exposed to the JS side.
            Layer accLayer = mMap.getLayer(UserLocationLayerConstants.ACCURACY_LAYER_ID);
            if (accLayer != null) {
                accLayer.setProperties(PropertyFactory.visibility(Property.NONE));
            }
        }
    }

    /** Builds the GeoJSON point-feature payload for region-change events. */
    private WritableMap makeRegionPayload(boolean isAnimated) {
        CameraPosition position = mMap.getCameraPosition();
        if (position == null || position.target == null) {
            return new WritableNativeMap();
        }
        LatLng latLng = new LatLng(position.target.getLatitude(), position.target.getLongitude());

        WritableMap properties = new WritableNativeMap();
        properties.putDouble("zoomLevel", position.zoom);
        properties.putDouble("heading", position.bearing);
        properties.putDouble("pitch", position.tilt);
        properties.putBoolean("animated", isAnimated);
        properties.putBoolean("isUserInteraction", mCameraChangeTracker.isUserInteraction());

        try {
            // This crashes sometimes for some reason, seems fine to ignore.
            VisibleRegion visibleRegion = mMap.getProjection().getVisibleRegion();
            properties.putArray("visibleBounds", GeoJSONUtils.fromLatLngBounds(visibleRegion.latLngBounds));
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        return GeoJSONUtils.toPointFeature(latLng, properties);
    }

    private void removeAllSourcesFromMap() {
        if (mSources.size() == 0) {
            return;
        }
        for (String key : mSources.keySet()) {
            RCTSource source = mSources.get(key);
            source.removeFromMap(this);
        }
    }

    private void addAllSourcesToMap() {
        if (mSources.size() == 0) {
            return;
        }
        for (String key : mSources.keySet()) {
            RCTSource source = mSources.get(key);
            source.addToMap(this);
        }
    }

    /** Sources that registered a JS press listener (tap hit-testing candidates). */
    private List<RCTSource> getAllTouchableSources() {
        List<RCTSource> sources = new ArrayList<>();

        for (String key : mSources.keySet()) {
            RCTSource source = mSources.get(key);
            if (source.hasPressListener()) {
                sources.add(source);
            }
        }

        return sources;
    }

    /**
     * Of the sources hit by a tap, returns the one whose layer is rendered on top,
     * determined by walking the style's layer stack from front to back.
     */
    private RCTSource getTouchableSourceWithHighestZIndex(List<RCTSource> sources) {
        if (sources == null || sources.size() == 0) {
            return null;
        }

        if (sources.size() == 1) {
            return sources.get(0);
        }

        Map<String, RCTSource> layerToSourceMap = new HashMap<>();
        for (RCTSource source : sources) {
            String[] layerIDs = source.getLayerIDs();

            for (String layerID : layerIDs) {
                layerToSourceMap.put(layerID, source);
            }
        }

        // getLayers returns from back(N - 1) to front(0)
        List<Layer> mapboxLayers = mMap.getLayers();
        for (int i = mapboxLayers.size() - 1; i >= 0; i--) {
            Layer mapboxLayer = mapboxLayers.get(i);

            String layerID = mapboxLayer.getId();
            if (layerToSourceMap.containsKey(layerID)) {
                return layerToSourceMap.get(layerID);
            }
        }

        return null;
    }

    /** Updates the tracking mode and notifies JS via a MapUserTrackingModeEvent. */
    private void updateUserTrackingMode(int userTrackingMode) {
        mUserLocation.setTrackingMode(userTrackingMode);
        IEvent event = new MapUserTrackingModeEvent(this, userTrackingMode);
        mManager.handleEvent(event);
    }

    /**
     * Moves the camera toward the user location. The first move after tracking
     * starts (POSSIBLE) recenters and re-zooms; subsequent ones (CHANGED) keep
     * the current zoom and ease incrementally.
     */
    private void updateUserLocation(boolean isAnimated) {
        if (!mShowUserLocation || mUserLocation.getTrackingMode() == UserTrackingMode.NONE) {
            return;
        }

        if (mUserTrackingState == UserTrackingState.POSSIBLE) {
            updateUserLocationSignificantly(isAnimated);
        } else if (mUserTrackingState == UserTrackingState.CHANGED) {
            updateUserLocationIncrementally(isAnimated);
        }
    }

    private void updateUserLocationSignificantly(boolean isAnimated) {
        mUserTrackingState = UserTrackingState.BEGAN;

        CameraUpdate cameraUpdate = CameraUpdateFactory.newCameraPosition(getUserLocationUpdateCameraPosition(mZoomLevel));
        MapboxMap.CancelableCallback cameraCallback = new MapboxMap.CancelableCallback() {
            @Override
            public void onCancel() {
                mUserTrackingState = UserTrackingState.CHANGED;
            }

            @Override
            public void onFinish() {
                mUserTrackingState = UserTrackingState.CHANGED;
            }
        };

        if (isAnimated && hasSetCenterCoordinate()) {
            mMap.animateCamera(cameraUpdate, cameraCallback);
        } else {
            mMap.moveCamera(cameraUpdate, cameraCallback);
        }
    }

    private void updateUserLocationIncrementally(boolean isAnimated) {
        mUserTrackingState = UserTrackingState.BEGAN;

        CameraPosition cameraPosition = mMap.getCameraPosition();
        CameraUpdate cameraUpdate = CameraUpdateFactory.newCameraPosition(getUserLocationUpdateCameraPosition(cameraPosition.zoom));

        MapboxMap.CancelableCallback callback = new MapboxMap.CancelableCallback() {
            @Override
            public void onCancel() {
                mUserTrackingState = UserTrackingState.CHANGED;
            }

            @Override
            public void onFinish() {
                mUserTrackingState = UserTrackingState.CHANGED;
            }
        };

        if (isAnimated) {
            mMap.easeCamera(cameraUpdate, USER_LOCATION_CAMERA_MOVE_DURATION, callback);
        } else {
            mMap.moveCamera(cameraUpdate, callback);
        }
    }

    /**
     * Camera position centered on the user, offset vertically when the
     * userLocationVerticalAlignment prop asks for TOP or BOTTOM placement.
     */
    private CameraPosition getUserLocationUpdateCameraPosition(double zoomLevel) {
        LatLng center = mUserLocation.getCoordinate();

        if (mUserLocationVerticalAlignment != UserLocationVerticalAlignment.CENTER) {
            DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
            int[] contentPadding = mMap.getPadding();

            // we want to get the width, and height scaled based on pixel density, that also includes content padding
            // (width * percentOfWidthWeWant - (leftPadding + rightPadding)) / dpi
            int mapWidth = (int)((mMap.getWidth() * 0.75 - (contentPadding[0] + contentPadding[2])) / metrics.scaledDensity);
            int mapHeight = (int)((mMap.getHeight() * 0.75 - (contentPadding[1] + contentPadding[3])) / metrics.scaledDensity);
            VisibleRegion region = GeoViewport.getRegion(center, (int) zoomLevel, mapWidth, mapHeight);

            switch (mUserLocationVerticalAlignment) {
                case UserLocationVerticalAlignment.TOP:
                    center = new LatLng(region.nearRight.getLatitude(), center.getLongitude());
                    break;
                case UserLocationVerticalAlignment.BOTTOM:
                    center = new LatLng(region.farLeft.getLatitude(), center.getLongitude());
                    break;
            }
        }

        return new CameraPosition.Builder()
                .target(center)
                .bearing(getDirectionForUserLocationUpdate())
                .tilt(mPitch)
                .zoom(zoomLevel)
                .build();
    }

    private double getDirectionForUserLocationUpdate() {
        // NOTE: The direction of this is used for map rotation only, not location layer rotation
        CameraPosition currentCamera = mMap.getCameraPosition();
        double direction = currentCamera.bearing;

        int userTrackingMode = mUserLocation.getTrackingMode();
        if (userTrackingMode == UserTrackingMode.FollowWithHeading || userTrackingMode == UserTrackingMode.FollowWithCourse) {
            direction = mUserLocation.getBearing();
        } else if (mHeading != 0.0) {
            direction = mHeading;
        }

        return direction;
    }

    private boolean
hasSetCenterCoordinate() { CameraPosition cameraPosition = mMap.getCameraPosition(); LatLng center = cameraPosition.target; return center.getLatitude() != 0.0 && center.getLongitude() != 0.0; } private double getMapRotation() { CameraPosition cameraPosition = mMap.getCameraPosition(); return cameraPosition.bearing; } private void sendRegionChangeEvent(boolean isAnimated) { IEvent event = new MapChangeEvent(this, makeRegionPayload(isAnimated), EventTypes.REGION_DID_CHANGE); mManager.handleEvent(event); mCameraChangeTracker.setReason(-1); } private void sendUserLocationUpdateEvent(Location location) { if(location == null){ return; } IEvent event = new MapChangeEvent(this, makeLocationChangePayload(location), EventTypes.USER_LOCATION_UPDATED); mManager.handleEvent(event); } /** * Create a payload of the location data per the web api geolocation spec * https://dev.w3.org/geo/api/spec-source.html#position * @return */ private WritableMap makeLocationChangePayload(Location location) { WritableMap positionProperties = new WritableNativeMap(); WritableMap coords = new WritableNativeMap(); coords.putDouble("longitude", location.getLongitude()); coords.putDouble("latitude", location.getLatitude()); coords.putDouble("altitude", location.getAltitude()); coords.putDouble("accuracy", location.getAccuracy()); coords.putDouble("heading", location.getBearing()); coords.putDouble("speed", location.getSpeed()); positionProperties.putMap("coords", coords); positionProperties.putDouble("timestamp", location.getTime()); return positionProperties; } }
package rentasad.library.db.dataObjects;

/**
 * Constant definitions for MySQL column data types: an internal integer code
 * per type plus the type-name string used in MySQL DDL.
 *
 * <p>MySQL resolves the following synonyms across databases (original German
 * notes translated to English):
 * <pre>
 *   BOOL, BOOLEAN          -&gt; TINYINT
 *   CHARACTER VARYING(M)   -&gt; VARCHAR(M)
 *   FIXED                  -&gt; DECIMAL
 *   FLOAT4                 -&gt; FLOAT
 *   FLOAT8                 -&gt; DOUBLE
 *   INT1                   -&gt; TINYINT
 *   INT2                   -&gt; SMALLINT
 *   INT3                   -&gt; MEDIUMINT
 *   INT4                   -&gt; INT
 *   INT8                   -&gt; BIGINT
 *   LONG VARBINARY         -&gt; MEDIUMBLOB
 *   LONG VARCHAR, LONG     -&gt; MEDIUMTEXT
 *   MIDDLEINT              -&gt; MEDIUMINT
 *   NUMERIC                -&gt; DECIMAL
 * </pre>
 */
public class MySQLDataTypeValue
{
    /*
     * Internal integer codes, one per supported MySQL type.
     */
    public final static int TYPE_INT_DATE = 1;
    public final static int TYPE_INT_DATE_TIME = 2;
    public final static int TYPE_INT_TIMESTAMP = 3;
    public final static int TYPE_INT_TIME = 4;
    public final static int TYPE_INT_YEAR = 5;
    public final static int TYPE_INT_CHAR = 6;
    public final static int TYPE_INT_VARCHAR = 7;
    public final static int TYPE_INT_BINARY = 8;
    public final static int TYPE_INT_BLOB = 9;
    public final static int TYPE_INT_ENUM = 10;
    public final static int TYPE_INT_SET = 11;
    public final static int TYPE_INT_TINY_INT = 12;
    public final static int TYPE_INT_SMALL_INT = 13;
    public final static int TYPE_INT_MEDIUM_INT = 14;
    public final static int TYPE_INT_INT = 15;
    public final static int TYPE_INT_BIG_INT = 16;

    /** @deprecated Misspelled; kept for backward compatibility. Use {@link #TYPE_INT_DECIMAL}. */
    @Deprecated
    public final static int TYPE_INT_DECMAL = 17;
    /** Correctly spelled alias for the DECIMAL type code (same value as the legacy constant). */
    public final static int TYPE_INT_DECIMAL = TYPE_INT_DECMAL;

    public final static int TYPE_INT_NUMERIC = 18;
    public final static int TYPE_INT_FLOAT = 19;
    public final static int TYPE_INT_DOUBLE = 20;
    public final static int TYPE_INT_BIT = 21;

    /*
     * Numeric type names.
     */
    public final static String TYPE_NAME_TINY_INT = "TINYINT";
    public final static String TYPE_NAME_SMALL_INT = "SMALLINT";
    public final static String TYPE_NAME_MEDIUM_INT = "MEDIUMINT";
    public final static String TYPE_NAME_INT = "INT";
    public final static String TYPE_NAME_BIG_INT = "BIGINT";
    public final static String TYPE_NAME_DECIMAL = "DECIMAL";
    public final static String TYPE_NAME_NUMERIC = "NUMERIC";
    public final static String TYPE_NAME_FLOAT = "FLOAT";

    /** @deprecated Misspelled; kept for backward compatibility. Use {@link #TYPE_NAME_DOUBLE}. */
    @Deprecated
    public final static String TYPE_NAME_DOUBE = "DOUBLE";
    /** Correctly spelled alias for the DOUBLE type name (same value as the legacy constant). */
    public final static String TYPE_NAME_DOUBLE = TYPE_NAME_DOUBE;

    public final static String TYPE_NAME_BIT = "BIT";

    /*
     * Date and time type names.
     */
    public final static String TYPE_NAME_DATE = "DATE";
    public final static String TYPE_NAME_DATE_TIME = "DATETIME";
    public final static String TYPE_NAME_TIME_STAMP = "TIMESTAMP";
    public final static String TYPE_NAME_TIME = "TIME";
    public final static String TYPE_NAME_YEAR = "YEAR";

    /*
     * String type names.
     */
    public final static String TYPE_NAME_CHAR = "CHAR";
    public final static String TYPE_NAME_VARCHAR = "VARCHAR";
    public final static String TYPE_NAME_BINARY = "BINARY";
    public final static String TYPE_NAME_BLOB = "BLOB";
    public final static String TYPE_NAME_ENUM = "ENUM";
    public final static String TYPE_NAME_SET = "SET";
}
/******************************************************************************* * Copyright © 2017-2018 VMware, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. * * @author: Huaqiao Zhang, <huaqiaoz@vmware.com> * @version: 0.1.0 *******************************************************************************/ package edgexfoundry.controller; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.springframework.beans.factory.annotation.Value; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import edgexfoundry.domain.User; @RestController @RequestMapping("/api/v1/auth") public class AuthController { @Value(value = "${USER_NAME}") private String userName; @Value("${USER_PWD}") private String userPwd; @RequestMapping(value="/login",method=RequestMethod.POST) public void login(@RequestBody User user,HttpServletRequest req,HttpServletResponse resp) throws Exception{ if(user.getName().equals(userName) && user.getPassword().equals(userPwd)) { req.getSession().setAttribute("user", user); } //resp.sendRedirect("http://localhost:4000"); return ; } @RequestMapping(value="/logout",method=RequestMethod.GET) public void logout(HttpServletRequest req,HttpServletResponse resp) throws 
Exception{ HttpSession session = req.getSession(); session.invalidate(); //resp.sendRedirect(req.getContextPath()+"/"); return ; } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.management.containerservice; import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; /** The NetworkProfile model. */ @Fluent public final class NetworkProfile { /* * CIDR for the OpenShift Vnet. */ @JsonProperty(value = "vnetCidr") private String vnetCidr; /* * CIDR of the Vnet to peer. */ @JsonProperty(value = "peerVnetId") private String peerVnetId; /* * ID of the Vnet created for OSA cluster. */ @JsonProperty(value = "vnetId") private String vnetId; /** * Get the vnetCidr property: CIDR for the OpenShift Vnet. * * @return the vnetCidr value. */ public String vnetCidr() { return this.vnetCidr; } /** * Set the vnetCidr property: CIDR for the OpenShift Vnet. * * @param vnetCidr the vnetCidr value to set. * @return the NetworkProfile object itself. */ public NetworkProfile withVnetCidr(String vnetCidr) { this.vnetCidr = vnetCidr; return this; } /** * Get the peerVnetId property: CIDR of the Vnet to peer. * * @return the peerVnetId value. */ public String peerVnetId() { return this.peerVnetId; } /** * Set the peerVnetId property: CIDR of the Vnet to peer. * * @param peerVnetId the peerVnetId value to set. * @return the NetworkProfile object itself. */ public NetworkProfile withPeerVnetId(String peerVnetId) { this.peerVnetId = peerVnetId; return this; } /** * Get the vnetId property: ID of the Vnet created for OSA cluster. * * @return the vnetId value. */ public String vnetId() { return this.vnetId; } /** * Set the vnetId property: ID of the Vnet created for OSA cluster. * * @param vnetId the vnetId value to set. * @return the NetworkProfile object itself. */ public NetworkProfile withVnetId(String vnetId) { this.vnetId = vnetId; return this; } }
/*
 * Copyright 2018 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.profiler.sender;

import com.navercorp.pinpoint.common.util.Assert;
import com.navercorp.pinpoint.profiler.context.thrift.MessageConverter;
import com.navercorp.pinpoint.common.util.IOUtils;
import com.navercorp.pinpoint.rpc.PinpointSocketException;
import com.navercorp.pinpoint.rpc.buffer.ByteBufferFactory;
import com.navercorp.pinpoint.rpc.buffer.ByteBufferFactoryLocator;
import com.navercorp.pinpoint.rpc.buffer.ByteBufferType;
import com.navercorp.pinpoint.thrift.io.ByteBufferOutputStream;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializer2;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializerFactory2;

import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;

/**
 * UDP {@link DataSender} backed by a connected NIO {@link DatagramChannel}.
 * Messages are queued on an {@link AsyncQueueingExecutor} and serialized into a
 * single reused direct buffer on the executor thread (single-threaded send path).
 *
 * @author Taejin Koo
 */
public class NioUDPDataSender implements DataSender {

    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    protected final boolean isDebug = logger.isDebugEnabled();

    public static final int SOCKET_TIMEOUT = 1000 * 5;
    public static final int SEND_BUFFER_SIZE = 1024 * 64 * 16;
    // Maximum UDP payload: 65535 - 8 (UDP header) - 20 (IP header).
    public static final int UDP_MAX_PACKET_LENGTH = 65507;

    private final DatagramChannel datagramChannel;
    private final HeaderTBaseSerializer2 serializer;
    // Reused for every serialization; safe because sendPacket runs on the
    // single executor thread only.
    private final ByteBufferOutputStream byteBufferOutputStream;
    private final AsyncQueueingExecutor<Object> executor;
    private final MessageConverter<TBase<?, ?>> messageConverter;

    private volatile boolean closed = false;

    /**
     * Creates the sender and immediately connects the underlying channel.
     *
     * @throws IllegalStateException if the channel cannot be created/connected
     */
    public NioUDPDataSender(String host, int port, String threadName,
                            int queueSize, int timeout, int sendBufferSize,
                            MessageConverter<TBase<?, ?>> messageConverter) {
        Assert.requireNonNull(host, "host");
        Assert.requireNonNull(threadName, "threadName");
        Assert.isTrue(queueSize > 0, "queueSize");
        Assert.isTrue(timeout > 0, "timeout");
        Assert.isTrue(sendBufferSize > 0, "sendBufferSize");
        this.messageConverter = Assert.requireNonNull(messageConverter, "messageConverter");

        // TODO If fail to create socket, stop agent start
        logger.info("NioUDPDataSender initialized. host={}, port={}", host, port);
        this.datagramChannel = createChannel(host, port, timeout, sendBufferSize);

        HeaderTBaseSerializerFactory2 serializerFactory = new HeaderTBaseSerializerFactory2();
        this.serializer = serializerFactory.createSerializer();

        ByteBufferFactory bufferFactory = ByteBufferFactoryLocator.getFactory(ByteBufferType.DIRECT);
        ByteBuffer byteBuffer = bufferFactory.getBuffer(UDP_MAX_PACKET_LENGTH);
        this.byteBufferOutputStream = new ByteBufferOutputStream(byteBuffer);

        this.executor = createAsyncQueueingExecutor(queueSize, threadName);
    }

    // Builds the queueing executor whose worker thread drains messages into sendPacket().
    private AsyncQueueingExecutor<Object> createAsyncQueueingExecutor(int queueSize, String executorName) {
        AsyncQueueingExecutorListener<Object> listener = new DefaultAsyncQueueingExecutorListener() {
            @Override
            public void execute(Object message) {
                NioUDPDataSender.this.sendPacket(message);
            }
        };
        final AsyncQueueingExecutor<Object> executor =
                new AsyncQueueingExecutor<Object>(queueSize, executorName, listener);
        return executor;
    }

    // Opens and connects the channel; closes partially-created resources on failure.
    private DatagramChannel createChannel(String host, int port, int timeout, int sendBufferSize) {
        DatagramChannel datagramChannel = null;
        DatagramSocket socket = null;
        try {
            datagramChannel = DatagramChannel.open();
            socket = datagramChannel.socket();
            socket.setSoTimeout(timeout);
            socket.setSendBufferSize(sendBufferSize);

            if (logger.isWarnEnabled()) {
                // The OS may silently cap the requested send buffer size.
                final int checkSendBufferSize = socket.getSendBufferSize();
                if (sendBufferSize != checkSendBufferSize) {
                    logger.warn("DatagramChannel.setSendBufferSize() error. {}!={}", sendBufferSize, checkSendBufferSize);
                }
            }

            InetSocketAddress serverAddress = new InetSocketAddress(host, port);
            datagramChannel.connect(serverAddress);
            return datagramChannel;
        } catch (IOException e) {
            IOUtils.closeQuietly(socket);
            IOUtils.closeQuietly(datagramChannel);
            // FIX: the original message was "... Cause" + e.getMessage() with no
            // separator, producing e.g. "Causeconnection refused".
            throw new IllegalStateException("DatagramChannel create fail. Cause:" + e.getMessage(), e);
        }
    }

    /**
     * Enqueues a message for asynchronous sending.
     *
     * @return true if the message was accepted by the queue
     */
    @Override
    public boolean send(Object data) {
        return executor.execute(data);
    }

    /**
     * Stops the executor and releases the serialization buffer. Subsequent
     * sendPacket() calls throw {@link PinpointSocketException}.
     */
    @Override
    public void stop() {
        try {
            closed = true;
            executor.stop();
        } finally {
            try {
                byteBufferOutputStream.close();
            } catch (IOException ignored) {
                // best-effort cleanup; nothing useful to do on close failure
            }
        }
    }

    // Executor callback: converts non-TBase messages, then serializes and writes.
    private void sendPacket(Object message) {
        if (closed) {
            throw new PinpointSocketException("NioUDPDataSender already closed.");
        }

        if (message instanceof TBase) {
            final TBase<?, ?> tBase = (TBase<?, ?>) message;
            sendPacket(tBase);
            return;
        }
        final TBase<?, ?> tBase = this.messageConverter.toMessage(message);
        if (tBase != null) {
            sendPacket(tBase);
            return;
        }
        logger.warn("sendPacket fail. invalid type:{}", message != null ? message.getClass() : null);
        if (logger.isDebugEnabled()) {
            logger.debug("unknown message:{}", message);
        }
    }

    // Serializes into the shared buffer and writes the datagram on the channel.
    private void sendPacket(TBase<?, ?> tBase) {
        byteBufferOutputStream.clear();
        // do not copy bytes because it's single threaded
        try {
            serializer.serialize(tBase, byteBufferOutputStream);
        } catch (TException e) {
            throw new PinpointSocketException("Serialize " + tBase + " failed. Error:" + e.getMessage(), e);
        }

        ByteBuffer byteBuffer = byteBufferOutputStream.getByteBuffer();
        int bufferSize = byteBuffer.remaining();
        try {
            datagramChannel.write(byteBuffer);
        } catch (IOException e) {
            final Thread currentThread = Thread.currentThread();
            if (currentThread.isInterrupted()) {
                // NIO write on an interrupted thread fails; surface it distinctly.
                logger.warn("{} thread interrupted.", currentThread.getName());
                throw new PinpointSocketException(currentThread.getName() + " thread interrupted.", e);
            } else {
                throw new PinpointSocketException("packet send error. size:" + bufferSize + ", " + tBase, e);
            }
        }
    }

}
package online.quar.application.manager;

import online.quar.application.Singleton;
import online.quar.application.model.Car;
import online.quar.application.model.User;

import java.util.ArrayList;

/**
 * Cache-aside user registry: keeps users seen so far in memory and falls back
 * to the database (via Singleton's DatabaseManager) on cache misses.
 */
public class UserManager {

    // In-memory cache of users; the database remains the source of truth.
    ArrayList<User> users = new ArrayList<>();

    /**
     * Adds a user to the cache and persists it.
     *
     * @param userToAdd user to register
     * @return the same user instance, for chaining
     */
    public User addUser(User userToAdd) {
        users.add(userToAdd);
        // Save the user that was just added.
        Singleton.getApplicationManager().getDatabaseManager().save(userToAdd);
        return userToAdd;
    }

    /**
     * Soft-deletes a user: evicts it from the cache and marks it inactive in
     * the database (no hard delete of the persisted record).
     *
     * @return true when the user can no longer be found, false otherwise
     */
    public boolean removeUser(User userToRemove) {
        users.remove(userToRemove);
        // To "remove" from storage, deactivate rather than delete.
        userToRemove.setActive(false);
        Singleton.getApplicationManager().getDatabaseManager().save(userToRemove);
        return findUser(userToRemove.getId()) == null;
    }

    /**
     * Finds an active user by username: cache first, then database.
     * A database hit is added to the cache.
     *
     * @return the matching active user, or null if none exists
     */
    public User findUser(String userName) {
        // First check for the user in the cache.
        for (User user : users) {
            // BUG FIX: the original compared usernames with '==', which tests
            // reference identity and misses equal-but-distinct String instances.
            if (user.isActive() && sameUsername(user.getUsername(), userName)) {
                return user;
            }
        }
        // Not cached: consult the database and cache a hit.
        DatabaseManager databaseManager = Singleton.getApplicationManager().getDatabaseManager();
        User user = databaseManager.getUserByUsername(userName, true);
        if (user != null) {
            users.add(user);
        }
        // Null when the user is unknown to the database as well.
        return user;
    }

    /**
     * Finds an active user by id: cache first, then database.
     * A database hit is added to the cache.
     *
     * @return the matching active user, or null if none exists
     */
    public User findUser(long userId) {
        for (User user : users) {
            if (user.getId() == userId && user.isActive()) {
                return user;
            }
        }
        DatabaseManager databaseManager = Singleton.getApplicationManager().getDatabaseManager();
        User user = databaseManager.getUser(userId, true);
        if (user != null) {
            users.add(user);
        }
        return user;
    }

    // Null-safe value equality (mirrors Objects.equals without adding an import).
    private static boolean sameUsername(String a, String b) {
        return (a == null) ? b == null : a.equals(b);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.metadata; import java.io.Serializable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.ProtectMode; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import 
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.OutputFormat; import org.apache.hadoop.mapred.SequenceFileInputFormat; /** * A Hive Table: is a fundamental unit of data in Hive that shares a common schema/DDL. * * Please note that the ql code should always go through methods of this class to access the * metadata, instead of directly accessing org.apache.hadoop.hive.metastore.api.Table. This * helps to isolate the metastore code and the ql code. */ public class Table implements Serializable { private static final long serialVersionUID = 1L; static final private Log LOG = LogFactory.getLog("hive.ql.metadata.Table"); private org.apache.hadoop.hive.metastore.api.Table tTable; /** * These fields are all cached fields. The information comes from tTable. */ private Deserializer deserializer; private Class<? extends OutputFormat> outputFormatClass; private Class<? extends InputFormat> inputFormatClass; private Path path; private transient HiveStorageHandler storageHandler; /** * Used only for serialization. 
*/ public Table() { } public Table(org.apache.hadoop.hive.metastore.api.Table table) { initialize(table); } // Do initialization here, so as to keep the ctor minimal. protected void initialize(org.apache.hadoop.hive.metastore.api.Table table) { tTable = table; // Note that we do not set up fields like inputFormatClass, outputFormatClass // and deserializer because the Partition needs to be accessed from across // the metastore side as well, which will result in attempting to load // the class associated with them, which might not be available, and // the main reason to instantiate them would be to pre-cache them for // performance. Since those fields are null/cache-check by their accessors // anyway, that's not a concern. } public Table(String databaseName, String tableName) { this(getEmptyTable(databaseName, tableName)); } public boolean isDummyTable() { return tTable.getTableName().equals(SemanticAnalyzer.DUMMY_TABLE); } /** * This function should only be used in serialization. * We should never call this function to modify the fields, because * the cached fields will become outdated. */ public org.apache.hadoop.hive.metastore.api.Table getTTable() { return tTable; } /** * This function should only be called by Java serialization. */ public void setTTable(org.apache.hadoop.hive.metastore.api.Table tTable) { this.tTable = tTable; } /** * Initialize an empty table. */ public static org.apache.hadoop.hive.metastore.api.Table getEmptyTable(String databaseName, String tableName) { StorageDescriptor sd = new StorageDescriptor(); { sd.setSerdeInfo(new SerDeInfo()); sd.setNumBuckets(-1); sd.setBucketCols(new ArrayList<String>()); sd.setCols(new ArrayList<FieldSchema>()); sd.setParameters(new HashMap<String, String>()); sd.setSortCols(new ArrayList<Order>()); sd.getSerdeInfo().setParameters(new HashMap<String, String>()); // We have to use MetadataTypedColumnsetSerDe because LazySimpleSerDe does // not support a table with no columns. 
sd.getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.class.getName()); sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.setInputFormat(SequenceFileInputFormat.class.getName()); sd.setOutputFormat(HiveSequenceFileOutputFormat.class.getName()); SkewedInfo skewInfo = new SkewedInfo(); skewInfo.setSkewedColNames(new ArrayList<String>()); skewInfo.setSkewedColValues(new ArrayList<List<String>>()); skewInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>()); sd.setSkewedInfo(skewInfo); } org.apache.hadoop.hive.metastore.api.Table t = new org.apache.hadoop.hive.metastore.api.Table(); { t.setSd(sd); t.setPartitionKeys(new ArrayList<FieldSchema>()); t.setParameters(new HashMap<String, String>()); t.setTableType(TableType.MANAGED_TABLE.toString()); t.setDbName(databaseName); t.setTableName(tableName); t.setOwner(SessionState.getUserFromAuthenticator()); // set create time t.setCreateTime((int) (System.currentTimeMillis() / 1000)); } return t; } public void checkValidity() throws HiveException { // check for validity String name = tTable.getTableName(); if (null == name || name.length() == 0 || !MetaStoreUtils.validateName(name)) { throw new HiveException("[" + name + "]: is not a valid table name"); } if (0 == getCols().size()) { throw new HiveException( "at least one column must be specified for the table"); } if (!isView()) { if (null == getDeserializerFromMetaStore(false)) { throw new HiveException("must specify a non-null serDe"); } if (null == getInputFormatClass()) { throw new HiveException("must specify an InputFormat class"); } if (null == getOutputFormatClass()) { throw new HiveException("must specify an OutputFormat class"); } } if (isView()) { assert(getViewOriginalText() != null); assert(getViewExpandedText() != null); } else { assert(getViewOriginalText() == null); assert(getViewExpandedText() == null); } validateColumns(getCols(), getPartCols()); } public StorageDescriptor getSd() { return 
tTable.getSd(); } public void setInputFormatClass(Class<? extends InputFormat> inputFormatClass) { this.inputFormatClass = inputFormatClass; tTable.getSd().setInputFormat(inputFormatClass.getName()); } public void setOutputFormatClass(Class<? extends OutputFormat> outputFormatClass) { this.outputFormatClass = outputFormatClass; tTable.getSd().setOutputFormat(outputFormatClass.getName()); } final public Properties getMetadata() { return MetaStoreUtils.getTableMetadata(tTable); } final public Path getPath() { String location = tTable.getSd().getLocation(); if (location == null) { return null; } return new Path(location); } final public String getTableName() { return tTable.getTableName(); } final public Path getDataLocation() { if (path == null) { path = getPath(); } return path; } final public Deserializer getDeserializer() { if (deserializer == null) { deserializer = getDeserializerFromMetaStore(false); } return deserializer; } final public Class<? extends Deserializer> getDeserializerClass() throws Exception { return MetaStoreUtils.getDeserializerClass(SessionState.getSessionConf(), tTable); } final public Deserializer getDeserializer(boolean skipConfError) { if (deserializer == null) { deserializer = getDeserializerFromMetaStore(skipConfError); } return deserializer; } final public Deserializer getDeserializerFromMetaStore(boolean skipConfError) { try { return MetaStoreUtils.getDeserializer(SessionState.getSessionConf(), tTable, skipConfError); } catch (MetaException e) { throw new RuntimeException(e); } } public HiveStorageHandler getStorageHandler() { if (storageHandler != null || !isNonNative()) { return storageHandler; } try { storageHandler = HiveUtils.getStorageHandler( SessionState.getSessionConf(), getProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)); } catch (Exception e) { throw new RuntimeException(e); } return storageHandler; } final public Class<? 
extends InputFormat> getInputFormatClass() { if (inputFormatClass == null) { try { String className = tTable.getSd().getInputFormat(); if (className == null) { if (getStorageHandler() == null) { return null; } inputFormatClass = getStorageHandler().getInputFormatClass(); } else { inputFormatClass = (Class<? extends InputFormat>) Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } return inputFormatClass; } final public Class<? extends OutputFormat> getOutputFormatClass() { if (outputFormatClass == null) { try { String className = tTable.getSd().getOutputFormat(); Class<?> c; if (className == null) { if (getStorageHandler() == null) { return null; } c = getStorageHandler().getOutputFormatClass(); } else { c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } // Replace FileOutputFormat for backward compatibility outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } return outputFormatClass; } /** * Marker SemanticException, so that processing that allows for table validation failures * and appropriately handles them can recover from these types of SemanticExceptions */ public class ValidationFailureSemanticException extends SemanticException{ public ValidationFailureSemanticException(String s) { super(s); } }; final public void validatePartColumnNames( Map<String, String> spec, boolean shouldBeFull) throws SemanticException { List<FieldSchema> partCols = tTable.getPartitionKeys(); if (partCols == null || (partCols.size() == 0)) { if (spec != null) { throw new ValidationFailureSemanticException("table is not partitioned but partition spec exists: " + spec); } return; } else if (spec == null) { if (shouldBeFull) { throw new ValidationFailureSemanticException("table is partitioned but partition spec is not specified"); } return; } int columnsFound = 0; for 
(FieldSchema fs : partCols) { if (spec.containsKey(fs.getName())) { ++columnsFound; } if (columnsFound == spec.size()) break; } if (columnsFound < spec.size()) { throw new ValidationFailureSemanticException("Partition spec " + spec + " contains non-partition columns"); } if (shouldBeFull && (spec.size() != partCols.size())) { throw new ValidationFailureSemanticException("partition spec " + spec + " doesn't contain all (" + partCols.size() + ") partition columns"); } } public void setProperty(String name, String value) { tTable.getParameters().put(name, value); } public void setParamters(Map<String, String> params) { tTable.setParameters(params); } public String getProperty(String name) { return tTable.getParameters().get(name); } public boolean isImmutable(){ return (tTable.getParameters().containsKey(hive_metastoreConstants.IS_IMMUTABLE) && tTable.getParameters().get(hive_metastoreConstants.IS_IMMUTABLE).equalsIgnoreCase("true")); } public void setTableType(TableType tableType) { tTable.setTableType(tableType.toString()); } public TableType getTableType() { return Enum.valueOf(TableType.class, tTable.getTableType()); } public ArrayList<StructField> getFields() { ArrayList<StructField> fields = new ArrayList<StructField>(); try { Deserializer decoder = getDeserializer(); // Expand out all the columns of the table StructObjectInspector structObjectInspector = (StructObjectInspector) decoder .getObjectInspector(); List<? 
extends StructField> fld_lst = structObjectInspector .getAllStructFieldRefs(); for (StructField field : fld_lst) { fields.add(field); } } catch (SerDeException e) { throw new RuntimeException(e); } return fields; } public StructField getField(String fld) { try { StructObjectInspector structObjectInspector = (StructObjectInspector) getDeserializer() .getObjectInspector(); return structObjectInspector.getStructFieldRef(fld); } catch (Exception e) { throw new RuntimeException(e); } } @Override public String toString() { return tTable.getTableName(); } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((tTable == null) ? 0 : tTable.hashCode()); return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } Table other = (Table) obj; if (tTable == null) { if (other.tTable != null) { return false; } } else if (!tTable.equals(other.tTable)) { return false; } return true; } public List<FieldSchema> getPartCols() { List<FieldSchema> partKeys = tTable.getPartitionKeys(); if (partKeys == null) { partKeys = new ArrayList<FieldSchema>(); tTable.setPartitionKeys(partKeys); } return partKeys; } public boolean isPartitionKey(String colName) { for (FieldSchema key : getPartCols()) { if (key.getName().toLowerCase().equals(colName)) { return true; } } return false; } // TODO merge this with getBucketCols function public String getBucketingDimensionId() { List<String> bcols = tTable.getSd().getBucketCols(); if (bcols == null || bcols.size() == 0) { return null; } if (bcols.size() > 1) { LOG.warn(this + " table has more than one dimensions which aren't supported yet"); } return bcols.get(0); } public void setDataLocation(Path path) { this.path = path; tTable.getSd().setLocation(path.toString()); } 
// Clears both the cached path field and the storage-descriptor location.
public void unsetDataLocation() {
  this.path = null;
  tTable.getSd().unsetLocation();
}

// Validates that every bucket column is an actual table column before
// storing the list on the storage descriptor.
public void setBucketCols(List<String> bucketCols) throws HiveException {
  if (bucketCols == null) {
    return;
  }

  for (String col : bucketCols) {
    if (!isField(col)) {
      throw new HiveException("Bucket columns " + col
          + " is not part of the table columns (" + getCols() );
    }
  }
  tTable.getSd().setBucketCols(bucketCols);
}

public void setSortCols(List<Order> sortOrder) throws HiveException {
  tTable.getSd().setSortCols(sortOrder);
}

// Records the directory that holds rows with the given skewed-column values,
// creating the value->location map on first use.
// NOTE(review): assumes getSkewedInfo() is already non-null here — confirm
// callers initialize skew info first.
public void setSkewedValueLocationMap(List<String> valList, String dirName)
    throws HiveException {
  Map<List<String>, String> mappings = tTable.getSd().getSkewedInfo()
      .getSkewedColValueLocationMaps();
  if (null == mappings) {
    mappings = new HashMap<List<String>, String>();
    tTable.getSd().getSkewedInfo().setSkewedColValueLocationMaps(mappings);
  }

  // Add or update new mapping
  mappings.put(valList, dirName);
}

// Never returns null: falls back to an empty map when no skew info exists.
public Map<List<String>,String> getSkewedColValueLocationMaps() {
  return (tTable.getSd().getSkewedInfo() != null) ? tTable.getSd().getSkewedInfo()
      .getSkewedColValueLocationMaps() : new HashMap<List<String>, String>();
}

public void setSkewedColValues(List<List<String>> skewedValues) throws HiveException {
  tTable.getSd().getSkewedInfo().setSkewedColValues(skewedValues);
}

// Never returns null: falls back to an empty list when no skew info exists.
public List<List<String>> getSkewedColValues(){
  return (tTable.getSd().getSkewedInfo() != null) ? tTable.getSd().getSkewedInfo()
      .getSkewedColValues() : new ArrayList<List<String>>();
}

public void setSkewedColNames(List<String> skewedColNames) throws HiveException {
  tTable.getSd().getSkewedInfo().setSkewedColNames(skewedColNames);
}

// Never returns null: falls back to an empty list when no skew info exists.
public List<String> getSkewedColNames() {
  return (tTable.getSd().getSkewedInfo() != null) ?
      tTable.getSd().getSkewedInfo() .getSkewedColNames()
      : new ArrayList<String>();
}

public SkewedInfo getSkewedInfo() {
  return tTable.getSd().getSkewedInfo();
}

public void setSkewedInfo(SkewedInfo skewedInfo) throws HiveException {
  tTable.getSd().setSkewedInfo(skewedInfo);
}

public boolean isStoredAsSubDirectories() {
  return tTable.getSd().isStoredAsSubDirectories();
}

public void setStoredAsSubDirectories(boolean storedAsSubDirectories) throws HiveException {
  tTable.getSd().setStoredAsSubDirectories(storedAsSubDirectories);
}

// True when col names one of the table's data columns (partition columns are
// not considered; comparison is case-sensitive).
private boolean isField(String col) {
  for (FieldSchema field : getCols()) {
    if (field.getName().equals(col)) {
      return true;
    }
  }
  return false;
}

// Data columns of the table. Uses the metastore schema when the serde is
// registered as metastore-backed, otherwise asks the deserializer; on any
// failure the error is logged and an empty list is returned.
public List<FieldSchema> getCols() {
  String serializationLib = getSerializationLib();
  try {
    if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
      return tTable.getSd().getCols();
    } else {
      return MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
    }
  } catch (Exception e) {
    LOG.error("Unable to get field from serde: " + serializationLib, e);
  }
  return new ArrayList<FieldSchema>();
}

/**
 * Returns a list of all the columns of the table (data columns + partition
 * columns in that order.
 *
 * @return List<FieldSchema>
 */
public List<FieldSchema> getAllCols() {
  ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
  f_list.addAll(getPartCols());
  f_list.addAll(getCols());
  return f_list;
}

public void setPartCols(List<FieldSchema> partCols) {
  tTable.setPartitionKeys(partCols);
}

public String getDbName() {
  return tTable.getDbName();
}

public int getNumBuckets() {
  return tTable.getSd().getNumBuckets();
}

/**
 * Replaces the directory corresponding to the table by srcf. Works by
 * deleting the table directory and renaming the source directory.
 *
 * @param srcf
 *          Source directory
 * @param isSrcLocal
 *          If the source directory is LOCAL
 */
protected void replaceFiles(Path srcf, boolean isSrcLocal)
    throws HiveException {
  Path tableDest = getPath();
  Hive.replaceFiles(tableDest, srcf, tableDest, tableDest, Hive.get().getConf(),
      isSrcLocal);
}

/**
 * Inserts files specified into the partition. Works by moving files
 *
 * @param srcf
 *          Files to be moved. Leaf directories or globbed file paths
 * @param isSrcLocal
 *          If the source directory is LOCAL
 * @param isAcid
 *          True if this is an ACID based insert, update, or delete
 * @param newFiles optional list of paths. If non-null, then all files copyied to the table
 *          will be added to this list.
 */
protected void copyFiles(Path srcf, boolean isSrcLocal, boolean isAcid, List<Path> newFiles)
    throws HiveException {
  FileSystem fs;
  try {
    fs = getDataLocation().getFileSystem(Hive.get().getConf());
    Hive.copyFiles(Hive.get().getConf(), srcf, getPath(), fs, isSrcLocal, isAcid, newFiles);
  } catch (IOException e) {
    throw new HiveException("addFiles: filesystem error in check phase", e);
  }
}

// Resolves the named class with the session classloader and installs it as
// the table's input format; null clears both the cached class and the
// storage-descriptor entry.
public void setInputFormatClass(String name) throws HiveException {
  if (name == null) {
    inputFormatClass = null;
    tTable.getSd().setInputFormat(null);
    return;
  }
  try {
    setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>) Class
        .forName(name, true, Utilities.getSessionSpecifiedClassLoader()));
  } catch (ClassNotFoundException e) {
    throw new HiveException("Class not found: " + name, e);
  }
}

// Same shape as setInputFormatClass(String), but routes through
// HiveFileFormatUtils so legacy output formats are substituted.
public void setOutputFormatClass(String name) throws HiveException {
  if (name == null) {
    outputFormatClass = null;
    tTable.getSd().setOutputFormat(null);
    return;
  }
  try {
    Class<?> origin = Class.forName(name, true, Utilities.getSessionSpecifiedClassLoader());
    setOutputFormatClass(HiveFileFormatUtils.getOutputFormatSubstitute(origin));
  } catch (ClassNotFoundException e) {
    throw new HiveException("Class not found: " + name, e);
  }
}

public boolean isPartitioned() {
  if (getPartCols() == null) {
    return false;
  }
  return (getPartCols().size() != 0);
}

public void setFields(List<FieldSchema> fields) {
  tTable.getSd().setCols(fields);
}

public void setNumBuckets(int nb) {
  tTable.getSd().setNumBuckets(nb);
}

/**
 * @return The owner of the table.
 * @see org.apache.hadoop.hive.metastore.api.Table#getOwner()
 */
public String getOwner() {
  return tTable.getOwner();
}

/**
 * @return The table parameters.
 * @see org.apache.hadoop.hive.metastore.api.Table#getParameters()
 */
public Map<String, String> getParameters() {
  return tTable.getParameters();
}

/**
 * @return The retention on the table.
 * @see org.apache.hadoop.hive.metastore.api.Table#getRetention()
 */
public int getRetention() {
  return tTable.getRetention();
}

/**
 * @param owner
 * @see org.apache.hadoop.hive.metastore.api.Table#setOwner(java.lang.String)
 */
public void setOwner(String owner) {
  tTable.setOwner(owner);
}

/**
 * @param retention
 * @see org.apache.hadoop.hive.metastore.api.Table#setRetention(int)
 */
public void setRetention(int retention) {
  tTable.setRetention(retention);
}

private SerDeInfo getSerdeInfo() {
  return tTable.getSd().getSerdeInfo();
}

public void setSerializationLib(String lib) {
  getSerdeInfo().setSerializationLib(lib);
}

public String getSerializationLib() {
  return getSerdeInfo().getSerializationLib();
}

public String getSerdeParam(String param) {
  return getSerdeInfo().getParameters().get(param);
}

// Returns the previous value for the parameter (Map.put semantics).
public String setSerdeParam(String param, String value) {
  return getSerdeInfo().getParameters().put(param, value);
}

public List<String> getBucketCols() {
  return tTable.getSd().getBucketCols();
}

public List<Order> getSortCols() {
  return tTable.getSd().getSortCols();
}

public void setTableName(String tableName) {
  tTable.setTableName(tableName);
}

public void setDbName(String databaseName) {
  tTable.setDbName(databaseName);
}

public List<FieldSchema> getPartitionKeys() {
  return tTable.getPartitionKeys();
}

/**
 * @return the original view text, or null if this table is not a view
 */
public String getViewOriginalText() {
  return tTable.getViewOriginalText();
}

/**
 * @param viewOriginalText
 *          the original view text to set
 */
public void setViewOriginalText(String viewOriginalText) {
  tTable.setViewOriginalText(viewOriginalText);
}

/**
 * @return the expanded view text, or null if this table is not a view
 */
public String getViewExpandedText() {
  return tTable.getViewExpandedText();
}

// Drops every serde parameter from the storage descriptor.
public void clearSerDeInfo() {
  tTable.getSd().getSerdeInfo().getParameters().clear();
}

/**
 * @param viewExpandedText
 *          the expanded view text to set
 */
public void setViewExpandedText(String viewExpandedText) {
  tTable.setViewExpandedText(viewExpandedText);
}

/**
 * @return whether this table is actually a view
 */
public boolean isView() {
  return TableType.VIRTUAL_VIEW.equals(getTableType());
}

/**
 * @return whether this table is actually an index table
 */
public boolean isIndexTable() {
  return TableType.INDEX_TABLE.equals(getTableType());
}

/**
 * Creates a partition name -> value spec map object
 *
 * @param tp
 *          Use the information from this partition.
 * @return Partition name to value mapping.
 */
public LinkedHashMap<String, String> createSpec(
    org.apache.hadoop.hive.metastore.api.Partition tp) {
  // Pair partition keys with the partition's values positionally; key order
  // is preserved by the LinkedHashMap.
  List<FieldSchema> fsl = getPartCols();
  List<String> tpl = tp.getValues();
  LinkedHashMap<String, String> spec = new LinkedHashMap<>(fsl.size());
  for (int i = 0; i < fsl.size(); i++) {
    FieldSchema fs = fsl.get(i);
    String value = tpl.get(i);
    spec.put(fs.getName(), value);
  }
  return spec;
}

// Deep copy of the underlying Thrift object: the new Table shares no state
// with this one.
public Table copy() throws HiveException {
  return new Table(tTable.deepCopy());
}

public void setCreateTime(int createTime) {
  tTable.setCreateTime(createTime);
}

public int getLastAccessTime() {
  return tTable.getLastAccessTime();
}

public void setLastAccessTime(int lastAccessTime) {
  tTable.setLastAccessTime(lastAccessTime);
}

// A table is non-native when a storage-handler property is set on it.
public boolean isNonNative() {
  return getProperty(
    org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)
    != null;
}

/**
 * @param protectMode
 */
public void setProtectMode(ProtectMode protectMode){
  // The mode is persisted as a table parameter; a null string form removes it.
  Map<String, String> parameters = tTable.getParameters();
  String pm = protectMode.toString();
  if (pm != null) {
    parameters.put(ProtectMode.PARAMETER_NAME, pm);
  } else {
    parameters.remove(ProtectMode.PARAMETER_NAME);
  }
  tTable.setParameters(parameters);
}

/**
 * @return protect mode
 */
public ProtectMode getProtectMode(){
  return MetaStoreUtils.getProtectMode(tTable);
}

/**
 * @return True protect mode indicates the table if offline.
 */
public boolean isOffline(){
  return getProtectMode().offline;
}

/**
 * @return True if protect mode attribute of the partition indicate
 *         that it is OK to drop the partition
 */
public boolean canDrop() {
  ProtectMode mode = getProtectMode();
  return (!mode.noDrop && !mode.offline && !mode.readOnly && !mode.noDropCascade);
}

/**
 * @return True if protect mode attribute of the table indicate
 *         that it is OK to write the table
 */
public boolean canWrite() {
  ProtectMode mode = getProtectMode();
  return (!mode.offline && !mode.readOnly);
}

/**
 * @return include the db name
 */
public String getCompleteName() {
  return getCompleteName(getDbName(), getTableName());
}

public static String getCompleteName(String dbName, String tabName) {
  return dbName + "@" + tabName;
}

// Globs the table directory (one level deeper for bucketed tables) and
// returns the matching files sorted, or null when nothing matches.
@SuppressWarnings("nls")
public FileStatus[] getSortedPaths() {
  try {
    // Previously, this got the filesystem of the Table, which could be
    // different from the filesystem of the partition.
    FileSystem fs = FileSystem.get(getPath().toUri(), SessionState.getSessionConf());
    String pathPattern = getPath().toString();
    if (getNumBuckets() > 0) {
      pathPattern = pathPattern + "/*";
    }
    LOG.info("Path pattern = " + pathPattern);
    FileStatus srcs[] = fs.globStatus(new Path(pathPattern), FileUtils.HIDDEN_FILES_PATH_FILTER);
    Arrays.sort(srcs);
    for (FileStatus src : srcs) {
      LOG.info("Got file: " + src.getPath());
    }
    if (srcs.length == 0) {
      return null;
    }
    return srcs;
  } catch (Exception e) {
    throw new RuntimeException("Cannot get path ", e);
  }
}

public boolean isTemporary() {
  return tTable.isTemporary();
}

public static boolean hasMetastoreBasedSchema(HiveConf conf, StorageDescriptor serde) {
  return hasMetastoreBasedSchema(conf, serde.getSerdeInfo().getSerializationLib());
}

// True when the serde is unset or is listed (via SERDESUSINGMETASTOREFORSCHEMA)
// as taking its schema from the metastore.
public static boolean hasMetastoreBasedSchema(HiveConf conf, String serdeLib) {
  return StringUtils.isEmpty(serdeLib) ||
      conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains(serdeLib);
}

// Rejects duplicate column names and any partition column that clashes with a
// data column; names are normalized (validated and lower-cased) first.
public static void validateColumns(List<FieldSchema> columns, List<FieldSchema> partCols)
    throws HiveException {
  List<String> colNames = new ArrayList<String>();
  for (FieldSchema partCol: columns) {
    String colName = normalize(partCol.getName());
    if (colNames.contains(colName)) {
      throw new HiveException("Duplicate column name " + colName
          + " in the table definition.");
    }
    colNames.add(colName);
  }
  if (partCols != null) {
    // there is no overlap between columns and partitioning columns
    for (FieldSchema partCol: partCols) {
      String colName = normalize(partCol.getName());
      if (colNames.contains(colName)) {
        throw new HiveException("Partition column name " + colName
            + " conflicts with table columns.");
      }
    }
  }
}

private static String normalize(String colName) throws HiveException {
  if (!MetaStoreUtils.validateColumnName(colName)) {
    throw new HiveException("Invalid column name '" + colName
        + "' in the table definition");
  }
  return colName.toLowerCase();
}
};
package com.mobile.justmobiledev.twowaybindingsampleapp.models.employee;

import com.mobile.justmobiledev.twowaybindingsampleapp.models.base.Person;

/**
 * A {@link Person} that additionally carries an employee id and an
 * optional business name (settable after construction).
 */
public class Employee extends Person {

    private String employeeId;
    private String businessName;

    /**
     * Creates an employee with the given id and name; the business name
     * is left unset.
     */
    public Employee(String employeeId, String firstName, String lastName) {
        this.employeeId = employeeId;
        this.firstName = firstName;
        this.lastName = lastName;
    }

    public String getEmployeeId() {
        return employeeId;
    }

    public void setEmployeeId(String employeeId) {
        this.employeeId = employeeId;
    }

    public String getBusinessName() {
        return businessName;
    }

    public void setBusinessName(String businessName) {
        this.businessName = businessName;
    }

    /** @return first and last name joined by a single space */
    public String getFullName() {
        return firstName + " " + getLastName();
    }
}
/*
 * Copyright (C) 2009 University of Washington
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package org.odk.collect.android.activities;

import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.provider.Settings;
import android.view.Window;

import org.odk.collect.android.R;
import org.odk.collect.android.application.Collect;
import org.odk.collect.android.utilities.InfoLogger;
import org.odk.collect.android.utilities.ToastUtils;
import org.odk.collect.android.widgets.GeoPointWidget;

import java.text.DecimalFormat;
import java.util.List;

/**
 * Captures a single GPS/network fix and hands it back to the caller as an
 * activity result. A progress dialog is shown while searching; the activity
 * finishes automatically once a fix meets the requested accuracy threshold,
 * or when the user saves/cancels from the dialog.
 */
public class GeoPointActivity extends Activity implements LocationListener {

    // Saved-instance-state key for mLocationCount.
    private static final String LOCATION_COUNT = "locationCount";

    private ProgressDialog mLocationDialog;
    private LocationManager mLocationManager;
    // Most recent fix received; null until onLocationChanged() delivers one.
    private Location mLocation;
    private boolean mGPSOn = false;
    private boolean mNetworkOn = false;
    // Accuracy (from the widget's threshold extra) at or below which a fix is auto-accepted.
    private double mLocationAccuracy;
    // Number of fixes seen so far; the first is deliberately ignored (may be cached).
    private int mLocationCount = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        requestWindowFeature(Window.FEATURE_NO_TITLE);

        if (savedInstanceState != null) {
            mLocationCount = savedInstanceState.getInt(LOCATION_COUNT);
        }

        // The accuracy threshold may be overridden via the launching intent.
        Intent intent = getIntent();

        mLocationAccuracy = GeoPointWidget.DEFAULT_LOCATION_ACCURACY;
        if (intent != null && intent.getExtras() != null) {
            if (intent.hasExtra(GeoPointWidget.ACCURACY_THRESHOLD)) {
                mLocationAccuracy = intent.getDoubleExtra(GeoPointWidget.ACCURACY_THRESHOLD,
                        GeoPointWidget.DEFAULT_LOCATION_ACCURACY);
            }
        }

        setTitle(getString(R.string.get_location));

        mLocationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);

        // make sure we have a good location provider before continuing
        List<String> providers = mLocationManager.getProviders(true);
        for (String provider : providers) {
            if (provider.equalsIgnoreCase(LocationManager.GPS_PROVIDER)) {
                mGPSOn = true;
            }
            if (provider.equalsIgnoreCase(LocationManager.NETWORK_PROVIDER)) {
                mNetworkOn = true;
            }
        }
        if (!mGPSOn && !mNetworkOn) {
            // No usable provider: point the user at the system location settings.
            ToastUtils.showShortToast(R.string.provider_disabled_error);
            Intent onGPS_intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
            startActivity(onGPS_intent);
            finish();
        }

        // Log last known fixes for diagnostics only; they are never returned.
        if (mGPSOn) {
            Location loc = mLocationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
            if (loc != null) {
                InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                        + " lastKnownLocation(GPS) lat: "
                        + loc.getLatitude() + " long: "
                        + loc.getLongitude() + " acc: "
                        + loc.getAccuracy());
            } else {
                InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                        + " lastKnownLocation(GPS) null location");
            }
        }

        if (mNetworkOn) {
            Location loc = mLocationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
            if (loc != null) {
                InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                        + " lastKnownLocation(Network) lat: "
                        + loc.getLatitude() + " long: "
                        + loc.getLongitude() + " acc: "
                        + loc.getAccuracy());
            } else {
                InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                        + " lastKnownLocation(Network) null location");
            }
        }

        setupLocationDialog();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Preserve the fix counter across configuration changes.
        outState.putInt(LOCATION_COUNT, mLocationCount);
    }

    @Override
    protected void onPause() {
        super.onPause();

        // stops the GPS. Note that this will turn off the GPS if the screen goes to sleep.
        if (mLocationManager != null) {
            mLocationManager.removeUpdates(this);
        }

        // We're not using managed dialogs, so we have to dismiss the dialog to prevent it from
        // leaking memory.
        if (mLocationDialog != null && mLocationDialog.isShowing()) {
            mLocationDialog.dismiss();
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Re-subscribe to updates from every enabled provider.
        if (mLocationManager != null) {
            if (mGPSOn) {
                mLocationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
            }
            if (mNetworkOn) {
                mLocationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0,
                        this);
            }
        }
        if (mLocationDialog != null) {
            mLocationDialog.show();
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        Collect.getInstance().getActivityLogger().logOnStart(this);
    }

    @Override
    protected void onStop() {
        Collect.getInstance().getActivityLogger().logOnStop(this);
        super.onStop();
    }

    /**
     * Sets up the look and actions for the progress dialog while the GPS is searching.
     */
    private void setupLocationDialog() {
        Collect.getInstance().getActivityLogger().logInstanceAction(this, "setupLocationDialog",
                "show");
        // dialog displayed while fetching gps location
        mLocationDialog = new ProgressDialog(this);
        // One listener serves both buttons: positive saves, negative discards.
        DialogInterface.OnClickListener geopointButtonListener =
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        switch (which) {
                            case DialogInterface.BUTTON_POSITIVE:
                                Collect.getInstance().getActivityLogger().logInstanceAction(this,
                                        "acceptLocation", "OK");
                                returnLocation();
                                break;
                            case DialogInterface.BUTTON_NEGATIVE:
                                Collect.getInstance().getActivityLogger().logInstanceAction(this,
                                        "cancelLocation", "cancel");
                                mLocation = null;
                                finish();
                                break;
                        }
                    }
                };

        // back button doesn't cancel
        mLocationDialog.setCancelable(false);
        mLocationDialog.setIndeterminate(true);
        mLocationDialog.setIcon(android.R.drawable.ic_dialog_info);
        mLocationDialog.setTitle(getString(R.string.getting_location));
        mLocationDialog.setMessage(getString(R.string.please_wait_long));
        mLocationDialog.setButton(DialogInterface.BUTTON_POSITIVE,
                getString(R.string.save_point), geopointButtonListener);
        mLocationDialog.setButton(DialogInterface.BUTTON_NEGATIVE,
                getString(R.string.cancel_location), geopointButtonListener);
    }

    // Packages the current fix (if any) as "lat long alt acc" in the result
    // intent and finishes; finishes with no result when there is no fix.
    private void returnLocation() {
        if (mLocation != null) {
            Intent i = new Intent();
            i.putExtra(
                    FormEntryActivity.LOCATION_RESULT,
                    mLocation.getLatitude() + " " + mLocation.getLongitude() + " "
                            + mLocation.getAltitude() + " " + mLocation.getAccuracy());
            setResult(RESULT_OK, i);
        }
        finish();
    }

    @Override
    public void onLocationChanged(Location location) {
        mLocation = location;
        if (mLocation != null) {
            // Bug report: cached GeoPoint is being returned as the first value.
            // Wait for the 2nd value to be returned, which is hopefully not cached?
            ++mLocationCount;
            InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                    + " onLocationChanged(" + mLocationCount + ") lat: "
                    + mLocation.getLatitude() + " long: "
                    + mLocation.getLongitude() + " acc: "
                    + mLocation.getAccuracy());

            if (mLocationCount > 1) {
                mLocationDialog.setMessage(getString(R.string.location_provider_accuracy,
                        mLocation.getProvider(), truncateDouble(mLocation.getAccuracy())));

                // Auto-accept once the fix is within the requested accuracy.
                if (mLocation.getAccuracy() <= mLocationAccuracy) {
                    returnLocation();
                }
            }
        } else {
            InfoLogger.geolog("GeoPointActivity: " + System.currentTimeMillis()
                    + " onLocationChanged(" + mLocationCount + ") null location");
        }
    }

    // Formats an accuracy value with at most two decimal places for display.
    private String truncateDouble(float number) {
        DecimalFormat df = new DecimalFormat("#.##");
        return df.format(number);
    }

    @Override
    public void onProviderDisabled(String provider) {
    }

    @Override
    public void onProviderEnabled(String provider) {
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
        switch (status) {
            case LocationProvider.AVAILABLE:
                if (mLocation != null) {
                    mLocationDialog.setMessage(getString(R.string.location_accuracy,
                            mLocation.getAccuracy()));
                }
                break;
            case LocationProvider.OUT_OF_SERVICE:
                break;
            case LocationProvider.TEMPORARILY_UNAVAILABLE:
                break;
        }
    }
}
package dtprogrammer.github.io.p34;

import org.junit.Assert;

/**
 * Unit test for {@code Solution.searchRange} (LeetCode 34: find first and
 * last position of a target in a sorted array).
 */
public class Test {

    @org.junit.Test
    public void test() throws Exception {
        Solution solution = new Solution();
        int[] array = {5, 7, 7, 8, 8, 10};
        int[] expected = {3, 4};
        // assertArrayEquals reports the first differing index on failure,
        // unlike assertTrue(Arrays.equals(...)) which only reports "false".
        Assert.assertArrayEquals(expected, solution.searchRange(array, 8));
    }
}