text
stringlengths
7
1.01M
package com.rachio.iro.ui.fragment;

import android.support.v4.app.Fragment;

/**
 * Callback contract allowing a fragment host (typically an Activity) to be
 * notified when a particular {@link Fragment} becomes the selected one.
 */
public interface OnFragmentSelectedListener {

    /**
     * Invoked when the given fragment is selected.
     *
     * @param fragment the fragment that was brought into focus
     */
    void onFragmentSelected(Fragment fragment);
}
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.model.container;

import com.yahoo.cloud.config.ZookeeperServerConfig;
import com.yahoo.config.model.api.ModelContext;
import com.yahoo.config.model.api.container.ContainerServiceType;
import com.yahoo.config.model.deploy.DeployState;
import com.yahoo.config.model.producer.AbstractConfigProducer;
import com.yahoo.config.provision.ClusterSpec;
import com.yahoo.config.provision.NodeResources;
import com.yahoo.search.config.QrStartConfig;
import com.yahoo.vespa.model.container.component.SimpleComponent;

/**
 * A container that is typically used by container clusters set up from the user application.
 *
 * Produces {@link QrStartConfig} (JVM sizing) and {@link ZookeeperServerConfig} (server id)
 * for the node it models.
 *
 * @author gjoranv
 */
public final class ApplicationContainer extends Container implements
        QrStartConfig.Producer,
        ZookeeperServerConfig.Producer {

    // Extra JVM flags prepended only for hosted Vespa containers that run docproc
    // (see getJvmOptions below).
    private static final String defaultHostedJVMArgs = "-XX:+UseOSErrorReporting -XX:+SuppressFatalErrorMessage";

    // Captured from the deploy state at construction time.
    private final boolean isHostedVespa;

    /** Convenience constructor for a non-retired container. */
    public ApplicationContainer(AbstractConfigProducer<?> parent, String name, int index, DeployState deployState) {
        this(parent, name, false, index, deployState);
    }

    /**
     * @param retired whether this container is being phased out of the cluster
     * @param index   node index within the cluster; reused as the zookeeper server id
     */
    public ApplicationContainer(AbstractConfigProducer<?> parent, String name, boolean retired, int index, DeployState deployState) {
        super(parent, name, retired, index, deployState);
        this.isHostedVespa = deployState.isHosted();
        // Messagebus plumbing components every application container needs.
        addComponent(new SimpleComponent("com.yahoo.container.jdisc.messagebus.NetworkMultiplexerHolder"));
        addComponent(new SimpleComponent("com.yahoo.container.jdisc.messagebus.NetworkMultiplexerProvider"));
        addComponent(new SimpleComponent("com.yahoo.container.jdisc.messagebus.SessionCache"));
    }

    @Override
    public void getConfig(QrStartConfig.Builder builder) {
        // Size the JVM's available processors from the node's real resources,
        // when known; floor of 2 avoids starving the container on tiny nodes.
        if (getHostResource() != null) {
            NodeResources nodeResources = getHostResource().realResources();
            if ( ! nodeResources.isUnspecified()) {
                builder.jvm.availableProcessors(Math.max(2, (int)Math.ceil(nodeResources.vcpu())));
            }
        }
    }

    @Override
    protected ContainerServiceType myServiceType() {
        if (parent instanceof ContainerCluster) {
            ContainerCluster<?> cluster = (ContainerCluster<?>)parent;
            // TODO: The 'qrserver' name is retained for legacy reasons (e.g. system tests and log parsing).
            // A search-only cluster (no docproc, no document api) is reported as QRSERVER.
            if (cluster.getSearch() != null && cluster.getDocproc() == null && cluster.getDocumentApi() == null) {
                return ContainerServiceType.QRSERVER;
            }
        }
        return ContainerServiceType.CONTAINER;
    }

    /** Returns the jvm arguments this should start with */
    @Override
    public String getJvmOptions() {
        String jvmArgs = super.getJvmOptions();
        // Hosted + docproc containers get the default hosted flags prepended.
        return isHostedVespa && hasDocproc()
                ? ("".equals(jvmArgs) ? defaultHostedJVMArgs : defaultHostedJVMArgs + " " + jvmArgs)
                : jvmArgs;
    }

    private boolean hasDocproc() {
        return (parent instanceof ContainerCluster) && (((ContainerCluster<?>)parent).getDocproc() != null);
    }

    @Override
    public void getConfig(ZookeeperServerConfig.Builder builder) {
        // The node index doubles as the zookeeper server id (myid).
        builder.myid(index());
    }

    @Override
    protected String jvmOmitStackTraceInFastThrowOption(ModelContext.FeatureFlags featureFlags) {
        return featureFlags.jvmOmitStackTraceInFastThrowOption(ClusterSpec.Type.container);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.testing.unittestsupport.applib.core.bidir; import javax.jdo.annotations.PersistenceCapable; @PersistenceCapable public class ChildDomainObject implements Comparable<ChildDomainObject> { // {{ Index (property) private int index; public int getIndex() { return index; } public void setIndex(final int index) { this.index = index; } // }} // {{ Parent (property) private ParentDomainObject parent; public ParentDomainObject getParent() { return parent; } public void setParent(final ParentDomainObject parent) { this.parent = parent; } public void modifyParent(final ParentDomainObject parent) { ParentDomainObject currentParent = getParent(); // check for no-op if (parent == null || parent.equals(currentParent)) { return; } // delegate to parent to associate parent.addToChildren(this); } public void clearParent() { ParentDomainObject currentParent = getParent(); // check for no-op if (currentParent == null) { return; } // delegate to parent to dissociate currentParent.removeFromChildren(this); } // }} @Override public int compareTo(ChildDomainObject other) { return this.getIndex() - other.getIndex(); } }
/* * Copyright 2014 Red Hat, Inc. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package examples; import io.vertx.core.Vertx; import io.vertx.core.json.JsonObject; import io.vertx.ext.auth.htpasswd.HtpasswdAuth; import io.vertx.ext.auth.htpasswd.HtpasswdAuthOptions; /** * @author Neven Radovanović */ public class AuthHtpasswdExamples { public void example1(Vertx vertx) { HtpasswdAuth authProvider = HtpasswdAuth .create(vertx, new HtpasswdAuthOptions()); } public void example2(HtpasswdAuth authProvider) { JsonObject authInfo = new JsonObject() .put("username", "someUser") .put("password", "somePassword"); authProvider.authenticate(authInfo) .onSuccess(user -> { // OK }) .onFailure(err -> { // Failed! }); } }
package perobobbot.server.sse;

import lombok.NonNull;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import perobobbot.security.com.User;

/**
 * Factory of per-user {@link SseEmitter} instances, covering both the initial
 * SSE connection and reconnection after a dropped connection.
 */
public interface Hub {

    /**
     * Creates an emitter for a brand-new SSE connection.
     *
     * @param user the user that request to SSE connection
     * @return an sse emitter to transmit the event to the user
     */
    @NonNull
    SseEmitter createEmitterForNewConnection(@NonNull User user);

    /**
     * Creates an emitter for a client reconnecting after a dropped connection,
     * so that events after {@code lastEventId} can be replayed.
     *
     * @param user the user that request to SSE connection
     * @param lastEventId id of the last received event
     * @return an sse emitter to transmit the event to the user
     */
    @NonNull
    SseEmitter createEmitterForReconnection(@NonNull User user, @NonNull String lastEventId);
}
package org.uma.jmetal.problem.multiobjective.mop;

import org.uma.jmetal.problem.doubleproblem.impl.AbstractDoubleProblem;
import org.uma.jmetal.solution.doublesolution.DoubleSolution;

import java.util.ArrayList;
import java.util.List;

/**
 * Problem MOP2. Defined in
 * H. L. Liu, F. Gu and Q. Zhang, "Decomposition of a Multiobjective
 * Optimization Problem Into a Number of Simple Multiobjective Subproblems,"
 * in IEEE Transactions on Evolutionary Computation, vol. 18, no. 3, pp.
 * 450-455, June 2014.
 *
 * Bi-objective problem over variables in [0, 1]:
 *   f0 = (1 + g) * x0
 *   f1 = (1 + g) * (1 - x0^2)
 * where g is the distance function computed by {@link #evalG}.
 *
 * @author Mastermay <javismay@gmail.com>
 */
@SuppressWarnings("serial")
public class MOP2 extends AbstractDoubleProblem {

  /**
   * Constructor. Creates default instance of problem MOP2 (10 decision variables)
   */
  public MOP2() {
    this(10);
  }

  /**
   * Creates a new instance of problem MOP2.
   *
   * @param numberOfVariables Number of variables.
   */
  public MOP2(Integer numberOfVariables) {
    setNumberOfVariables(numberOfVariables);
    setNumberOfObjectives(2);
    setName("MOP2");

    // Every decision variable is bounded to [0, 1].
    List<Double> lowerLimit = new ArrayList<>(getNumberOfVariables());
    List<Double> upperLimit = new ArrayList<>(getNumberOfVariables());

    for (int i = 0; i < getNumberOfVariables(); i++) {
      lowerLimit.add(0.0);
      upperLimit.add(1.0);
    }

    setVariableBounds(lowerLimit, upperLimit);
  }

  /** Evaluate() method: fills solution.objectives() in place and returns the solution. */
  public DoubleSolution evaluate(DoubleSolution solution) {
    double[] f = new double[solution.objectives().length];

    double g = this.evalG(solution);
    // Objectives depend only on x0 and the distance function g.
    f[0] = (1 + g) * solution.variables().get(0);
    f[1] = (1 + g) * (1 - solution.variables().get(0) * solution.variables().get(0));

    solution.objectives()[0] = f[0];
    solution.objectives()[1] = f[1];
    return solution;
  }

  /**
   * Returns the value of the MOP2 function G.
   *
   * @param solution Solution
   */
  private double evalG(DoubleSolution solution) {
    double g = 0.0;
    // Sum over x1..x(n-1): |t| / (1 + e^{5|t|}), with t = xi - sin(0.5*pi*x0).
    for (int i = 1; i < solution.variables().size(); i++) {
      double t = solution.variables().get(i) - Math.sin(0.5 * Math.PI * solution.variables().get(0));
      g += Math.abs(t) / (1 + Math.exp(5 * Math.abs(t)));
    }
    // Scale the sum by 10*sin(pi*x0), as in the paper's definition.
    g = 10 * Math.sin(Math.PI * solution.variables().get(0)) * g;
    return g;
  }
}
import java.sql.*; public class UsuariosDAO { public static Statement stUsuarios; public static ResultSet rsUsuarios; public static UsuariosVO validaUsuario(String tmpNome, String tmpSenha) throws Exception{ UsuariosVO tmpUsuarios = new UsuariosVO(); try { ConexaoDAO.abreConexao(); } catch (Exception erro) { throw new Exception(erro.getMessage()); } try { String sqlLogin = "SELECT * FROM USERS WHERE USERNAME LIKE '"+tmpNome+"' " + "AND PASSWORD LIKE '"+tmpSenha+"'"; stUsuarios = ConexaoDAO.connSistema.createStatement(); rsUsuarios = stUsuarios.executeQuery(sqlLogin); if(rsUsuarios.next()){ //Preenchendo objeto tmpUsuarios.setNomeUsuario(rsUsuarios.getString("userName")); tmpUsuarios.setSenha(rsUsuarios.getString("password")); tmpUsuarios.setPermissao(rsUsuarios.getInt("permission")); } } catch (Exception erro) { throw new Exception("Usuário não encontrado/nErro original: "+erro.getMessage()); } try { ConexaoDAO.fechaConexao(); } catch (Exception erro) { throw new Exception(erro.getMessage()); } return tmpUsuarios; } } /* Trabalho para entregar: 6 grupos para desenvolver os 6 módulos que faltam no sistema */
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.elasticsearch; import io.prestosql.elasticsearch.client.ElasticsearchClient; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.connector.ConnectorSplitManager; import io.prestosql.spi.connector.ConnectorSplitSource; import io.prestosql.spi.connector.ConnectorTableHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.FixedSplitSource; import javax.inject.Inject; import java.util.List; import static com.google.common.collect.ImmutableList.toImmutableList; import static java.util.Objects.requireNonNull; public class ElasticsearchSplitManager implements ConnectorSplitManager { private final ElasticsearchClient client; @Inject public ElasticsearchSplitManager(ElasticsearchClient client) { this.client = requireNonNull(client, "client is null"); } @Override public ConnectorSplitSource getSplits(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy) { ElasticsearchTableHandle tableHandle = (ElasticsearchTableHandle) table; List<ElasticsearchSplit> splits = client.getSearchShards(tableHandle.getIndex()).stream() .map(shard -> new ElasticsearchSplit(shard.getId(), shard.getAddress())) .collect(toImmutableList()); return new FixedSplitSource(splits); } }
// NOTE(review): This class appears to be decompiled AppCompat output (synthetic names such as
// C04492/C04503, resources referenced through the obfuscated C0385R class, guava's Ints for
// MeasureSpec.EXACTLY). The code below is left byte-for-byte untouched; only comments were
// added at token-safe positions. One string literal ("...setEpicenterBounds(Rect) on
// PopupWindow. Oh well.") spans a raw line break in this source and is preserved exactly
// as found — presumably an artifact of how this file was captured; TODO confirm against the
// original before compiling.
package android.support.v7.widget; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Handler; import android.support.annotation.AttrRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.RestrictTo; import android.support.annotation.RestrictTo.Scope; import android.support.annotation.StyleRes; import android.support.v4.view.ViewCompat; import android.support.v4.widget.PopupWindowCompat; import android.support.v7.appcompat.C0385R; import android.support.v7.view.menu.ShowableListMenu; import android.util.AttributeSet; import android.util.Log; import android.view.KeyEvent; import android.view.KeyEvent.DispatcherState; import android.view.MotionEvent; import android.view.View; import android.view.View.MeasureSpec; import android.view.View.OnTouchListener; import android.view.ViewGroup; import android.view.ViewParent; import android.widget.AbsListView; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.LinearLayout; import android.widget.LinearLayout.LayoutParams; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.PopupWindow; import android.widget.PopupWindow.OnDismissListener; import com.google.common.primitives.Ints; import java.lang.reflect.Method; public class ListPopupWindow implements ShowableListMenu { private static final boolean DEBUG = false; static final int EXPAND_LIST_TIMEOUT = 250; public static final int INPUT_METHOD_FROM_FOCUSABLE = 0; public static final int INPUT_METHOD_NEEDED = 1; public static final int INPUT_METHOD_NOT_NEEDED = 2; public static final int MATCH_PARENT = -1; public static final int POSITION_PROMPT_ABOVE = 0; 
// Remaining constants, reflective Method handles (used to reach hidden PopupWindow APIs),
// drop-down state fields, and the inner Runnable/listener classes that show, resize, hide
// or dismiss the popup. C04492 re-posts show() once the anchor has a window token; C04503
// un-hides list selection when an item is selected.
public static final int POSITION_PROMPT_BELOW = 1; private static final String TAG = "ListPopupWindow"; public static final int WRAP_CONTENT = -2; private static Method sClipToWindowEnabledMethod; private static Method sGetMaxAvailableHeightMethod; private static Method sSetEpicenterBoundsMethod; private ListAdapter mAdapter; private Context mContext; private boolean mDropDownAlwaysVisible; private View mDropDownAnchorView; private int mDropDownGravity; private int mDropDownHeight; private int mDropDownHorizontalOffset; DropDownListView mDropDownList; private Drawable mDropDownListHighlight; private int mDropDownVerticalOffset; private boolean mDropDownVerticalOffsetSet; private int mDropDownWidth; private int mDropDownWindowLayoutType; private Rect mEpicenterBounds; private boolean mForceIgnoreOutsideTouch; final Handler mHandler; private final ListSelectorHider mHideSelector; private boolean mIsAnimatedFromAnchor; private OnItemClickListener mItemClickListener; private OnItemSelectedListener mItemSelectedListener; int mListItemExpandMaximum; private boolean mModal; private DataSetObserver mObserver; private boolean mOverlapAnchor; private boolean mOverlapAnchorSet; PopupWindow mPopup; private int mPromptPosition; private View mPromptView; final ResizePopupRunnable mResizePopupRunnable; private final PopupScrollListener mScrollListener; private Runnable mShowDropDownRunnable; private final Rect mTempRect; private final PopupTouchInterceptor mTouchInterceptor; class C04492 implements Runnable { C04492() { } public void run() { View view = ListPopupWindow.this.getAnchorView(); if (view != null && view.getWindowToken() != null) { ListPopupWindow.this.show(); } } } class C04503 implements OnItemSelectedListener { C04503() { } public void onItemSelected(AdapterView<?> adapterView, View view, int position, long id) { if (position != -1) { DropDownListView dropDownList = ListPopupWindow.this.mDropDownList; if (dropDownList != null) { 
// (continuation of C04503.onItemSelected: re-enable list selection rendering)
dropDownList.setListSelectionHidden(false); } } } public void onNothingSelected(AdapterView<?> adapterView) { } } private class ListSelectorHider implements Runnable { ListSelectorHider() { } public void run() { ListPopupWindow.this.clearListSelection(); } } private class PopupDataSetObserver extends DataSetObserver { PopupDataSetObserver() { } public void onChanged() { if (ListPopupWindow.this.isShowing()) { ListPopupWindow.this.show(); } } public void onInvalidated() { ListPopupWindow.this.dismiss(); } } private class PopupScrollListener implements OnScrollListener { PopupScrollListener() { } public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { } public void onScrollStateChanged(AbsListView view, int scrollState) { if (scrollState == 1 && !ListPopupWindow.this.isInputMethodNotNeeded() && ListPopupWindow.this.mPopup.getContentView() != null) { ListPopupWindow.this.mHandler.removeCallbacks(ListPopupWindow.this.mResizePopupRunnable); ListPopupWindow.this.mResizePopupRunnable.run(); } } } private class PopupTouchInterceptor implements OnTouchListener { PopupTouchInterceptor() { } public boolean onTouch(View v, MotionEvent event) { int action = event.getAction(); int x = (int) event.getX(); int y = (int) event.getY(); if (action == 0 && ListPopupWindow.this.mPopup != null && ListPopupWindow.this.mPopup.isShowing() && x >= 0 && x < ListPopupWindow.this.mPopup.getWidth() && y >= 0 && y < ListPopupWindow.this.mPopup.getHeight()) { ListPopupWindow.this.mHandler.postDelayed(ListPopupWindow.this.mResizePopupRunnable, 250); } else if (action == 1) { ListPopupWindow.this.mHandler.removeCallbacks(ListPopupWindow.this.mResizePopupRunnable); } return false; } } private class ResizePopupRunnable implements Runnable { ResizePopupRunnable() { } public void run() { if (ListPopupWindow.this.mDropDownList != null && ViewCompat.isAttachedToWindow(ListPopupWindow.this.mDropDownList) && ListPopupWindow.this.mDropDownList.getCount() > 
ListPopupWindow.this.mDropDownList.getChildCount() && ListPopupWindow.this.mDropDownList.getChildCount() <= ListPopupWindow.this.mListItemExpandMaximum) { ListPopupWindow.this.mPopup.setInputMethodMode(2); ListPopupWindow.this.show(); } } } static { try { sClipToWindowEnabledMethod = PopupWindow.class.getDeclaredMethod("setClipToScreenEnabled", new Class[]{Boolean.TYPE}); } catch (NoSuchMethodException e) { Log.i(TAG, "Could not find method setClipToScreenEnabled() on PopupWindow. Oh well."); } try { sGetMaxAvailableHeightMethod = PopupWindow.class.getDeclaredMethod("getMaxAvailableHeight", new Class[]{View.class, Integer.TYPE, Boolean.TYPE}); } catch (NoSuchMethodException e2) { Log.i(TAG, "Could not find method getMaxAvailableHeight(View, int, boolean) on PopupWindow. Oh well."); } try { sSetEpicenterBoundsMethod = PopupWindow.class.getDeclaredMethod("setEpicenterBounds", new Class[]{Rect.class}); } catch (NoSuchMethodException e3) { Log.i(TAG, "Could not find method setEpicenterBounds(Rect) on PopupWindow. 
Oh well."); } } public ListPopupWindow(@NonNull Context context) { this(context, null, C0385R.attr.listPopupWindowStyle); } public ListPopupWindow(@NonNull Context context, @Nullable AttributeSet attrs) { this(context, attrs, C0385R.attr.listPopupWindowStyle); } public ListPopupWindow(@NonNull Context context, @Nullable AttributeSet attrs, @AttrRes int defStyleAttr) { this(context, attrs, defStyleAttr, 0); } public ListPopupWindow(@NonNull Context context, @Nullable AttributeSet attrs, @AttrRes int defStyleAttr, @StyleRes int defStyleRes) { this.mDropDownHeight = -2; this.mDropDownWidth = -2; this.mDropDownWindowLayoutType = 1002; this.mIsAnimatedFromAnchor = true; this.mDropDownGravity = 0; this.mDropDownAlwaysVisible = false; this.mForceIgnoreOutsideTouch = false; this.mListItemExpandMaximum = Integer.MAX_VALUE; this.mPromptPosition = 0; this.mResizePopupRunnable = new ResizePopupRunnable(); this.mTouchInterceptor = new PopupTouchInterceptor(); this.mScrollListener = new PopupScrollListener(); this.mHideSelector = new ListSelectorHider(); this.mTempRect = new Rect(); this.mContext = context; this.mHandler = new Handler(context.getMainLooper()); TypedArray a = context.obtainStyledAttributes(attrs, C0385R.styleable.ListPopupWindow, defStyleAttr, defStyleRes); this.mDropDownHorizontalOffset = a.getDimensionPixelOffset(C0385R.styleable.ListPopupWindow_android_dropDownHorizontalOffset, 0); this.mDropDownVerticalOffset = a.getDimensionPixelOffset(C0385R.styleable.ListPopupWindow_android_dropDownVerticalOffset, 0); if (this.mDropDownVerticalOffset != 0) { this.mDropDownVerticalOffsetSet = true; } a.recycle(); this.mPopup = new AppCompatPopupWindow(context, attrs, defStyleAttr, defStyleRes); this.mPopup.setInputMethodMode(1); } public void setAdapter(@Nullable ListAdapter adapter) { if (this.mObserver == null) { this.mObserver = new PopupDataSetObserver(); } else if (this.mAdapter != null) { this.mAdapter.unregisterDataSetObserver(this.mObserver); } this.mAdapter = 
adapter; if (this.mAdapter != null) { adapter.registerDataSetObserver(this.mObserver); } if (this.mDropDownList != null) { this.mDropDownList.setAdapter(this.mAdapter); } } public void setPromptPosition(int position) { this.mPromptPosition = position; } public int getPromptPosition() { return this.mPromptPosition; } public void setModal(boolean modal) { this.mModal = modal; this.mPopup.setFocusable(modal); } public boolean isModal() { return this.mModal; } @RestrictTo({Scope.LIBRARY_GROUP}) public void setForceIgnoreOutsideTouch(boolean forceIgnoreOutsideTouch) { this.mForceIgnoreOutsideTouch = forceIgnoreOutsideTouch; } @RestrictTo({Scope.LIBRARY_GROUP}) public void setDropDownAlwaysVisible(boolean dropDownAlwaysVisible) { this.mDropDownAlwaysVisible = dropDownAlwaysVisible; } @RestrictTo({Scope.LIBRARY_GROUP}) public boolean isDropDownAlwaysVisible() { return this.mDropDownAlwaysVisible; } public void setSoftInputMode(int mode) { this.mPopup.setSoftInputMode(mode); } public int getSoftInputMode() { return this.mPopup.getSoftInputMode(); } public void setListSelector(Drawable selector) { this.mDropDownListHighlight = selector; } @Nullable public Drawable getBackground() { return this.mPopup.getBackground(); } public void setBackgroundDrawable(@Nullable Drawable d) { this.mPopup.setBackgroundDrawable(d); } public void setAnimationStyle(@StyleRes int animationStyle) { this.mPopup.setAnimationStyle(animationStyle); } @StyleRes public int getAnimationStyle() { return this.mPopup.getAnimationStyle(); } @Nullable public View getAnchorView() { return this.mDropDownAnchorView; } public void setAnchorView(@Nullable View anchor) { this.mDropDownAnchorView = anchor; } public int getHorizontalOffset() { return this.mDropDownHorizontalOffset; } public void setHorizontalOffset(int offset) { this.mDropDownHorizontalOffset = offset; } public int getVerticalOffset() { if (this.mDropDownVerticalOffsetSet) { return this.mDropDownVerticalOffset; } return 0; } public void 
setVerticalOffset(int offset) { this.mDropDownVerticalOffset = offset; this.mDropDownVerticalOffsetSet = true; } @RestrictTo({Scope.LIBRARY_GROUP}) public void setEpicenterBounds(Rect bounds) { this.mEpicenterBounds = bounds; } public void setDropDownGravity(int gravity) { this.mDropDownGravity = gravity; } public int getWidth() { return this.mDropDownWidth; } public void setWidth(int width) { this.mDropDownWidth = width; } public void setContentWidth(int width) { Drawable popupBackground = this.mPopup.getBackground(); if (popupBackground != null) { popupBackground.getPadding(this.mTempRect); this.mDropDownWidth = (this.mTempRect.left + this.mTempRect.right) + width; return; } setWidth(width); } public int getHeight() { return this.mDropDownHeight; } public void setHeight(int height) { if (height >= 0 || -2 == height || -1 == height) { this.mDropDownHeight = height; return; } throw new IllegalArgumentException("Invalid height. Must be a positive value, MATCH_PARENT, or WRAP_CONTENT."); } public void setWindowLayoutType(int layoutType) { this.mDropDownWindowLayoutType = layoutType; } public void setOnItemClickListener(@Nullable OnItemClickListener clickListener) { this.mItemClickListener = clickListener; } public void setOnItemSelectedListener(@Nullable OnItemSelectedListener selectedListener) { this.mItemSelectedListener = selectedListener; } public void setPromptView(@Nullable View prompt) { boolean showing = isShowing(); if (showing) { removePromptView(); } this.mPromptView = prompt; if (showing) { show(); } } public void postShow() { this.mHandler.post(this.mShowDropDownRunnable); } public void show() { boolean z = true; boolean z2 = false; int i = -1; int height = buildDropDown(); boolean noInputMethod = isInputMethodNotNeeded(); PopupWindowCompat.setWindowLayoutType(this.mPopup, this.mDropDownWindowLayoutType); int widthSpec; int heightSpec; PopupWindow popupWindow; if (!this.mPopup.isShowing()) { if (this.mDropDownWidth == -1) { widthSpec = -1; } else if 
(this.mDropDownWidth == -2) { widthSpec = getAnchorView().getWidth(); } else { widthSpec = this.mDropDownWidth; } if (this.mDropDownHeight == -1) { heightSpec = -1; } else if (this.mDropDownHeight == -2) { heightSpec = height; } else { heightSpec = this.mDropDownHeight; } this.mPopup.setWidth(widthSpec); this.mPopup.setHeight(heightSpec); setPopupClipToScreenEnabled(true); popupWindow = this.mPopup; if (this.mForceIgnoreOutsideTouch || this.mDropDownAlwaysVisible) { z = false; } popupWindow.setOutsideTouchable(z); this.mPopup.setTouchInterceptor(this.mTouchInterceptor); if (this.mOverlapAnchorSet) { PopupWindowCompat.setOverlapAnchor(this.mPopup, this.mOverlapAnchor); } if (sSetEpicenterBoundsMethod != null) { try { sSetEpicenterBoundsMethod.invoke(this.mPopup, new Object[]{this.mEpicenterBounds}); } catch (Exception e) { Log.e(TAG, "Could not invoke setEpicenterBounds on PopupWindow", e); } } PopupWindowCompat.showAsDropDown(this.mPopup, getAnchorView(), this.mDropDownHorizontalOffset, this.mDropDownVerticalOffset, this.mDropDownGravity); this.mDropDownList.setSelection(-1); if (!this.mModal || this.mDropDownList.isInTouchMode()) { clearListSelection(); } if (!this.mModal) { this.mHandler.post(this.mHideSelector); } } else if (ViewCompat.isAttachedToWindow(getAnchorView())) { int i2; if (this.mDropDownWidth == -1) { widthSpec = -1; } else if (this.mDropDownWidth == -2) { widthSpec = getAnchorView().getWidth(); } else { widthSpec = this.mDropDownWidth; } if (this.mDropDownHeight == -1) { if (noInputMethod) { heightSpec = height; } else { heightSpec = -1; } PopupWindow popupWindow2; int i3; if (noInputMethod) { popupWindow2 = this.mPopup; if (this.mDropDownWidth == -1) { i3 = -1; } else { i3 = 0; } popupWindow2.setWidth(i3); this.mPopup.setHeight(0); } else { popupWindow2 = this.mPopup; if (this.mDropDownWidth == -1) { i3 = -1; } else { i3 = 0; } popupWindow2.setWidth(i3); this.mPopup.setHeight(-1); } } else if (this.mDropDownHeight == -2) { heightSpec = height; } 
else { heightSpec = this.mDropDownHeight; } popupWindow = this.mPopup; if (!(this.mForceIgnoreOutsideTouch || this.mDropDownAlwaysVisible)) { z2 = true; } popupWindow.setOutsideTouchable(z2); popupWindow = this.mPopup; View anchorView = getAnchorView(); int i4 = this.mDropDownHorizontalOffset; int i5 = this.mDropDownVerticalOffset; if (widthSpec < 0) { i2 = -1; } else { i2 = widthSpec; } if (heightSpec >= 0) { i = heightSpec; } popupWindow.update(anchorView, i4, i5, i2, i); } } public void dismiss() { this.mPopup.dismiss(); removePromptView(); this.mPopup.setContentView(null); this.mDropDownList = null; this.mHandler.removeCallbacks(this.mResizePopupRunnable); } public void setOnDismissListener(@Nullable OnDismissListener listener) { this.mPopup.setOnDismissListener(listener); } private void removePromptView() { if (this.mPromptView != null) { ViewParent parent = this.mPromptView.getParent(); if (parent instanceof ViewGroup) { ((ViewGroup) parent).removeView(this.mPromptView); } } } public void setInputMethodMode(int mode) { this.mPopup.setInputMethodMode(mode); } public int getInputMethodMode() { return this.mPopup.getInputMethodMode(); } public void setSelection(int position) { DropDownListView list = this.mDropDownList; if (isShowing() && list != null) { list.setListSelectionHidden(false); list.setSelection(position); if (list.getChoiceMode() != 0) { list.setItemChecked(position, true); } } } public void clearListSelection() { DropDownListView list = this.mDropDownList; if (list != null) { list.setListSelectionHidden(true); list.requestLayout(); } } public boolean isShowing() { return this.mPopup.isShowing(); } public boolean isInputMethodNotNeeded() { return this.mPopup.getInputMethodMode() == 2; } public boolean performItemClick(int position) { if (!isShowing()) { return false; } if (this.mItemClickListener != null) { DropDownListView list = this.mDropDownList; int i = position; this.mItemClickListener.onItemClick(list, list.getChildAt(position - 
list.getFirstVisiblePosition()), i, list.getAdapter().getItemId(position)); } return true; } @Nullable public Object getSelectedItem() { if (isShowing()) { return this.mDropDownList.getSelectedItem(); } return null; } public int getSelectedItemPosition() { if (isShowing()) { return this.mDropDownList.getSelectedItemPosition(); } return -1; } public long getSelectedItemId() { if (isShowing()) { return this.mDropDownList.getSelectedItemId(); } return Long.MIN_VALUE; } @Nullable public View getSelectedView() { if (isShowing()) { return this.mDropDownList.getSelectedView(); } return null; } @Nullable public ListView getListView() { return this.mDropDownList; } @NonNull DropDownListView createDropDownListView(Context context, boolean hijackFocus) { return new DropDownListView(context, hijackFocus); } void setListItemExpandMax(int max) { this.mListItemExpandMaximum = max; } public boolean onKeyDown(int keyCode, @NonNull KeyEvent event) { if (isShowing() && keyCode != 62 && (this.mDropDownList.getSelectedItemPosition() >= 0 || !isConfirmKey(keyCode))) { boolean below; int curIndex = this.mDropDownList.getSelectedItemPosition(); if (this.mPopup.isAboveAnchor()) { below = false; } else { below = true; } ListAdapter adapter = this.mAdapter; int firstItem = Integer.MAX_VALUE; int lastItem = Integer.MIN_VALUE; if (adapter != null) { boolean allEnabled = adapter.areAllItemsEnabled(); if (allEnabled) { firstItem = 0; } else { firstItem = this.mDropDownList.lookForSelectablePosition(0, true); } if (allEnabled) { lastItem = adapter.getCount() - 1; } else { lastItem = this.mDropDownList.lookForSelectablePosition(adapter.getCount() - 1, false); } } if (!(below && keyCode == 19 && curIndex <= firstItem) && (below || keyCode != 20 || curIndex < lastItem)) { this.mDropDownList.setListSelectionHidden(false); if (this.mDropDownList.onKeyDown(keyCode, event)) { this.mPopup.setInputMethodMode(2); this.mDropDownList.requestFocusFromTouch(); show(); switch (keyCode) { case 19: case 20: case 
23: case 66: return true; } } else if (below && keyCode == 20) { if (curIndex == lastItem) { return true; } } else if (!below && keyCode == 19 && curIndex == firstItem) { return true; } } clearListSelection(); this.mPopup.setInputMethodMode(1); show(); return true; } return false; } public boolean onKeyUp(int keyCode, @NonNull KeyEvent event) { if (!isShowing() || this.mDropDownList.getSelectedItemPosition() < 0) { return false; } boolean consumed = this.mDropDownList.onKeyUp(keyCode, event); if (!consumed || !isConfirmKey(keyCode)) { return consumed; } dismiss(); return consumed; } public boolean onKeyPreIme(int keyCode, @NonNull KeyEvent event) { if (keyCode == 4 && isShowing()) { View anchorView = this.mDropDownAnchorView; DispatcherState state; if (event.getAction() == 0 && event.getRepeatCount() == 0) { state = anchorView.getKeyDispatcherState(); if (state == null) { return true; } state.startTracking(event, this); return true; } else if (event.getAction() == 1) { state = anchorView.getKeyDispatcherState(); if (state != null) { state.handleUpEvent(event); } if (event.isTracking() && !event.isCanceled()) { dismiss(); return true; } } } return false; } public OnTouchListener createDragToOpenListener(View src) { return new ForwardingListener(src) { public ListPopupWindow getPopup() { return ListPopupWindow.this; } }; } private int buildDropDown() { int padding; int otherHeights = 0; ViewGroup dropDownView; LayoutParams hintParams; if (this.mDropDownList == null) { Context context = this.mContext; this.mShowDropDownRunnable = new C04492(); this.mDropDownList = createDropDownListView(context, !this.mModal); if (this.mDropDownListHighlight != null) { this.mDropDownList.setSelector(this.mDropDownListHighlight); } this.mDropDownList.setAdapter(this.mAdapter); this.mDropDownList.setOnItemClickListener(this.mItemClickListener); this.mDropDownList.setFocusable(true); this.mDropDownList.setFocusableInTouchMode(true); this.mDropDownList.setOnItemSelectedListener(new 
C04503()); this.mDropDownList.setOnScrollListener(this.mScrollListener); if (this.mItemSelectedListener != null) { this.mDropDownList.setOnItemSelectedListener(this.mItemSelectedListener); } dropDownView = this.mDropDownList; View hintView = this.mPromptView; if (hintView != null) { int widthMode; int widthSize; ViewGroup hintContainer = new LinearLayout(context); hintContainer.setOrientation(1); hintParams = new LayoutParams(-1, 0, FlexItem.FLEX_SHRINK_DEFAULT); switch (this.mPromptPosition) { case 0: hintContainer.addView(hintView); hintContainer.addView(dropDownView, hintParams); break; case 1: hintContainer.addView(dropDownView, hintParams); hintContainer.addView(hintView); break; default: Log.e(TAG, "Invalid hint position " + this.mPromptPosition); break; } if (this.mDropDownWidth >= 0) { widthMode = Integer.MIN_VALUE; widthSize = this.mDropDownWidth; } else { widthMode = 0; widthSize = 0; } hintView.measure(MeasureSpec.makeMeasureSpec(widthSize, widthMode), 0); hintParams = (LayoutParams) hintView.getLayoutParams(); otherHeights = (hintView.getMeasuredHeight() + hintParams.topMargin) + hintParams.bottomMargin; dropDownView = hintContainer; } this.mPopup.setContentView(dropDownView); } else { dropDownView = (ViewGroup) this.mPopup.getContentView(); View view = this.mPromptView; if (view != null) { hintParams = (LayoutParams) view.getLayoutParams(); otherHeights = (view.getMeasuredHeight() + hintParams.topMargin) + hintParams.bottomMargin; } } Drawable background = this.mPopup.getBackground(); if (background != null) { background.getPadding(this.mTempRect); padding = this.mTempRect.top + this.mTempRect.bottom; if (!this.mDropDownVerticalOffsetSet) { this.mDropDownVerticalOffset = -this.mTempRect.top; } } else { this.mTempRect.setEmpty(); padding = 0; } int maxHeight = getMaxAvailableHeight(getAnchorView(), this.mDropDownVerticalOffset, this.mPopup.getInputMethodMode() == 2); if (this.mDropDownAlwaysVisible || this.mDropDownHeight == -1) { return maxHeight + 
padding; } int childWidthSpec; switch (this.mDropDownWidth) { case -2: childWidthSpec = MeasureSpec.makeMeasureSpec(this.mContext.getResources().getDisplayMetrics().widthPixels - (this.mTempRect.left + this.mTempRect.right), Integer.MIN_VALUE); break; case -1: childWidthSpec = MeasureSpec.makeMeasureSpec(this.mContext.getResources().getDisplayMetrics().widthPixels - (this.mTempRect.left + this.mTempRect.right), Ints.MAX_POWER_OF_TWO); break; default: childWidthSpec = MeasureSpec.makeMeasureSpec(this.mDropDownWidth, Ints.MAX_POWER_OF_TWO); break; } int listContent = this.mDropDownList.measureHeightOfChildrenCompat(childWidthSpec, 0, -1, maxHeight - otherHeights, -1); if (listContent > 0) { otherHeights += padding + (this.mDropDownList.getPaddingTop() + this.mDropDownList.getPaddingBottom()); } return listContent + otherHeights; } @RestrictTo({Scope.LIBRARY_GROUP}) public void setOverlapAnchor(boolean overlapAnchor) { this.mOverlapAnchorSet = true; this.mOverlapAnchor = overlapAnchor; } private static boolean isConfirmKey(int keyCode) { return keyCode == 66 || keyCode == 23; } private void setPopupClipToScreenEnabled(boolean clip) { if (sClipToWindowEnabledMethod != null) { try { sClipToWindowEnabledMethod.invoke(this.mPopup, new Object[]{Boolean.valueOf(clip)}); } catch (Exception e) { Log.i(TAG, "Could not call setClipToScreenEnabled() on PopupWindow. Oh well."); } } } private int getMaxAvailableHeight(View anchor, int yOffset, boolean ignoreBottomDecorations) { if (sGetMaxAvailableHeightMethod != null) { try { return ((Integer) sGetMaxAvailableHeightMethod.invoke(this.mPopup, new Object[]{anchor, Integer.valueOf(yOffset), Boolean.valueOf(ignoreBottomDecorations)})).intValue(); } catch (Exception e) { Log.i(TAG, "Could not call getMaxAvailableHeightMethod(View, int, boolean) on PopupWindow. Using the public version."); } } return this.mPopup.getMaxAvailableHeight(anchor, yOffset); } }
package tech.tablesaw.columns.numbers; import static java.lang.Double.NaN; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.Test; import tech.tablesaw.api.DoubleColumn; import tech.tablesaw.api.Table; import tech.tablesaw.io.csv.CsvReadOptions; import tech.tablesaw.selection.Selection; public class NumberFiltersTest { @Test public void testIsEqualTo() { double[] values = {4, 1, 1, 2, 2}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isEqualTo(1.0); assertEquals(1, selection.get(0)); assertEquals(2, selection.get(1)); assertEquals(2, selection.size()); } @Test public void testIsNotEqualTo() { double[] values = {4, 1, 1, 2, 2}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isNotEqualTo(1.0); assertEquals(0, selection.get(0)); assertEquals(3, selection.get(1)); assertEquals(4, selection.get(2)); assertEquals(3, selection.size()); } @Test public void testIsZero() { double[] values = {4, 0, -1}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isZero(); assertEquals(1, selection.get(0)); assertEquals(1, selection.size()); } @Test public void testIsPositive() { double[] values = {4, 0, -1}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isPositive(); assertEquals(0, selection.get(0)); assertEquals(1, selection.size()); } @Test public void testIsNegative() { double[] values = {4, 0, -0.00001}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isNegative(); assertEquals(2, selection.get(0)); assertEquals(1, selection.size()); } @Test public void testIsNonNegative() { double[] values = {4, 0, -0.00001}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isNonNegative(); assertEquals(0, selection.get(0)); assertEquals(1, selection.get(1)); assertEquals(2, 
selection.size()); } @Test public void testIsGreaterThanOrEqualTo() { double[] values = {4, 0, -0.00001}; double[] otherValues = {4, -1.3, 0.00001, NaN}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isGreaterThanOrEqualTo(0.0); assertEquals(0, selection.get(0)); assertEquals(1, selection.get(1)); assertEquals(2, selection.size()); DoubleColumn others = DoubleColumn.create("others", otherValues); Selection selection1 = doubles.isGreaterThanOrEqualTo(others); assertEquals(0, selection1.get(0)); assertEquals(1, selection1.get(1)); assertEquals(2, selection1.size()); } @Test public void testIsLessThanOrEqualTo() { double[] values = {4, 0, -0.00001}; double[] otherValues = {4, -1.3, 0.00001, NaN}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isLessThanOrEqualTo(0.0); assertEquals(1, selection.get(0)); assertEquals(2, selection.get(1)); assertEquals(2, selection.size()); DoubleColumn others = DoubleColumn.create("others", otherValues); Selection selection1 = doubles.isLessThanOrEqualTo(others); assertEquals(0, selection1.get(0)); assertEquals(2, selection1.get(1)); assertEquals(2, selection1.size()); } @Test public void testIsLessThan() { double[] values = {4, 0, -0.00001, 5.0}; double[] values2 = {4, 11, -3.00001, 5.1}; DoubleColumn doubles = DoubleColumn.create("doubles", values); DoubleColumn doubles2 = DoubleColumn.create("doubles2", values2); Selection selection = doubles.isLessThan(doubles2); assertEquals(1, selection.get(0)); assertEquals(3, selection.get(1)); assertEquals(2, selection.size()); } @Test public void testIsGreaterThan() { double[] values = {4, 0, -0.00001, 5.0}; double[] otherValues = {4, -1.3, 0.00001, NaN}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isGreaterThan(0); assertEquals(0, selection.get(0)); assertEquals(3, selection.get(1)); assertEquals(2, selection.size()); DoubleColumn others = 
DoubleColumn.create("others", otherValues); Selection selection1 = doubles.isGreaterThan(others); assertEquals(1, selection1.get(0)); assertEquals(1, selection1.size()); } @Test public void testIsEqualTo1() { double[] values = {4, 0, -0.00001, 5.0, 4.44443}; double[] values2 = {4, 11, -3.00001, 5.1, 4.44443}; DoubleColumn doubles = DoubleColumn.create("doubles", values); DoubleColumn doubles2 = DoubleColumn.create("doubles2", values2); Selection selection = doubles.isEqualTo(doubles2); assertEquals(0, selection.get(0)); assertEquals(4, selection.get(1)); assertEquals(2, selection.size()); } @Test public void testIsNotEqualTo1() { double[] values = {4, 0, -0.00001, 5.0, 4.44443}; double[] values2 = {4, 11, -3.00001, 5.1, 4.44443}; DoubleColumn doubles = DoubleColumn.create("doubles", values); DoubleColumn doubles2 = DoubleColumn.create("doubles2", values2); Selection selection = doubles.isNotEqualTo(doubles2); assertEquals(1, selection.get(0)); assertEquals(2, selection.get(1)); assertEquals(3, selection.get(2)); assertEquals(3, selection.size()); Selection selection1 = doubles.isNotEqualTo(doubles2); assertEquals(1, selection1.get(0)); assertEquals(2, selection1.get(1)); assertEquals(3, selection1.get(2)); assertEquals(3, selection1.size()); } @Test public void testIsMissing() { double[] values = {4, 1, NaN, 2, 2}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isMissing(); assertEquals(2, selection.get(0)); assertEquals(1, selection.size()); } @Test public void testIsNotMissing() { double[] values = {4, 1, NaN, 2, 2}; DoubleColumn doubles = DoubleColumn.create("doubles", values); Selection selection = doubles.isNotMissing(); assertEquals(0, selection.get(0)); assertEquals(1, selection.get(1)); assertEquals(4, selection.size()); } @Test public void testNotIn() { double[] values = {4, 1, NaN, 2, 2}; DoubleColumn doubles = DoubleColumn.create("doubles", values); double[] comparison = {1, 2}; Selection selection = 
doubles.isNotIn(comparison); assertEquals(0, selection.get(0)); assertEquals(2, selection.get(1)); assertEquals(2, selection.size()); } @Test public void testIsBetweenInclusive() throws Exception { Table bush = Table.read().csv(CsvReadOptions.builder("../data/bush.csv")); Table result = bush.where(bush.numberColumn("approval").isBetweenInclusive(0, 49)); assertEquals(10, result.rowCount()); } }
/* * Copyright 2002-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.messaging.simp.annotation.support; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import reactor.core.publisher.EmitterProcessor; import reactor.core.publisher.Flux; import reactor.core.publisher.FluxProcessor; import reactor.core.publisher.Mono; import reactor.core.publisher.MonoProcessor; import org.springframework.context.support.StaticApplicationContext; import org.springframework.lang.Nullable; import org.springframework.messaging.Message; import org.springframework.messaging.MessageChannel; import org.springframework.messaging.MessageHeaders; import org.springframework.messaging.SubscribableChannel; import org.springframework.messaging.converter.MessageConverter; import org.springframework.messaging.handler.HandlerMethod; import org.springframework.messaging.handler.annotation.DestinationVariable; import org.springframework.messaging.handler.annotation.Header; import org.springframework.messaging.handler.annotation.Headers; import 
org.springframework.messaging.handler.annotation.MessageExceptionHandler; import org.springframework.messaging.handler.annotation.MessageMapping; import org.springframework.messaging.handler.annotation.Payload; import org.springframework.messaging.handler.annotation.support.MethodArgumentNotValidException; import org.springframework.messaging.simp.SimpAttributes; import org.springframework.messaging.simp.SimpAttributesContextHolder; import org.springframework.messaging.simp.SimpMessageHeaderAccessor; import org.springframework.messaging.simp.SimpMessageSendingOperations; import org.springframework.messaging.simp.SimpMessageType; import org.springframework.messaging.simp.SimpMessagingTemplate; import org.springframework.messaging.simp.annotation.SubscribeMapping; import org.springframework.messaging.support.MessageBuilder; import org.springframework.stereotype.Controller; import org.springframework.util.AntPathMatcher; import org.springframework.util.concurrent.ListenableFutureTask; import org.springframework.validation.Errors; import org.springframework.validation.Validator; import org.springframework.validation.annotation.Validated; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import static org.mockito.BDDMockito.any; import static org.mockito.BDDMockito.*; /** * Test fixture for * {@link org.springframework.messaging.simp.annotation.support.SimpAnnotationMethodMessageHandler}. 
* * @author Rossen Stoyanchev * @author Brian Clozel * @author Sebastien Deleuze */ public class SimpAnnotationMethodMessageHandlerTests { private static final String TEST_INVALID_VALUE = "invalidValue"; private TestSimpAnnotationMethodMessageHandler messageHandler; private TestController testController; @Mock private SubscribableChannel channel; @Mock private MessageConverter converter; @Captor private ArgumentCaptor<Object> payloadCaptor; @Before public void setup() { MockitoAnnotations.initMocks(this); SimpMessagingTemplate brokerTemplate = new SimpMessagingTemplate(this.channel); brokerTemplate.setMessageConverter(this.converter); this.messageHandler = new TestSimpAnnotationMethodMessageHandler(brokerTemplate, this.channel, this.channel); this.messageHandler.setApplicationContext(new StaticApplicationContext()); this.messageHandler.setValidator(new StringTestValidator(TEST_INVALID_VALUE)); this.messageHandler.afterPropertiesSet(); this.testController = new TestController(); } @Test @SuppressWarnings("unchecked") public void headerArgumentResolution() { Map<String, Object> headers = Collections.singletonMap("foo", "bar"); Message<?> message = createMessage("/pre/headers", headers); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("headers", this.testController.method); assertEquals("bar", this.testController.arguments.get("foo")); assertEquals("bar", ((Map<String, Object>) this.testController.arguments.get("headers")).get("foo")); } @Test public void optionalHeaderArgumentResolutionWhenPresent() { Map<String, Object> headers = Collections.singletonMap("foo", "bar"); Message<?> message = createMessage("/pre/optionalHeaders", headers); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("optionalHeaders", this.testController.method); assertEquals("bar", this.testController.arguments.get("foo1")); assertEquals("bar", 
this.testController.arguments.get("foo2")); } @Test public void optionalHeaderArgumentResolutionWhenNotPresent() { Message<?> message = createMessage("/pre/optionalHeaders"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("optionalHeaders", this.testController.method); assertNull(this.testController.arguments.get("foo1")); assertNull(this.testController.arguments.get("foo2")); } @Test public void messageMappingDestinationVariableResolution() { Message<?> message = createMessage("/pre/message/bar/value"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("messageMappingDestinationVariable", this.testController.method); assertEquals("bar", this.testController.arguments.get("foo")); assertEquals("value", this.testController.arguments.get("name")); } @Test public void subscribeEventDestinationVariableResolution() { Message<?> message = createMessage(SimpMessageType.SUBSCRIBE, "/pre/sub/bar/value", null); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("subscribeEventDestinationVariable", this.testController.method); assertEquals("bar", this.testController.arguments.get("foo")); assertEquals("value", this.testController.arguments.get("name")); } @Test public void simpleBinding() { Message<?> message = createMessage("/pre/binding/id/12"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("simpleBinding", this.testController.method); assertTrue("should be bound to type long", this.testController.arguments.get("id") instanceof Long); assertEquals(12L, this.testController.arguments.get("id")); } @Test public void validationError() { Message<?> message = createMessage("/pre/validation/payload"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); 
assertEquals("handleValidationException", this.testController.method); } @Test public void exceptionWithHandlerMethodArg() { Message<?> message = createMessage("/pre/illegalState"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("handleExceptionWithHandlerMethodArg", this.testController.method); HandlerMethod handlerMethod = (HandlerMethod) this.testController.arguments.get("handlerMethod"); assertNotNull(handlerMethod); assertEquals("illegalState", handlerMethod.getMethod().getName()); } @Test public void exceptionAsCause() { Message<?> message = createMessage("/pre/illegalStateCause"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("handleExceptionWithHandlerMethodArg", this.testController.method); HandlerMethod handlerMethod = (HandlerMethod) this.testController.arguments.get("handlerMethod"); assertNotNull(handlerMethod); assertEquals("illegalStateCause", handlerMethod.getMethod().getName()); } @Test public void errorAsMessageHandlingException() { Message<?> message = createMessage("/pre/error"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("handleErrorWithHandlerMethodArg", this.testController.method); HandlerMethod handlerMethod = (HandlerMethod) this.testController.arguments.get("handlerMethod"); assertNotNull(handlerMethod); assertEquals("errorAsThrowable", handlerMethod.getMethod().getName()); } @Test public void simpScope() { Map<String, Object> sessionAttributes = new ConcurrentHashMap<>(); sessionAttributes.put("name", "value"); SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.create(); headers.setSessionId("session1"); headers.setSessionAttributes(sessionAttributes); headers.setDestination("/pre/scope"); Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build(); 
this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("scope", this.testController.method); } @Test public void dotPathSeparator() { DotPathSeparatorController controller = new DotPathSeparatorController(); this.messageHandler.setPathMatcher(new AntPathMatcher(".")); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/pre.foo"); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("handleFoo", controller.method); message = createMessage("/app2/pre.foo"); this.messageHandler.handleMessage(message); assertEquals("handleFoo", controller.method); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void listenableFutureSuccess() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); ListenableFutureController controller = new ListenableFutureController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/listenable-future/success"); this.messageHandler.handleMessage(message); assertNotNull(controller.future); controller.future.run(); verify(this.converter).toMessage(this.payloadCaptor.capture(), any(MessageHeaders.class)); assertEquals("foo", this.payloadCaptor.getValue()); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void listenableFutureFailure() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); ListenableFutureController controller = new 
ListenableFutureController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/listenable-future/failure"); this.messageHandler.handleMessage(message); controller.future.run(); assertTrue(controller.exceptionCaught); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void completableFutureSuccess() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); CompletableFutureController controller = new CompletableFutureController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/completable-future"); this.messageHandler.handleMessage(message); assertNotNull(controller.future); controller.future.complete("foo"); verify(this.converter).toMessage(this.payloadCaptor.capture(), any(MessageHeaders.class)); assertEquals("foo", this.payloadCaptor.getValue()); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void completableFutureFailure() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); CompletableFutureController controller = new CompletableFutureController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/completable-future"); this.messageHandler.handleMessage(message); controller.future.completeExceptionally(new IllegalStateException()); assertTrue(controller.exceptionCaught); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void 
monoSuccess() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); ReactiveController controller = new ReactiveController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/mono"); this.messageHandler.handleMessage(message); assertNotNull(controller.mono); controller.mono.onNext("foo"); verify(this.converter).toMessage(this.payloadCaptor.capture(), any(MessageHeaders.class)); assertEquals("foo", this.payloadCaptor.getValue()); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void monoFailure() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); ReactiveController controller = new ReactiveController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/mono"); this.messageHandler.handleMessage(message); controller.mono.onError(new IllegalStateException()); assertTrue(controller.exceptionCaught); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void fluxNotHandled() { Message emptyMessage = MessageBuilder.withPayload(new byte[0]).build(); given(this.channel.send(any(Message.class))).willReturn(true); given(this.converter.toMessage(any(), any(MessageHeaders.class))).willReturn(emptyMessage); ReactiveController controller = new ReactiveController(); this.messageHandler.registerHandler(controller); this.messageHandler.setDestinationPrefixes(Arrays.asList("/app1", "/app2/")); Message<?> message = createMessage("/app1/flux"); this.messageHandler.handleMessage(message); 
assertNotNull(controller.flux); controller.flux.onNext("foo"); verify(this.converter, never()).toMessage(any(), any(MessageHeaders.class)); } @Test public void placeholder() throws Exception { Message<?> message = createMessage("/pre/myValue"); this.messageHandler.setEmbeddedValueResolver(value -> ("/${myProperty}".equals(value) ? "/myValue" : value)); this.messageHandler.registerHandler(this.testController); this.messageHandler.handleMessage(message); assertEquals("placeholder", this.testController.method); } private Message<?> createMessage(String destination) { return createMessage(destination, null); } private Message<?> createMessage(String destination, Map<String, Object> headers) { return createMessage(SimpMessageType.MESSAGE, destination, headers); } private Message<?> createMessage(SimpMessageType messageType, String destination, Map<String, Object> headers) { SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create(messageType); accessor.setSessionId("session1"); accessor.setSessionAttributes(new HashMap<>()); accessor.setDestination(destination); if (headers != null) { for (Map.Entry<String, Object> entry : headers.entrySet()) { accessor.setHeader(entry.getKey(), entry.getValue()); } } return MessageBuilder.withPayload(new byte[0]).setHeaders(accessor).build(); } private static class TestSimpAnnotationMethodMessageHandler extends SimpAnnotationMethodMessageHandler { public TestSimpAnnotationMethodMessageHandler(SimpMessageSendingOperations brokerTemplate, SubscribableChannel clientInboundChannel, MessageChannel clientOutboundChannel) { super(clientInboundChannel, clientOutboundChannel, brokerTemplate); } public void registerHandler(Object handler) { super.detectHandlerMethods(handler); } } @Controller @MessageMapping("/pre") private static class TestController { private String method; private Map<String, Object> arguments = new LinkedHashMap<>(); @MessageMapping("/headers") public void headers(@Header String foo, @Headers Map<String, Object> 
headers) { this.method = "headers"; this.arguments.put("foo", foo); this.arguments.put("headers", headers); } @MessageMapping("/optionalHeaders") public void optionalHeaders(@Header(name = "foo", required = false) String foo1, @Header("foo") Optional<String> foo2) { this.method = "optionalHeaders"; this.arguments.put("foo1", foo1); this.arguments.put("foo2", (foo2.isPresent() ? foo2.get() : null)); } @MessageMapping("/message/{foo}/{name}") public void messageMappingDestinationVariable(@DestinationVariable("foo") String param1, @DestinationVariable("name") String param2) { this.method = "messageMappingDestinationVariable"; this.arguments.put("foo", param1); this.arguments.put("name", param2); } @SubscribeMapping("/sub/{foo}/{name}") public void subscribeEventDestinationVariable(@DestinationVariable("foo") String param1, @DestinationVariable("name") String param2) { this.method = "subscribeEventDestinationVariable"; this.arguments.put("foo", param1); this.arguments.put("name", param2); } @MessageMapping("/binding/id/{id}") public void simpleBinding(@DestinationVariable("id") Long id) { this.method = "simpleBinding"; this.arguments.put("id", id); } @MessageMapping("/validation/payload") public void payloadValidation(@Validated @Payload String payload) { this.method = "payloadValidation"; this.arguments.put("message", payload); } @MessageMapping("/illegalState") public void illegalState() { throw new IllegalStateException("my cause"); } @MessageMapping("/illegalStateCause") public void illegalStateCause() { throw new RuntimeException(new IllegalStateException("my cause")); } @MessageMapping("/error") public void errorAsThrowable() { throw new Error("my cause"); } @MessageExceptionHandler(MethodArgumentNotValidException.class) public void handleValidationException() { this.method = "handleValidationException"; } @MessageExceptionHandler public void handleExceptionWithHandlerMethodArg(IllegalStateException ex, HandlerMethod handlerMethod) { this.method = 
"handleExceptionWithHandlerMethodArg"; this.arguments.put("handlerMethod", handlerMethod); assertEquals("my cause", ex.getMessage()); } @MessageExceptionHandler public void handleErrorWithHandlerMethodArg(Error ex, HandlerMethod handlerMethod) { this.method = "handleErrorWithHandlerMethodArg"; this.arguments.put("handlerMethod", handlerMethod); assertEquals("my cause", ex.getMessage()); } @MessageMapping("/scope") public void scope() { SimpAttributes simpAttributes = SimpAttributesContextHolder.currentAttributes(); assertThat(simpAttributes.getAttribute("name"), is("value")); this.method = "scope"; } @MessageMapping("/${myProperty}") public void placeholder() { this.method = "placeholder"; } } @Controller @MessageMapping("pre") private static class DotPathSeparatorController { private String method; @MessageMapping("foo") public void handleFoo() { this.method = "handleFoo"; } } @Controller @MessageMapping("listenable-future") private static class ListenableFutureController { private ListenableFutureTask<String> future; private boolean exceptionCaught = false; @MessageMapping("success") public ListenableFutureTask<String> handleListenableFuture() { this.future = new ListenableFutureTask<>(() -> "foo"); return this.future; } @MessageMapping("failure") public ListenableFutureTask<String> handleListenableFutureException() { this.future = new ListenableFutureTask<>(() -> { throw new IllegalStateException(); }); return this.future; } @MessageExceptionHandler(IllegalStateException.class) public void handleValidationException() { this.exceptionCaught = true; } } @Controller private static class CompletableFutureController { private CompletableFuture<String> future; private boolean exceptionCaught = false; @MessageMapping("completable-future") public CompletableFuture<String> handleCompletableFuture() { this.future = new CompletableFuture<>(); return this.future; } @MessageExceptionHandler(IllegalStateException.class) public void handleValidationException() { 
this.exceptionCaught = true; } } @Controller private static class ReactiveController { private MonoProcessor<String> mono; private FluxProcessor<String, String> flux; private boolean exceptionCaught = false; @MessageMapping("mono") public Mono<String> handleMono() { this.mono = MonoProcessor.create(); return this.mono; } @MessageMapping("flux") public Flux<String> handleFlux() { this.flux = EmitterProcessor.create(); return this.flux; } @MessageExceptionHandler(IllegalStateException.class) public void handleValidationException() { this.exceptionCaught = true; } } private static class StringTestValidator implements Validator { private final String invalidValue; public StringTestValidator(String invalidValue) { this.invalidValue = invalidValue; } @Override public boolean supports(Class<?> clazz) { return String.class.isAssignableFrom(clazz); } @Override public void validate(@Nullable Object target, Errors errors) { String value = (String) target; if (invalidValue.equals(value)) { errors.reject("invalid value '" + invalidValue + "'"); } } } }
/*
 * Licensed to Jasig under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Jasig licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.cas.services;

import static org.junit.Assert.*;

import org.junit.Test;

/**
 * Unit tests for {@link UnauthorizedServiceException}, verifying that the
 * message and cause passed to each constructor are exposed unchanged.
 */
public class UnauthorizedServiceExceptionTests {

    @Test
    public void testCodeConstructor() {
        final String message = "GG";
        final UnauthorizedServiceException exception = new UnauthorizedServiceException(message);
        assertEquals(message, exception.getMessage());
    }

    @Test
    public void testThrowableConstructorWithCode() {
        final String message = "GG";
        final RuntimeException cause = new RuntimeException();
        final UnauthorizedServiceException exception = new UnauthorizedServiceException(message, cause);
        assertEquals(message, exception.getMessage());
        assertEquals(cause, exception.getCause());
    }
}
package cyan.simple.pgsql.jsonb; import cyan.simple.pgsql.builder.SqlBuilder; import cyan.simple.pgsql.builder.SqlBuilders; import cyan.simple.pgsql.util.GeneralUtils; import java.util.Optional; /** * <p>ContrastRule</p> * @author Cyan (snow22314@outlook.com) * @version V.0.0.1 * @group cyan.tool.kit * @date 11:49 2021/5/7 */ public class ContrastRule extends JsonbRule<ContrastRule> { /** 属性值类型 */ protected ValueType type = ValueType.LONG; /** 属性值 */ protected Object value; /** 计算操作 */ protected ContrastOperation operation = ContrastOperation.EQUAL_OPERATION; public ContrastRule() { } public ContrastRule(String name, ValueType type, Object value, ContrastOperation operation) { super(name); this.type = type; this.value = value; this.operation = operation; } public ContrastRule(ContrastRule.Builder builder) { super(builder); this.type = builder.type; this.value = builder.value; this.operation = builder.operation; } public ValueType getType() { return type; } public void setType(ValueType type) { this.type = type; } public Object getValue() { return value; } public void setValue(Object value) { this.value = value; } public ContrastOperation getOperation() { return operation; } public void setOperation(ContrastOperation operation) { this.operation = operation; } @Override public String toSql(String alias) { return toSql(alias,"value"); } @Override public String toSql(String alias, String variable) { if (GeneralUtils.isEmpty(this.value) || GeneralUtils.isEmpty(this.type) || GeneralUtils.isEmpty(this.operation)) { return SqlBuilder.EMPTY; } SqlBuilder sqlBuilder = SqlBuilders.newSqlBuilder(); if (ValueType.isContrast(this.type.getKey())) { String target = target(alias, this.name, variable, this.type); String sql = this.operation.translateSql(target, this.value); sqlBuilder.andOfOr(true).append(sql); } return sqlBuilder.toString(); } public static class Builder extends JsonbRule.Builder<ContrastRule> { protected ValueType type; protected Object value; protected 
ContrastOperation operation; public Builder() { } public ContrastRule.Builder name(String name) { this.name = name; return this; } public ContrastRule.Builder type(ValueType type) { this.type = type; return this; } public ContrastRule.Builder type(Integer type) { this.type = Optional.ofNullable(type).map(ValueType::parserKey).orElse(ValueType.LONG); return this; } public ContrastRule.Builder value(Object value) { this.value = value; return this; } public ContrastRule.Builder operation(ContrastOperation operation) { this.operation = operation; return this; } public ContrastRule.Builder operation(Integer operation) { this.operation = Optional.ofNullable(operation).map(ContrastOperation::parserKey).orElse(ContrastOperation.EQUAL_OPERATION); return this; } @Override public ContrastRule build() { return new ContrastRule(this); } } }
/**
 * Copyright 2021 Shulie Technology, Co.Ltd
 * Email: shulie@shulie.io
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.pamirs.attach.plugin.lettuce;

import com.pamirs.pradar.MiddlewareType;

/**
 * Constant pool shared across the lettuce (Redis client) agent plugin:
 * plugin identity, reflection field names, dynamic-field keys, and the
 * lettuce connect-method names that the plugin instruments.
 */
public interface LettuceConstants {

    // --- plugin identity ---
    int PLUGIN_TYPE = MiddlewareType.TYPE_CACHE;
    String PLUGIN_NAME = "redis-lettuce";
    String MODULE_NAME = "redis-lettuce";
    String MIDDLEWARE_NAME = "redis";

    // --- names of private lettuce fields read via reflection ---
    String REFLECT_FIELD_REDIS_URI = "redisURI";
    String REFLECT_FIELD_INITIAL_URIS = "initialUris";
    String REFLECT_FIELD_CONNECTION = "connection";
    String REFLECT_FIELD_CHANNEL_WRITER = "channelWriter";
    String REFLECT_FIELD_DEFAULT_WRITER = "defaultWriter";
    String REFLECT_FIELD_WRITER = "writer";
    String REFLECT_FIELD_CHANNEL = "channel";
    String REFLECT_FIELD_KEYS = "keys";
    // Fully-qualified class name (not a field name) used for reflective lookup.
    String REFLECT_FIELD_CLUSTERDISTRIBUTIONCHANNELWRITER = "io.lettuce.core.cluster.ClusterDistributionChannelWriter";

    // --- keys for dynamic fields attached to intercepted objects ---
    String DYNAMIC_FIELD_REDIS_URIS = "redisURIs";
    String DYNAMIC_FIELD_LETTUCE_TARGET = "lettuceTarget";
    String DYNAMIC_FIELD_LETTUCE_METHOD = "lettuceMethod";
    String DYNAMIC_FIELD_LETTUCE_ARGS = "lettuceArgs";
    String DYNAMIC_FIELD_LETTUCE_RESULT = "lettuceResult";

    // --- lettuce client connect-method names that get instrumented ---
    String CONNECT = "connect";
    String CONNECT_ASYNC = "connectAsync";
    String CONNECT_PUB_SUB = "connectPubSub";
    String CONNECT_PUB_SUB_ASYNC = "connectPubSubAsync";
    String CONNECT_TO_NODE = "connectToNode";
    String CONNECT_TO_NODE_ASYNC = "connectToNodeAsync";
    String CONNECT_PUB_SUB_TO_NODE_ASYNC = "connectPubSubToNodeAsync";
    String CONNECT_CLUSTER_ASYNC = "connectClusterAsync";
    String CONNECT_SENTINEL = "connectSentinel";
    String CONNECT_SENTINEL_ASYNC = "connectSentinelAsync";

    // Per-thread flag, initially true.
    // NOTE(review): presumably marks whether the current call targets a
    // master/slave connection — confirm against the interceptors that set it.
    ThreadLocal<Boolean> masterSlave = new ThreadLocal<Boolean>() {
        @Override
        protected Boolean initialValue() {
            return true;
        }
    };

    // Placeholder used when the remote address cannot be determined.
    // The misspelling ("unknow") is kept: other code may match on this literal.
    String ADDRESS_UNKNOW = "unknow";
}
package jp.co.yahoo.adssearchapi.v4.model; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import javax.validation.Valid; import javax.validation.constraints.*; /** * &lt;div lang&#x3D;\&quot;ja\&quot;&gt;BiddingStrategyServiceTargetSpendBiddingSchemeオブジェクトは、クリック数の最大化の自動入札設定情報を表します。 (BiddingStrategyService用のオブジェクトです。)&lt;br&gt; このフィールドは、省略可能となります。※ADD時、typeがTARGET_SPENDの場合は必須です。&lt;/div&gt; &lt;div lang&#x3D;\&quot;en\&quot;&gt;BiddingStrategyServiceTargetSpendBiddingScheme object describes Auto Bidding setting for Maximize Clicks.&lt;br&gt; This field is optional. *If type is &#39;TARGET_SPEND&#39;, this field is required in ADD operation.&lt;/div&gt; */ @ApiModel(description = "<div lang=\"ja\">BiddingStrategyServiceTargetSpendBiddingSchemeオブジェクトは、クリック数の最大化の自動入札設定情報を表します。 (BiddingStrategyService用のオブジェクトです。)<br> このフィールドは、省略可能となります。※ADD時、typeがTARGET_SPENDの場合は必須です。</div> <div lang=\"en\">BiddingStrategyServiceTargetSpendBiddingScheme object describes Auto Bidding setting for Maximize Clicks.<br> This field is optional. 
*If type is 'TARGET_SPEND', this field is required in ADD operation.</div> ") @javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen") public class BiddingStrategyServiceTargetSpendBiddingScheme { @JsonProperty("bidCeiling") private Long bidCeiling = null; public BiddingStrategyServiceTargetSpendBiddingScheme bidCeiling(Long bidCeiling) { this.bidCeiling = bidCeiling; return this; } /** * <div lang=\"ja\">入札価格の上限です。(0~50000)<br> ※「0」が設定された場合、上限設定はありません。<br> このフィールドは、いずれの場合でも省略可能となります。</div> <div lang=\"en\">Bid amount limit (0-50000).<br> * No limits if &#34;0&#34; is set.<br> This field is optional in any cases.</div> * @return bidCeiling */ @ApiModelProperty(value = "<div lang=\"ja\">入札価格の上限です。(0~50000)<br> ※「0」が設定された場合、上限設定はありません。<br> このフィールドは、いずれの場合でも省略可能となります。</div> <div lang=\"en\">Bid amount limit (0-50000).<br> * No limits if &#34;0&#34; is set.<br> This field is optional in any cases.</div> ") public Long getBidCeiling() { return bidCeiling; } public void setBidCeiling(Long bidCeiling) { this.bidCeiling = bidCeiling; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } BiddingStrategyServiceTargetSpendBiddingScheme biddingStrategyServiceTargetSpendBiddingScheme = (BiddingStrategyServiceTargetSpendBiddingScheme) o; return Objects.equals(this.bidCeiling, biddingStrategyServiceTargetSpendBiddingScheme.bidCeiling); } @Override public int hashCode() { return Objects.hash(bidCeiling); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class BiddingStrategyServiceTargetSpendBiddingScheme {\n"); sb.append(" bidCeiling: ").append(toIndentedString(bidCeiling)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). 
*/ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
// Targeted by JavaCPP version 1.5.3: DO NOT EDIT THIS FILE

package org.bytedeco.caffe;

import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;
import org.bytedeco.opencv.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_core.*;
import org.bytedeco.opencv.opencv_imgproc.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
import static org.bytedeco.opencv.global.opencv_imgcodecs.*;
import org.bytedeco.opencv.opencv_videoio.*;
import static org.bytedeco.opencv.global.opencv_videoio.*;
import org.bytedeco.opencv.opencv_highgui.*;
import static org.bytedeco.opencv.global.opencv_highgui.*;
import org.bytedeco.hdf5.*;
import static org.bytedeco.hdf5.global.hdf5.*;

import static org.bytedeco.caffe.global.caffe.*;

// Generated JavaCPP binding for the C++ template instantiation
// caffe::FlattenLayer<double>. Each Java method below forwards to the
// corresponding native member via JNI; do not hand-edit — regenerate instead.
@Name("caffe::FlattenLayer<double>") @Properties(inherit = org.bytedeco.caffe.presets.caffe.class)
public class DoubleFlattenLayer extends DoubleLayer {
    // Loads the native library before any native method can be called.
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public DoubleFlattenLayer(Pointer p) { super(p); }

    // Constructs the native layer from a LayerParameter; the null super-pointer
    // is then populated by allocate().
    public DoubleFlattenLayer(@Const @ByRef LayerParameter param) { super((Pointer)null); allocate(param); }
    private native void allocate(@Const @ByRef LayerParameter param);

    // Virtual overrides mirroring caffe::FlattenLayer<double>'s interface.
    @Virtual public native void Reshape(@Const @ByRef DoubleBlobVector bottom, @Const @ByRef DoubleBlobVector top);
    @Virtual public native @Const({false, false, true}) @Cast("const char*") BytePointer type();
    @Virtual public native @Const({false, false, true}) int ExactNumBottomBlobs();
    @Virtual public native @Const({false, false, true}) int ExactNumTopBlobs();
    @Virtual protected native void Forward_cpu(@Const @ByRef DoubleBlobVector bottom, @Const @ByRef DoubleBlobVector top);
    @Virtual protected native void Backward_cpu(@Const @ByRef DoubleBlobVector top, @Const @ByRef BoolVector propagate_down, @Const @ByRef DoubleBlobVector bottom);
}
package com.littleji.leetcode; import java.util.Stack; /** * Created by Jimmy on 2016/10/19. */ public class No20_ValidParentheses { public boolean isValid(String s) { if (s.length() == 0 || s.length()%2 != 0) return false; char [] ref = new char[256]; Stack<Character> stack = new Stack<Character>(); ref[(int)')'] = '('; ref[(int)']'] = '['; ref[(int)'}'] = '{'; for (int i=0; i<s.length();i++){ if (s.charAt(i)=='(' || s.charAt(i)=='{' || s.charAt(i)=='[' ){ stack.push(s.charAt(i)); } else if (s.charAt(i)==')' || s.charAt(i)=='}' ||s.charAt(i)==']'){ if(stack.empty()) return false; if (stack.peek() == ref[s.charAt(i)]) stack.pop(); else return false; } else return false; } if (stack.empty()) return true; else return false; } }
package org.bian.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.bian.dto.CRCorporateTreasuryManagementPlanInitiateInputModelCorporateTreasuryManagementPlanInstanceRecord;
import javax.validation.Valid;

/**
 * CRCorporateTreasuryManagementPlanInitiateInputModel
 *
 * Input payload for the BIAN "Initiate" action on a Corporate Treasury
 * Management Plan. Appears to be generated from a Swagger/BIAN service
 * definition — keep structure in sync with the generator.
 */
public class CRCorporateTreasuryManagementPlanInitiateInputModel {

  // Reference to the active servicing session (ISO20022/UNCEFACT Identifier).
  private String corporateTreasuryServicingSessionReference = null;

  // Opaque Initiate-call record (UNCEFACT Binary); schema not defined here.
  private Object corporateTreasuryManagementPlanInitiateActionRecord = null;

  // Free-text instance status, e.g. initialised / pending / active.
  private String corporateTreasuryManagementPlanInstanceStatus = null;

  // Structured plan instance record (separate generated DTO).
  private CRCorporateTreasuryManagementPlanInitiateInputModelCorporateTreasuryManagementPlanInstanceRecord corporateTreasuryManagementPlanInstanceRecord = null;

  /**
   * `status: Not Mapped`
   * core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::ISO20022andUNCEFACT::Identifier
   * general-info: Reference to the active servicing session
   * @return corporateTreasuryServicingSessionReference
   */
  public String getCorporateTreasuryServicingSessionReference() {
    return corporateTreasuryServicingSessionReference;
  }

  public void setCorporateTreasuryServicingSessionReference(String corporateTreasuryServicingSessionReference) {
    this.corporateTreasuryServicingSessionReference = corporateTreasuryServicingSessionReference;
  }

  /**
   * `status: Not Mapped`
   * core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Binary
   * general-info: The Initiate service call input and output record
   * @return corporateTreasuryManagementPlanInitiateActionRecord
   */
  public Object getCorporateTreasuryManagementPlanInitiateActionRecord() {
    return corporateTreasuryManagementPlanInitiateActionRecord;
  }

  public void setCorporateTreasuryManagementPlanInitiateActionRecord(Object corporateTreasuryManagementPlanInitiateActionRecord) {
    this.corporateTreasuryManagementPlanInitiateActionRecord = corporateTreasuryManagementPlanInitiateActionRecord;
  }

  /**
   * `status: Not Mapped`
   * core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text
   * general-info: The status of the Corporate Treasury Management Plan instance (e.g. initialised, pending, active)
   * @return corporateTreasuryManagementPlanInstanceStatus
   */
  public String getCorporateTreasuryManagementPlanInstanceStatus() {
    return corporateTreasuryManagementPlanInstanceStatus;
  }

  public void setCorporateTreasuryManagementPlanInstanceStatus(String corporateTreasuryManagementPlanInstanceStatus) {
    this.corporateTreasuryManagementPlanInstanceStatus = corporateTreasuryManagementPlanInstanceStatus;
  }

  /**
   * Get corporateTreasuryManagementPlanInstanceRecord
   * @return corporateTreasuryManagementPlanInstanceRecord
   */
  public CRCorporateTreasuryManagementPlanInitiateInputModelCorporateTreasuryManagementPlanInstanceRecord getCorporateTreasuryManagementPlanInstanceRecord() {
    return corporateTreasuryManagementPlanInstanceRecord;
  }

  public void setCorporateTreasuryManagementPlanInstanceRecord(CRCorporateTreasuryManagementPlanInitiateInputModelCorporateTreasuryManagementPlanInstanceRecord corporateTreasuryManagementPlanInstanceRecord) {
    this.corporateTreasuryManagementPlanInstanceRecord = corporateTreasuryManagementPlanInstanceRecord;
  }
}
/* * The MIT License * * Copyright (c) 2017 aoju.org All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.aoju.bus.core.convert.impl;

import org.aoju.bus.core.convert.AbstractConverter;
import org.aoju.bus.core.utils.DateUtils;
import org.aoju.bus.core.utils.StringUtils;

import java.util.Calendar;
import java.util.Date;

/**
 * Converter producing {@link Calendar} instances from dates, epoch
 * milliseconds, or parseable date strings.
 *
 * @author Kimi Liu
 * @version 5.2.5
 * @since JDK 1.8+
 */
public class CalendarConverter extends AbstractConverter<Calendar> {

    /** Optional date pattern applied when parsing from a string. */
    private String format;

    /**
     * Returns the date pattern used for string parsing.
     *
     * @return the configured pattern, or null when none is set
     */
    public String getFormat() {
        return format;
    }

    /**
     * Sets the date pattern used for string parsing.
     *
     * @param format the pattern to apply
     */
    public void setFormat(String format) {
        this.format = format;
    }

    @Override
    protected Calendar convertInternal(Object value) {
        // A Date converts directly.
        if (value instanceof Date) {
            return DateUtils.calendar((Date) value);
        }
        // A Long is treated as a timestamp (auto-unboxed).
        if (value instanceof Long) {
            return DateUtils.calendar((Long) value);
        }
        // Everything else is stringified, then parsed — with the configured
        // pattern when one is set, otherwise via the default parser.
        final String text = convertToStr(value);
        if (StringUtils.isBlank(format)) {
            return DateUtils.calendar(DateUtils.parse(text));
        }
        return DateUtils.calendar(DateUtils.parse(text, format));
    }
}
/*
 * Copyright 2000-2020 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.flow.server.connect.typeconversion;

import org.junit.Test;

/**
 * Verifies that a JSON payload is deserialized into a bean argument and the
 * endpoint's transformed bean is serialized back as expected.
 */
public class BeanConversionTest extends BaseTypeConversionTest {

    @Test
    public void should_ConvertToBean_When_ReceiveBeanObject() {
        // Payload sent to the endpoint.
        String input = "{\"name\":\"mybean\",\"address\":\"myaddress\","
                + "\"age\":10,\"isAdmin\":true,\"testEnum\":\"FIRST\","
                + "\"roles\":[\"Admin\"], \"customProperty\": \"customValue\"}";
        // Serialized form of the bean after the endpoint's "-foo" transformation.
        String expected = "{\"name\":\"mybean-foo\","
                + "\"address\":\"myaddress-foo\","
                + "\"age\":11,\"isAdmin\":false,"
                + "\"testEnum\":\"SECOND\","
                + "\"roles\":[\"Admin\",\"User\"],\"customProperty\":\"customValue-foo\"}";
        assertEqualExpectedValueWhenCallingMethod("getFooBean", input, expected);
    }
}
package io.github.shishito_megane.dest_bbs_client_android_app;

import android.Manifest;
import android.accounts.AccountManager;
import android.app.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.api.client.extensions.android.http.AndroidHttp;
import com.google.api.client.googleapis.extensions.android.gms.auth.GoogleAccountCredential;
import com.google.api.client.googleapis.extensions.android.gms.auth.GooglePlayServicesAvailabilityIOException;
import com.google.api.client.googleapis.extensions.android.gms.auth.UserRecoverableAuthIOException;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.services.calendar.CalendarScopes;
import com.google.api.services.calendar.model.Calendar;
import com.google.api.services.calendar.model.CalendarListEntry;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import pub.devrel.easypermissions.AfterPermissionGranted;
import pub.devrel.easypermissions.EasyPermissions;

/**
 * Screen for registering a new person: name, address, detail text, a thumbnail
 * image picked via the system document picker, and a dedicated Google Calendar
 * created for the person through the Google Calendar API.
 */
public class PersonAddActivity extends AppCompatActivity implements EasyPermissions.PermissionCallbacks {

    // Request codes for startActivityForResult / runtime-permission callbacks.
    static final int REQUEST_ACCOUNT_PICKER_ERR_CODE = 1000;
    static final int REQUEST_AUTHORIZATION_ERR_CODE = 1001;
    static final int REQUEST_GOOGLE_PLAY_SERVICES = 1002;
    static final int REQUEST_PERMISSION_GET_ACCOUNTS_ERR_CODE = 1003;
    static final int REQUEST_PICK_IMAGE_FILE = 2001;

    // SharedPreferences key under which the chosen Google account is persisted.
    private static final String PREF_ACCOUNT_NAME = "accountName";
    // OAuth scopes requested for the Calendar API.
    private static final String[] SCOPES = {CalendarScopes.CALENDAR};

    // OAuth2 credential used by the Calendar API client.
    GoogleAccountCredential mCredential;
    // Spinner shown while the Calendar API call is in flight.
    ProgressDialog mProgress;
    // Status line under the calendar-id field.
    private TextView textViewCalenderIdResult;
    // Field receiving the id of the newly created calendar.
    private EditText editTextCalenderId;
    // Person name captured when the create-calendar button is pressed; used as
    // the calendar title.
    private String personName;
    // Thumbnail preview and its status line.
    private ImageView imageViewPersonImage;
    private TextView textViewPersonImageResult;
    // Local persistence helper.
    // NOTE(review): constructed with `this` during field initialization, before
    // onCreate runs — confirm Db does not touch the Activity at that point.
    private Db db = new Db(this);

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_person_add);

        // Pre-fill the address field with the configured lab address (falling
        // back to the default address resource).
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
        String default_person_address = getString(R.string.pref_default_address);
        String address = prefs.getString("lab_address", default_person_address);
        final EditText editTextPersonAddress = findViewById(R.id.editTextPersonAddress);
        editTextPersonAddress.setText(address);

        // Calendar-id widgets and the button that creates a new calendar.
        final Button buttonCreateNewCalender = findViewById(R.id.buttonCreateNewCalender);
        editTextCalenderId = findViewById(R.id.editTextCalenderId);
        textViewCalenderIdResult = findViewById(R.id.textViewCalenderIdResult);
        final EditText editPersonName = findViewById(R.id.editTextPersonName);

        // Kick off the Google Calendar API call to create a calendar titled
        // with the person's name (default name when the field is empty).
        buttonCreateNewCalender.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                buttonCreateNewCalender.setEnabled(false);
                textViewCalenderIdResult.setText("");
                editTextCalenderId.setText("");
                personName = editPersonName.getText().toString();
                if (personName.length() < 1) {
                    personName = getString(R.string.default_person_name);
                }
                // The actual work runs asynchronously; see getResultsFromApi().
                getResultsFromApi();
                buttonCreateNewCalender.setEnabled(true);
            }
        });

        // Progress dialog shown while calling the Calendar API.
        mProgress = new ProgressDialog(this);
        mProgress.setMessage("Calling Google Calendar API ...");

        // Initialize the OAuth2 credential (account is selected later).
        mCredential = GoogleAccountCredential.usingOAuth2(
                getApplicationContext(), Arrays.asList(SCOPES)
        ).setBackOff(new ExponentialBackOff());

        // Thumbnail selection via the system document picker.
        final Button buttonSelectImage = findViewById(R.id.buttonSelectImage);
        imageViewPersonImage = findViewById(R.id.imageViewPersonImage);
        textViewPersonImageResult = findViewById(R.id.textViewPersonImageResult);
        buttonSelectImage.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                textViewPersonImageResult.setText("");
                // ACTION_OPEN_DOCUMENT lets the user pick a file through the
                // system's file browser; restrict to openable image files.
                Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
                intent.addCategory(Intent.CATEGORY_OPENABLE);
                intent.setType("image/*");
                startActivityForResult(intent, REQUEST_PICK_IMAGE_FILE);
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.personaddactivity_action_bar, menu);
        return true;
    }

    // Saves the person record and returns to HomeActivity.
    // NOTE(review): the item id is never checked, so every action-bar item
    // triggers the save — confirm the menu defines only the save action.
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Collect the person fields from the form.
        EditText editPersonName = findViewById(R.id.editTextPersonName);
        String personName = editPersonName.getText().toString();
        EditText editPersonAddress = findViewById(R.id.editTextPersonAddress);
        String personAddress = editPersonAddress.getText().toString();
        EditText editTextPersonDetail = findViewById(R.id.editTextPersonDetail);
        String personDetail = editTextPersonDetail.getText().toString();
        EditText editTextCalenderId = findViewById(R.id.editTextCalenderId);
        String personCalenderId = editTextCalenderId.getText().toString();
        // Thumbnail URI as previously stored in the status TextView.
        String personThumbnail = textViewPersonImageResult.getText().toString();
        db.saveData(
                personName,
                personDetail,
                personThumbnail,
                personAddress,
                personCalenderId,
                getString(R.string.default_person_status)
        );
        // Confirm to the user.
        Toast toast = Toast.makeText(
                this, R.string.finish_person_add_register, Toast.LENGTH_SHORT
        );
        toast.show();
        // Return to HomeActivity and drop this activity off the back stack.
        Intent intent = new Intent(
                this, HomeActivity.class
        );
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
        startActivity(intent);
        return true;
    }

    /**
     * Checks the preconditions for calling the Google Calendar API and, when
     * they all hold, starts the API call.
     *
     * Preconditions:
     * - a usable Google Play Services installation,
     * - a selected Google account,
     * - an active network connection.
     *
     * When a precondition fails, the user is guided to fix it instead.
     */
    private void getResultsFromApi() {
        if (!isGooglePlayServicesAvailable()) {
            // Google Play Services missing or outdated.
            acquireGooglePlayServices();
        } else if (mCredential.getSelectedAccountName() == null) {
            // No Google account chosen yet.
            chooseAccount();
        } else if (!isDeviceOnline()) {
            // Device has no network connection.
            textViewCalenderIdResult.setText(getString(R.string.no_internet_connection_msg));
        } else {
            new MakeRequestTask(mCredential, personName).execute();
        }
    }

    /**
     * Checks whether an up-to-date Google Play Services is installed.
     *
     * @return true when Google Play Services is installed and current
     */
    private boolean isGooglePlayServicesAvailable() {
        GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
        final int connectionStatusCode = apiAvailability.isGooglePlayServicesAvailable(this);
        return connectionStatusCode == ConnectionResult.SUCCESS;
    }

    /**
     * Prompts the user to make Google Play Services usable — but only when the
     * reported error is one the user can actually resolve.
     */
    private void acquireGooglePlayServices() {
        GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
        final int connectionStatusCode = apiAvailability.isGooglePlayServicesAvailable(this);
        if (apiAvailability.isUserResolvableError(connectionStatusCode)) {
            showGooglePlayServicesAvailabilityErrorDialog(connectionStatusCode);
        }
    }

    /**
     * Shows an error dialog explaining that usable Google Play Services could
     * not be found.
     *
     * @param connectionStatusCode code describing why Play Services is unusable
     */
    void showGooglePlayServicesAvailabilityErrorDialog(
            final int connectionStatusCode
    ) {
        GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
        Dialog dialog = apiAvailability.getErrorDialog(
                this,
                connectionStatusCode,
                REQUEST_GOOGLE_PLAY_SERVICES
        );
        dialog.show();
    }

    /**
     * Selects the Google account used with the API credential.
     *
     * Reuses a previously saved account name when one exists; otherwise shows
     * the account picker. Requires the GET_ACCOUNTS permission, which is
     * requested from the user when not yet granted; on grant, EasyPermissions
     * re-invokes this method via @AfterPermissionGranted.
     */
    @AfterPermissionGranted(REQUEST_PERMISSION_GET_ACCOUNTS_ERR_CODE)
    private void chooseAccount() {
        if (EasyPermissions.hasPermissions(this, Manifest.permission.GET_ACCOUNTS)) {
            // Try the account saved in SharedPreferences first.
            String accountName = getPreferences(Context.MODE_PRIVATE)
                    .getString(PREF_ACCOUNT_NAME, null);
            if (accountName != null) {
                mCredential.setSelectedAccountName(accountName);
                getResultsFromApi();
            } else {
                // Show GoogleAccountCredential's account chooser.
                startActivityForResult(
                        mCredential.newChooseAccountIntent(),
                        REQUEST_ACCOUNT_PICKER_ERR_CODE);
            }
        } else {
            // Ask the user for the GET_ACCOUNTS permission.
            EasyPermissions.requestPermissions(
                    this,
                    "This app needs to access your Google account (via Contacts).",
                    REQUEST_PERMISSION_GET_ACCOUNTS_ERR_CODE,
                    Manifest.permission.GET_ACCOUNTS);
        }
    }

    /**
     * Handles results from the activities launched above:
     * REQUEST_GOOGLE_PLAY_SERVICES / REQUEST_ACCOUNT_PICKER_ERR_CODE /
     * REQUEST_AUTHORIZATION_ERR_CODE (Calendar API flow) and
     * REQUEST_PICK_IMAGE_FILE (thumbnail selection).
     *
     * @param requestCode code supplied when the activity was started
     * @param resultCode  result reported by the finished activity
     * @param data        result payload from the finished activity
     */
    @Override
    protected void onActivityResult(
            int requestCode, int resultCode, Intent data
    ) {
        super.onActivityResult(requestCode, resultCode, data);
        switch (requestCode) {
            case REQUEST_GOOGLE_PLAY_SERVICES:
                if (resultCode != RESULT_OK) {
                    textViewCalenderIdResult.setText(
                            getString(R.string.install_google_play_service_msg)
                    );
                } else {
                    getResultsFromApi();
                }
                break;
            case REQUEST_ACCOUNT_PICKER_ERR_CODE:
                if (resultCode == RESULT_OK && data != null && data.getExtras() != null) {
                    String accountName = data.getStringExtra(AccountManager.KEY_ACCOUNT_NAME);
                    if (accountName != null) {
                        // Persist the choice so the picker is skipped next time.
                        SharedPreferences settings = getPreferences(Context.MODE_PRIVATE);
                        SharedPreferences.Editor editor = settings.edit();
                        editor.putString(PREF_ACCOUNT_NAME, accountName);
                        editor.apply();
                        mCredential.setSelectedAccountName(accountName);
                        getResultsFromApi();
                    }
                }
                break;
            case REQUEST_AUTHORIZATION_ERR_CODE:
                if (resultCode == RESULT_OK) {
                    getResultsFromApi();
                }
                break;
            // Thumbnail picked: preview it and keep its URI in the status view.
            case REQUEST_PICK_IMAGE_FILE:
                if (resultCode == Activity.RESULT_OK && data.getData() != null) {
                    Uri uri = data.getData();
                    try {
                        Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
                        imageViewPersonImage.setImageBitmap(bitmap);
                        textViewPersonImageResult.setText(uri.toString());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
        }
    }

    /**
     * Receives runtime-permission results (Android 6.0+) and forwards them to
     * EasyPermissions, which dispatches to the callbacks below.
     *
     * @param requestCode  request code passed to requestPermissions
     * @param permissions  the permissions that were requested
     * @param grantResults PERMISSION_GRANTED / PERMISSION_DENIED per permission
     */
    @Override
    public void onRequestPermissionsResult(
            int requestCode,
            @NonNull String[] permissions,
            @NonNull int[] grantResults
    ) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
    }

    /**
     * Called by EasyPermissions when a requested permission is granted.
     *
     * @param requestCode request code associated with the permission
     * @param list        the granted permissions
     */
    @Override
    public void onPermissionsGranted(int requestCode, List<String> list) {
        // Nothing to do — @AfterPermissionGranted re-triggers chooseAccount().
    }

    /**
     * Called by EasyPermissions when a requested permission is denied.
     *
     * @param requestCode request code associated with the permission
     * @param list        the denied permissions
     */
    @Override
    public void onPermissionsDenied(int requestCode, List<String> list) {
        // Intentionally a no-op.
    }

    /**
     * Checks whether the device currently has a network connection.
     *
     * @return true when connected to a network
     */
    private boolean isDeviceOnline() {
        ConnectivityManager connMgr = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
        return (networkInfo != null && networkInfo.isConnected());
    }

    /**
     * Asynchronously creates a new Google Calendar (titled with the person's
     * name) via the Calendar API and reports the resulting calendar id back to
     * the UI.
     */
    private class MakeRequestTask extends AsyncTask<Void, Void, String> {
        // Title for the calendar to create.
        String calenderTitle;
        private com.google.api.services.calendar.Calendar mService = null;
        // Last exception thrown in doInBackground; examined in onCancelled.
        private Exception mLastError = null;

        // NOTE(review): the parameter name "calenderTitile" is misspelled in the
        // original source; kept as-is.
        public MakeRequestTask(
                GoogleAccountCredential credential, String calenderTitile
        ) {
            HttpTransport transport = AndroidHttp.newCompatibleTransport();
            JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
            mService = new com.google.api.services.calendar.Calendar
                    .Builder(transport, jsonFactory, credential)
                    .setApplicationName("Google Calendar API Android Quickstart")
                    .build();
            this.calenderTitle = calenderTitile;
        }

        /**
         * Background call to the Google Calendar API.
         *
         * @param params unused
         */
        @Override
        protected String doInBackground(Void... params) {
            try {
                return createCalendar();
            } catch (Exception e) {
                // Remember the failure and cancel so onCancelled can report it.
                mLastError = e;
                cancel(true);
                return null;
            }
        }

        /**
         * Adds a new calendar to the selected Google account.
         *
         * @return the id of the created calendar
         * @throws IOException on API failure
         */
        private String createCalendar() throws IOException {
            // Build the calendar: title from the person name, JST timezone.
            com.google.api.services.calendar.model.Calendar calendar = new Calendar();
            calendar.setSummary(this.calenderTitle);
            calendar.setTimeZone("Asia/Tokyo");
            // Insert it into the account's calendar collection.
            Calendar createdCalendar = mService.calendars().insert(calendar).execute();
            String calendarId = createdCalendar.getId();
            // Fetch its calendar-list entry and set a default background color.
            CalendarListEntry calendarListEntry = mService.calendarList().get(calendarId).execute();
            calendarListEntry.setBackgroundColor("#ff0000");
            // Push the color change back to Google Calendar.
            // NOTE(review): the returned entry is unused; the call is made for
            // its server-side effect only.
            CalendarListEntry updatedCalendarListEntry =
                    mService.calendarList()
                            .update(calendarListEntry.getId(), calendarListEntry)
                            .setColorRgbFormat(true)
                            .execute();
            return calendarId;
        }

        @Override
        protected void onPreExecute() {
            textViewCalenderIdResult.setText("");
            mProgress.show();
        }

        @Override
        protected void onPostExecute(String output) {
            if (mProgress != null && mProgress.isShowing()) {
                mProgress.dismiss();
            }
            if (output == null || output.isEmpty()) {
                textViewCalenderIdResult.setText(getString(R.string.err_create_calender_msg));
            } else {
                // Success: surface the new calendar id in the form.
                textViewCalenderIdResult.setText(getString(R.string.result_create_calender));
                editTextCalenderId.setText(output);
            }
        }

        @Override
        protected void onCancelled() {
            if (mProgress != null && mProgress.isShowing()) {
                mProgress.dismiss();
            }
            if (mLastError != null) {
                if (mLastError instanceof GooglePlayServicesAvailabilityIOException) {
                    // Play Services problem — show the standard resolution dialog.
                    showGooglePlayServicesAvailabilityErrorDialog(
                            ((GooglePlayServicesAvailabilityIOException) mLastError)
                                    .getConnectionStatusCode());
                } else if (mLastError instanceof UserRecoverableAuthIOException) {
                    // Auth problem the user can fix — launch the consent flow.
                    startActivityForResult(
                            ((UserRecoverableAuthIOException) mLastError).getIntent(),
                            REQUEST_AUTHORIZATION_ERR_CODE);
                } else {
                    textViewCalenderIdResult.setText("The following error occurred:\n"
                            + mLastError.getMessage());
                }
            } else {
                textViewCalenderIdResult.setText(getString(R.string.cancelled_create_calender));
            }
        }
    }
}
package org.fenixedu.notifications.core.json; import org.fenixedu.bennu.core.json.JsonBuilder; import org.fenixedu.bennu.core.json.JsonCreator; import org.fenixedu.notifications.core.domain.Message; import org.fenixedu.notifications.core.service.MessageService; import com.google.gson.JsonElement; import com.google.gson.JsonObject; public class MessageJsonCreator implements JsonCreator<Message> { @Override public Message create(JsonElement json, JsonBuilder ctx) { JsonObject jsonObject = json.getAsJsonObject(); String from = jsonObject.get("from").getAsString(); String to = jsonObject.get("to").getAsString(); String text = jsonObject.get("text").getAsString(); return MessageService.create(from, to, text); } }
package 工厂模式;

/**
 * Concrete pizza used in the factory-pattern example.
 * Each lifecycle step just reports that it ran for a VeggiePizza.
 */
public class VeggiePizza extends Pizza {

    /** Prints "<step> VeggiePizza" — shared by all lifecycle methods. */
    private void report(String step) {
        System.out.println(step + " VeggiePizza");
    }

    public void prepare() {
        report("prepare");
    }

    public void bake() {
        report("bake");
    }

    public void cut() {
        report("cut");
    }

    public void box() {
        report("box");
    }
}
package seedu.zerotoone; import java.io.IOException; import java.nio.file.Path; import java.util.Optional; import java.util.logging.Logger; import javafx.application.Application; import javafx.application.Preloader.ProgressNotification; import javafx.application.Preloader.StateChangeNotification; import javafx.stage.Stage; import seedu.zerotoone.commons.core.Config; import seedu.zerotoone.commons.core.LogsCenter; import seedu.zerotoone.commons.core.Version; import seedu.zerotoone.commons.exceptions.DataConversionException; import seedu.zerotoone.commons.util.ConfigUtil; import seedu.zerotoone.commons.util.StringUtil; import seedu.zerotoone.logic.Logic; import seedu.zerotoone.logic.LogicManager; import seedu.zerotoone.model.Model; import seedu.zerotoone.model.ModelManager; import seedu.zerotoone.model.exercise.ExerciseList; import seedu.zerotoone.model.exercise.ReadOnlyExerciseList; import seedu.zerotoone.model.log.LogList; import seedu.zerotoone.model.log.ReadOnlyLogList; import seedu.zerotoone.model.schedule.ScheduleList; import seedu.zerotoone.model.userprefs.ReadOnlyUserPrefs; import seedu.zerotoone.model.userprefs.UserPrefs; import seedu.zerotoone.model.util.SampleExerciseDataUtil; import seedu.zerotoone.model.util.SampleLogDataUtil; import seedu.zerotoone.model.util.SampleScheduleDataUtil; import seedu.zerotoone.model.util.SampleWorkoutDataUtil; import seedu.zerotoone.model.workout.ReadOnlyWorkoutList; import seedu.zerotoone.model.workout.WorkoutList; import seedu.zerotoone.storage.Storage; import seedu.zerotoone.storage.StorageManager; import seedu.zerotoone.storage.exercise.ExerciseListStorage; import seedu.zerotoone.storage.exercise.ExerciseListStorageManager; import seedu.zerotoone.storage.log.LogListStorage; import seedu.zerotoone.storage.log.LogListStorageManager; import seedu.zerotoone.storage.schedule.ScheduleListStorage; import seedu.zerotoone.storage.schedule.ScheduleListStorageManager; import seedu.zerotoone.storage.userprefs.UserPrefsStorage; 
import seedu.zerotoone.storage.userprefs.UserPrefsStorageManager; import seedu.zerotoone.storage.workout.WorkoutListStorage; import seedu.zerotoone.storage.workout.WorkoutListStorageManager; import seedu.zerotoone.ui.Ui; import seedu.zerotoone.ui.UiManager; /** * Runs the application. */ public class MainApp extends Application { public static final Version VERSION = new Version(1, 4, 0, true); private static final Logger logger = LogsCenter.getLogger(MainApp.class); protected Ui ui; protected Logic logic; protected Storage storage; protected Model model; protected Config config; private final int totalNumSteps = 13; private int numCompletedSteps = 0; @Override public void init() throws Exception { logger.info("=============================[ Initializing ZeroToOne ]==========================="); super.init(); // ----------------------------------------------------------------------------------------- // Common AppParameters appParameters = AppParameters.parse(getParameters()); increaseProgress(); config = initConfig(appParameters.getConfigPath()); increaseProgress(); UserPrefsStorage userPrefsStorage = new UserPrefsStorageManager(config.getUserPrefsFilePath()); increaseProgress(); UserPrefs userPrefs = initPrefs(userPrefsStorage); increaseProgress(); initLogging(config); increaseProgress(); // ----------------------------------------------------------------------------------------- // Exercise List ExerciseListStorage exerciseListStorage = new ExerciseListStorageManager(userPrefs.getExerciseListFilePath()); increaseProgress(); // Workout List WorkoutListStorage workoutListStorage = new WorkoutListStorageManager(userPrefs.getWorkoutListFilePath()); increaseProgress(); // Schedule ScheduleListStorage scheduleListStorage = new ScheduleListStorageManager(userPrefs.getScheduleListFilePath()); increaseProgress(); // Log LogListStorage logListStorage = new LogListStorageManager(userPrefs.getLogListFilePath()); increaseProgress(); // 
----------------------------------------------------------------------------------------- // Common storage = new StorageManager(userPrefsStorage, exerciseListStorage, workoutListStorage, scheduleListStorage, logListStorage); increaseProgress(); model = initModelManager(storage, userPrefs); increaseProgress(); logic = new LogicManager(model, storage); increaseProgress(); ui = new UiManager(logic); increaseProgress(); } private void increaseProgress() { this.numCompletedSteps++; notifyPreloader(new ProgressNotification((double) this.numCompletedSteps / totalNumSteps)); } /** * Returns a {@code ModelManager} with the data from {@code storage}'s ZeroToOne storage * and {@code userPrefs}. <br> The data from the sample ZeroToOne storage will * be used instead if {@code storage}'s ZeroToOne storage is not found, or an empty ZeroToOne * storage will be used instead if errors occur when reading {@code storage}'s ZeroToOne storage. */ private Model initModelManager(Storage storage, ReadOnlyUserPrefs userPrefs) { Optional<ReadOnlyExerciseList> exerciseListOptional; ReadOnlyExerciseList initialExerciseListData; Optional<ScheduleList> scheduleListOptional; ScheduleList initialScheduleListData; Optional<ReadOnlyWorkoutList> workoutListOptional; ReadOnlyWorkoutList initialWorkoutListData; Optional<ReadOnlyLogList> logListOptional; ReadOnlyLogList initialLogListData; // ----------------------------------------------------------------------------------------- // Exercise List try { exerciseListOptional = storage.readExerciseList(); if (!exerciseListOptional.isPresent()) { logger.info("Data file not found. Will be starting with a sample ExerciseList"); } initialExerciseListData = exerciseListOptional.orElseGet(SampleExerciseDataUtil::getSampleExerciseList); } catch (DataConversionException e) { logger.warning("Data file not in the correct format. 
Will be starting with an empty ExerciseList"); initialExerciseListData = new ExerciseList(); } catch (IOException e) { logger.warning("Problem while reading from the file. Will be starting with an empty ExerciseList"); initialExerciseListData = new ExerciseList(); } // Workout List try { workoutListOptional = storage.readWorkoutList(); if (!workoutListOptional.isPresent()) { logger.info("Data file not found. Will be starting with a sample WorkoutList"); } initialWorkoutListData = workoutListOptional.orElseGet(SampleWorkoutDataUtil::getSampleWorkoutList); } catch (DataConversionException e) { logger.warning("Data file not in the correct format. Will be starting with an empty WorkoutList"); initialWorkoutListData = new WorkoutList(); } catch (IOException e) { logger.warning("Problem while reading from the file. Will be starting with an empty WorkoutList"); initialWorkoutListData = new WorkoutList(); } // Schedule List try { scheduleListOptional = storage.readScheduleList(); if (scheduleListOptional.isEmpty()) { logger.info("Data file not found. Will be starting with an empty ScheduleList"); } initialScheduleListData = scheduleListOptional.orElseGet(SampleScheduleDataUtil::getSampleScheduleList); } catch (DataConversionException e) { logger.warning("Data file not in the correct format. Will be starting with an empty ScheduleList"); initialScheduleListData = new ScheduleList(); } catch (IOException e) { logger.warning("Problem while reading from the file. Will be starting with an empty ScheduleList"); initialScheduleListData = new ScheduleList(); } // Log List try { logListOptional = storage.readLogList(); if (logListOptional.isEmpty()) { logger.info("Data file not found. Will be starting with a sample ExerciseList"); } initialLogListData = logListOptional.orElseGet(SampleLogDataUtil::getSampleLogList); } catch (DataConversionException e) { logger.warning("Data file not in the correct format. 
Will be starting with an empty LogList"); initialLogListData = new LogList(); } catch (IOException e) { logger.warning("Problem while reading from the file. Will be starting with an empty LogList"); initialLogListData = new LogList(); } return new ModelManager(userPrefs, initialExerciseListData, initialWorkoutListData, initialScheduleListData, initialLogListData); } private void initLogging(Config config) { LogsCenter.init(config); } /** * Returns a {@code Config} using the file at {@code configFilePath}. <br> * The default file path {@code Config#DEFAULT_CONFIG_FILE} will be used instead * if {@code configFilePath} is null. */ protected Config initConfig(Path configFilePath) { Config initializedConfig; Path configFilePathUsed; configFilePathUsed = Config.DEFAULT_CONFIG_FILE; if (configFilePath != null) { logger.info("Custom Config file specified " + configFilePath); configFilePathUsed = configFilePath; } logger.info("Using config file : " + configFilePathUsed); try { Optional<Config> configOptional = ConfigUtil.readConfig(configFilePathUsed); initializedConfig = configOptional.orElse(new Config()); } catch (DataConversionException e) { logger.warning("Config file at " + configFilePathUsed + " is not in the correct format. " + "Using default config properties"); initializedConfig = new Config(); } //Update config file in case it was missing to begin with or there are new/unused fields try { ConfigUtil.saveConfig(initializedConfig, configFilePathUsed); } catch (IOException e) { logger.warning("Failed to save config file : " + StringUtil.getDetails(e)); } return initializedConfig; } /** * Returns a {@code UserPrefs} using the file at {@code storage}'s user prefs file path, * or a new {@code UserPrefs} with default configuration if errors occur when * reading from the file. 
*/ protected UserPrefs initPrefs(UserPrefsStorage storage) { Path prefsFilePath = storage.getUserPrefsFilePath(); logger.info("Using prefs file : " + prefsFilePath); UserPrefs initializedPrefs; try { Optional<UserPrefs> prefsOptional = storage.readUserPrefs(); initializedPrefs = prefsOptional.orElse(new UserPrefs()); } catch (DataConversionException e) { logger.warning("UserPrefs file at " + prefsFilePath + " is not in the correct format. " + "Using default user prefs"); initializedPrefs = new UserPrefs(); } catch (IOException e) { logger.warning("Problem while reading from the file. Will be starting with an empty ExerciseList"); initializedPrefs = new UserPrefs(); } //Update prefs file in case it was missing to begin with or there are new/unused fields try { storage.saveUserPrefs(initializedPrefs); } catch (IOException e) { logger.warning("Failed to save config file : " + StringUtil.getDetails(e)); } return initializedPrefs; } @Override public void start(Stage primaryStage) { logger.info("Starting ZeroToOne " + MainApp.VERSION); notifyPreloader(new StateChangeNotification(StateChangeNotification.Type.BEFORE_START)); ui.start(primaryStage); } @Override public void stop() { logger.info("============================ [ Stopping ZeroToOne ] ============================="); try { storage.saveUserPrefs(model.getUserPrefs()); } catch (IOException e) { logger.severe("Failed to save preferences " + StringUtil.getDetails(e)); } } }
package org.firstinspires.ftc.teamcode.thirdWheel.auto; import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.util.ElapsedTime; import org.firstinspires.ftc.teamcode.hardware.Gyro; import org.firstinspires.ftc.teamcode.thirdWheel.hardware.DriveThirdWheel; import org.firstinspires.ftc.teamcode.thirdWheel.hardware.lift.Lift; @Autonomous(name="ThirdWheel InnerRed", group="ThirdWheel") public class InnerRed extends LinearOpMode { @Override public void runOpMode() throws InterruptedException { DriveThirdWheel drive = new DriveThirdWheel(this, hardwareMap); Lift lift = new Lift(this, hardwareMap); // Elapsed time for timed motion ElapsedTime runtime = new ElapsedTime(); // Send telemetry message to signify robot waiting; telemetry.addData("Status", "Ready to run"); telemetry.update(); waitForStart(); if (isStopRequested()) return; while (runtime.seconds() < 3) { lift.override(1, -1); lift.assist(); drive.setPowers(0.5, -0.6, -0.6, 0.5); } drive.stop(); lift.override(0, -1); lift.assist(); } }
/*
Copyright 2015, 2016 Tremolo Security, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.tremolosecurity.provisioning.core.providers;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.List;

import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.cookie.Cookie;
import org.apache.http.cookie.CookieOrigin;
import org.apache.http.cookie.CookieSpec;
import org.apache.http.cookie.CookieSpecFactory;
import org.apache.http.cookie.MalformedCookieException;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.impl.cookie.BrowserCompatSpec;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.HttpParams;
import org.apache.logging.log4j.Logger;

import com.google.gson.Gson;
import com.tremolosecurity.config.util.ConfigManager;
import com.tremolosecurity.provisioning.core.ProvisioningException;
import com.tremolosecurity.provisioning.core.ProvisioningParams;
import com.tremolosecurity.provisioning.core.User;
import com.tremolosecurity.provisioning.core.UserStoreProvider;
import com.tremolosecurity.provisioning.service.util.ProvisioningResult;
import com.tremolosecurity.provisioning.service.util.TremoloUser;
import com.tremolosecurity.provisioning.service.util.WFCall;
import com.tremolosecurity.saml.Attribute;

/**
 * User-store provider that provisions users by invoking workflows on a remote
 * Unison/OpenUnison instance over its HTTP web-service API. Each provisioning
 * operation (create/sync/delete/set-password) maps to a configured workflow name,
 * executed synchronously via {@code /services/wf/execute}.
 */
public class TremoloTarget implements UserStoreProvider {

    static Logger logger = org.apache.logging.log4j.LogManager.getLogger(TremoloTarget.class.getName());

    // Names of the remote workflows to run for each operation (from target config).
    String createUserWF;
    String setUserPasswordWF;
    String syncUserWF;
    String deleteUserWF;
    // Base URL of the remote Unison server, e.g. "https://host:port".
    String wfUrlBase;
    // Attribute that uniquely identifies a user in the remote workflows.
    String uidAttrName;
    ConfigManager cfgMgr;
    // Port parsed from wfUrlBase (443 when the URL has no explicit port).
    int port;
    private String name;
    private PoolingHttpClientConnectionManager phcm;
    private CloseableHttpClient httpclient;

    /** Runs the configured create-user workflow with the user's attributes. */
    @Override
    public void createUser(User user, Set<String> attributes,Map<String,Object> request)
            throws ProvisioningException {
        this.executeWorkFlow(this.createUserWF, user, attributes,request);
    }

    /** Runs the configured set-password workflow; no attributes are forwarded. */
    @Override
    public void setUserPassword(User user,Map<String,Object> request) throws ProvisioningException {
        this.executeWorkFlow(this.setUserPasswordWF, user, new HashSet<String>(),request);
    }

    /** Runs the configured sync workflow with the user's attributes. */
    @Override
    public void syncUser(User user, boolean addOnly, Set<String> attributes,
            Map<String,Object> request) throws ProvisioningException {
        this.executeWorkFlow(this.syncUserWF, user, attributes,request);
    }

    /** Runs the configured delete workflow; no attributes are forwarded. */
    @Override
    public void deleteUser(User user,Map<String,Object> request) throws ProvisioningException {
        this.executeWorkFlow(this.deleteUserWF, user, new HashSet<String>(),request);
    }

    /**
     * Looks up a user via the remote search service and returns only the
     * requested attributes.
     *
     * @param userID     value to search for (sent as the {@code uid} query parameter)
     * @param attributes names of the attributes the caller wants returned
     * @param request    provisioning request context (unused here)
     * @throws ProvisioningException if the remote call or JSON parsing fails
     */
    @Override
    public User findUser(String userID, Set<String> attributes,Map<String,Object> request)
            throws ProvisioningException {
        StringBuffer sbUrl = new StringBuffer();
        sbUrl.append(this.wfUrlBase).append("/services/wf/search?uid=").append(userID);

        // NOTE(review): userID is concatenated into the query string without URL
        // encoding — confirm callers never pass values with reserved characters.
        HttpGet httpget = new HttpGet(sbUrl.toString());

        try {
            HttpResponse response = httpclient.execute(httpget);
            // NOTE(review): this reader is never closed; releaseConnection() in the
            // finally block is relied on to reclaim the underlying stream.
            BufferedReader in = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));
            String line = null;
            StringBuffer json = new StringBuffer();
            while ((line = in.readLine()) != null) {
                json.append(line);
            }

            Gson gson = new Gson();
            TremoloUser tuser = gson.fromJson(json.toString(), TremoloUser.class);

            User toret = new User(tuser.getUid());
            // Copy over only the attributes the caller asked for.
            for (Attribute attr : tuser.getAttributes()) {
                if (attributes.contains(attr.getName())) {
                    toret.getAttribs().put(attr.getName(), attr);
                }
            }

            httpget.abort();
            return toret;
        } catch (Exception e) {
            throw new ProvisioningException("Could not find user",e);
        } finally {
            httpget.releaseConnection();
        }
    }

    /**
     * Validates and stores the target configuration, derives the remote port from
     * {@code wfUrlBase}, and builds the pooled HTTP client used by all operations.
     *
     * @throws ProvisioningException if a required setting is missing or the URL is malformed
     */
    @Override
    public void init(Map<String, Attribute> cfg, ConfigManager cfgMgr,String name)
            throws ProvisioningException {
        this.name = name;
        this.cfgMgr = cfgMgr;

        if (cfg.get("createUsersWF") == null) {
            throw new ProvisioningException("Create user workflow not specified");
        }
        this.createUserWF = cfg.get("createUsersWF").getValues().get(0);

        if (cfg.get("deleteUserWF") == null) {
            throw new ProvisioningException("Delete user workflow not specified");
        }
        this.deleteUserWF = cfg.get("deleteUserWF").getValues().get(0);

        if (cfg.get("setUserPasswordWF") == null) {
            throw new ProvisioningException("Set user password workflow not specified");
        }
        this.setUserPasswordWF = cfg.get("setUserPasswordWF").getValues().get(0);

        if (cfg.get("syncUserWF") == null) {
            throw new ProvisioningException("Synchronize user workflow not specified");
        }
        this.syncUserWF = cfg.get("syncUserWF").getValues().get(0);

        if (cfg.get("uidAttrName") == null) {
            throw new ProvisioningException("User identifier attribute name not found");
        }
        this.uidAttrName = cfg.get("uidAttrName").getValues().get(0);

        if (cfg.get("wfUrlBase") == null) {
            throw new ProvisioningException("WorkflowImpl URL base not specified");
        }
        this.wfUrlBase = cfg.get("wfUrlBase").getValues().get(0);

        try {
            URL url = new URL(this.wfUrlBase);
            if (url.getPort() > 0) {
                this.port = url.getPort();
            } else {
                // No explicit port in the URL: assume HTTPS default.
                this.port = 443;
            }
        } catch (MalformedURLException e) {
            throw new ProvisioningException("Could not configure target",e);
        }

        phcm = new PoolingHttpClientConnectionManager(cfgMgr.getHttpClientSocketRegistry());
        httpclient = HttpClients.custom().setConnectionManager(phcm).build();
    }

    /**
     * Executes a remote workflow: first hits the login endpoint (best-effort),
     * then POSTs a JSON-encoded {@code WFCall} to the execute endpoint and
     * checks the returned {@code ProvisioningResult}.
     *
     * @param wfName     name of the remote workflow to run
     * @param user       user whose uid/password/attributes are submitted
     * @param attributes attribute-name filter; an empty set forwards all attributes
     * @throws ProvisioningException if the call fails or the workflow reports an error
     */
    private void executeWorkFlow(String wfName,User user,Set<String> attributes,
            Map<String,Object> request) throws ProvisioningException {
        StringBuffer surl = new StringBuffer();
        surl.append(this.wfUrlBase).append("/services/wf/login");
        HttpGet get = new HttpGet(surl.toString());
        try {
            try {
                httpclient.execute(get);
            } catch (ClientProtocolException e1) {
                // NOTE(review): login failures are intentionally swallowed —
                // presumably the call only primes a session; confirm, and
                // consider at least logging at debug level.
            } catch (IOException e1) {
                // See note above: deliberately best-effort.
            }
        } finally {
            get.releaseConnection();
        }

        surl.setLength(0);
        surl.append(this.wfUrlBase).append("/services/wf/execute");
        HttpPost post = new HttpPost(surl.toString());
        try {
            // Build the user payload for the workflow call.
            TremoloUser tu = new TremoloUser();
            tu.setAttributes(new ArrayList<Attribute>());
            tu.setUid(user.getUserID());
            tu.setUserPassword(user.getPassword());

            // An empty filter set means "send every attribute".
            for (String attrName : user.getAttribs().keySet()) {
                Attribute attr = user.getAttribs().get(attrName);
                if (attributes.size() == 0 || attributes.contains(attrName)) {
                    tu.getAttributes().add(attr);
                }
            }

            WFCall wfcall = new WFCall();
            wfcall.setName(wfName);
            wfcall.setUidAttributeName(this.uidAttrName);
            wfcall.setUser(tu);
            wfcall.setRequestParams(new HashMap<String,Object>());
            // Run the workflow synchronously so failures surface on this call.
            wfcall.getRequestParams().put(ProvisioningParams.UNISON_EXEC_TYPE,
                    ProvisioningParams.UNISON_EXEC_SYNC);

            Gson gson = new Gson();
            String jsonOut = gson.toJson(wfcall);

            List<NameValuePair> formparams = new ArrayList<NameValuePair>();
            formparams.add(new BasicNameValuePair("wfcall", jsonOut));
            UrlEncodedFormEntity entity = new UrlEncodedFormEntity(formparams, "UTF-8");
            post.setEntity(entity);

            HttpResponse response = httpclient.execute(post);
            // NOTE(review): reader not closed; connection release in finally is
            // relied on, mirroring findUser().
            BufferedReader in = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));
            String line = null;
            StringBuffer res = new StringBuffer();
            while ((line = in.readLine()) != null) {
                //System.out.println(line);
                res.append(line).append('\n');
            }

            ProvisioningResult provRes = gson.fromJson(res.toString(), ProvisioningResult.class);

            if (! provRes.isSuccess()) {
                throw new ProvisioningException(provRes.getError().getError());
            }
        } catch (Exception e) {
            throw new ProvisioningException("Could not execute workflow",e);
        } finally {
            post.releaseConnection();
        }
    }

    /** Closes the HTTP client and its pooled connection manager. */
    @Override
    public void shutdown() throws ProvisioningException {
        phcm.close();
        try {
            httpclient.close();
        } catch (IOException e) {
            logger.warn("Error shutting down",e);
        }
    }
}
package org.basex.query.expr; import org.basex.query.value.node.*; import org.basex.util.*; /** * Expression information, used for debugging and logging. * * @author BaseX Team 2005-16, BSD License * @author Christian Gruen */ public abstract class ExprInfo { /** * Returns a string description of the expression. This method is only * called by error messages. Contrary to the {@link #toString()} method, * arguments are not included in the output. * @return result of check */ public String description() { return Token.string(info()) + " expression"; } /** * Returns the simplified class name. * @return class name */ private byte[] info() { return Token.token(Util.className(this)); } /** * Returns a string representation of the expression that can be embedded in error messages. * Defaults to {@link #toString()}. * @return class name */ public String toErrorString() { return toString(); } @Override public abstract String toString(); /** * Creates an expression tree. * @param e root element */ public abstract void plan(FElem e); /** * Creates a new element node to be added to the expression tree. * @param atts optional attribute names and values * @return tree node */ protected FElem planElem(final Object... atts) { final FElem el = new FElem(info()); final int al = atts.length; for(int a = 0; a < al - 1; a += 2) { if(atts[a + 1] != null) el.add(planAttr(atts[a], atts[a + 1])); } return el; } /** * Adds trees of the specified expressions to the root node. * @param plan root node * @param el new element * @param exprs expressions */ protected void addPlan(final FElem plan, final FElem el, final Object... 
exprs) { plan.add(el); for(final Object expr : exprs) { if(expr instanceof ExprInfo) { ((ExprInfo) expr).plan(el); } else if(expr instanceof ExprInfo[]) { for(final ExprInfo ex : (ExprInfo[]) expr) { if(ex != null) ex.plan(el); } } else if(expr instanceof byte[]) { el.add((byte[]) expr); } else if(expr != null) { el.add(expr.toString()); } } } /** * Adds trees of the specified expressions to the root node. * @param plan root node * @param el new element * @param expr expressions */ protected void addPlan(final FElem plan, final FElem el, final ExprInfo... expr) { addPlan(plan, el, (Object) expr); } /** * Creates a new attribute to be added to the expression tree. * @param name name of attribute * @param value value of attribute * @return tree node */ protected FAttr planAttr(final Object name, final Object value) { return new FAttr(Util.inf(name), Util.inf(value)); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.physical; import static org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits.UNIFORM; import java.io.IOException; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.Stack; import java.util.TreeSet; import java.util.regex.Pattern; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; import org.apache.hadoop.hive.ql.exec.vector.expressions.ConvertDecimal64ToDecimal; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinAntiJoinLongOperator; import 
org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinAntiJoinMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinAntiJoinStringOperator; import org.apache.hadoop.hive.ql.exec.vector.reducesink.*; import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.io.api.LlapProxy; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.*; import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey; import org.apache.hadoop.hive.ql.exec.spark.SparkTask; import org.apache.hadoop.hive.ql.exec.tez.TezTask; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.filesink.VectorFileSinkArrowOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyLongOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyStringOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerLongOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerStringOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiLongOperator; import 
org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiStringOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterLongOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterStringOperator; import org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFOperator; import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping; import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.HiveVectorAdaptorUsageMode; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion; import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support; import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; 
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression; import org.apache.hadoop.hive.ql.io.NullRowsInputFormat; import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.ZeroRowsInputFormat; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.lib.SemanticDispatcher; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.TaskGraphWalker; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType; import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc; import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.AppMasterEventDesc; import org.apache.hadoop.hive.ql.plan.BaseWork; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc.ExprNodeDescEqualityWrapper; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.GroupByDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.LimitDesc; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.MergeJoinWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.SelectDesc; import org.apache.hadoop.hive.ql.plan.TopNKeyDesc; import 
org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc; import org.apache.hadoop.hive.ql.plan.VectorDesc; import org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorFilterDesc; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc; import org.apache.hadoop.hive.ql.plan.VectorPTFInfo; import org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType; import org.apache.hadoop.hive.ql.plan.VectorTableScanDesc; import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode; import org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorTopNKeyDesc; import org.apache.hadoop.hive.ql.plan.VectorLimitDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo; import org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc; import org.apache.hadoop.hive.ql.plan.PartitionDesc; import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.plan.ReduceWork; import org.apache.hadoop.hive.ql.plan.SMBJoinDesc; import org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc; import org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc; import org.apache.hadoop.hive.ql.plan.SparkWork; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKind; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation; import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc.VectorDeserializeType; import 
org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc; import org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo; import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc; import org.apache.hadoop.hive.ql.plan.VectorSelectDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper; import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef; import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef; import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef; import org.apache.hadoop.hive.ql.udf.UDFAcos; import org.apache.hadoop.hive.ql.udf.UDFAsin; import org.apache.hadoop.hive.ql.udf.UDFAtan; import org.apache.hadoop.hive.ql.udf.UDFBin; import org.apache.hadoop.hive.ql.udf.UDFConv; import org.apache.hadoop.hive.ql.udf.UDFCos; import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth; import org.apache.hadoop.hive.ql.udf.UDFDayOfWeek; import org.apache.hadoop.hive.ql.udf.UDFDegrees; import org.apache.hadoop.hive.ql.udf.UDFExp; import org.apache.hadoop.hive.ql.udf.UDFHex; import org.apache.hadoop.hive.ql.udf.UDFHour; import org.apache.hadoop.hive.ql.udf.UDFLike; import org.apache.hadoop.hive.ql.udf.UDFLn; import org.apache.hadoop.hive.ql.udf.UDFLog; import org.apache.hadoop.hive.ql.udf.UDFLog10; import org.apache.hadoop.hive.ql.udf.UDFLog2; import org.apache.hadoop.hive.ql.udf.UDFMinute; import org.apache.hadoop.hive.ql.udf.UDFMonth; import org.apache.hadoop.hive.ql.udf.UDFRadians; import org.apache.hadoop.hive.ql.udf.UDFRand; import org.apache.hadoop.hive.ql.udf.UDFRegExpExtract; import org.apache.hadoop.hive.ql.udf.UDFRegExpReplace; import org.apache.hadoop.hive.ql.udf.UDFSecond; import org.apache.hadoop.hive.ql.udf.UDFSign; import org.apache.hadoop.hive.ql.udf.UDFSin; import org.apache.hadoop.hive.ql.udf.UDFSqrt; 
import org.apache.hadoop.hive.ql.udf.UDFSubstr; import org.apache.hadoop.hive.ql.udf.UDFTan; import org.apache.hadoop.hive.ql.udf.UDFToBoolean; import org.apache.hadoop.hive.ql.udf.UDFToByte; import org.apache.hadoop.hive.ql.udf.UDFToDouble; import org.apache.hadoop.hive.ql.udf.UDFToFloat; import org.apache.hadoop.hive.ql.udf.UDFToInteger; import org.apache.hadoop.hive.ql.udf.UDFToLong; import org.apache.hadoop.hive.ql.udf.UDFToShort; import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; import org.apache.hadoop.hive.ql.udf.UDFYear; import org.apache.hadoop.hive.ql.udf.generic.*; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.NullStructSerDe; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.SequenceFileInputFormat; import 
org.apache.hadoop.mapred.TextInputFormat; import org.apache.hive.common.util.AnnotationUtils; import org.apache.hadoop.util.ReflectionUtils; import com.google.common.collect.ImmutableSet; import com.google.common.base.Preconditions; public class Vectorizer implements PhysicalPlanResolver { protected static transient final Logger LOG = LoggerFactory.getLogger(Vectorizer.class); private static final Pattern supportedDataTypesPattern; private static final TypeInfo[] EMPTY_TYPEINFO_ARRAY = new TypeInfo[0]; static { StringBuilder patternBuilder = new StringBuilder(); patternBuilder.append("int"); patternBuilder.append("|smallint"); patternBuilder.append("|tinyint"); patternBuilder.append("|bigint"); patternBuilder.append("|integer"); patternBuilder.append("|long"); patternBuilder.append("|short"); patternBuilder.append("|timestamp"); patternBuilder.append("|" + serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME); patternBuilder.append("|" + serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME); patternBuilder.append("|boolean"); patternBuilder.append("|binary"); patternBuilder.append("|string"); patternBuilder.append("|byte"); patternBuilder.append("|float"); patternBuilder.append("|double"); patternBuilder.append("|date"); patternBuilder.append("|void"); // Decimal types can be specified with different precision and scales e.g. decimal(10,5), // as opposed to other data types which can be represented by constant strings. // The regex matches only the "decimal" prefix of the type. patternBuilder.append("|decimal.*"); // CHAR and VARCHAR types can be specified with maximum length. patternBuilder.append("|char.*"); patternBuilder.append("|varchar.*"); supportedDataTypesPattern = Pattern.compile(patternBuilder.toString()); } private Set<Class<?>> supportedGenericUDFs = new HashSet<Class<?>>(); private Set<String> supportedAggregationUdfs = new HashSet<String>(); // The set of virtual columns that vectorized readers *MAY* support. 
public static final ImmutableSet<VirtualColumn> vectorizableVirtualColumns =
      ImmutableSet.of(VirtualColumn.ROWID);

  private HiveConf hiveConf;

  /**
   * Tri-state override of vectorization taken from configuration:
   * NONE follows the regular enable flag, DISABLE/ENABLE force the decision.
   */
  public enum EnabledOverride {
    NONE,
    DISABLE,
    ENABLE;

    // Lower-cased name -> constant, for parsing the configuration value.
    public static final Map<String, EnabledOverride> nameMap =
        new HashMap<String, EnabledOverride>();
    static {
      for (EnabledOverride vectorizationEnabledOverride : values()) {
        nameMap.put(
            vectorizationEnabledOverride.name().toLowerCase(), vectorizationEnabledOverride);
      }
    };
  }

  // Snapshot of the relevant HiveConf settings, taken once per resolve.
  private boolean isVectorizationEnabled;
  private EnabledOverride vectorizationEnabledOverride;
  private boolean isTestForcedVectorizationEnable;

  // Which input paths may be vectorized (vectorized IF / VectorDeserialize / row deserialize).
  private boolean useVectorizedInputFileFormat;
  private boolean useVectorDeserialize;
  private boolean useRowDeserialize;
  private boolean isReduceVectorizationEnabled;
  private boolean isPtfVectorizationEnabled;
  private boolean isVectorizationComplexTypesEnabled;

  // Now deprecated.
  private boolean isVectorizationGroupByComplexTypesEnabled;

  private boolean isVectorizedRowIdentifierEnabled;
  private String vectorizedInputFormatSupportEnabled;
  private boolean isLlapIoEnabled;
  private Set<Support> vectorizedInputFormatSupportEnabledSet;
  private Collection<Class<?>> rowDeserializeInputFormatExcludes;
  private int vectorizedPTFMaxMemoryBufferingBatchCount;
  private int vectorizedTestingReducerBatchSize;
  private boolean isTestVectorizerSuppressFatalExceptions;

  private boolean isSchemaEvolution;

  private HiveVectorAdaptorUsageMode hiveVectorAdaptorUsageMode;

  // VectorDeserialize of text supports every Support capability.
  private static final Set<Support> vectorDeserializeTextSupportSet = new TreeSet<Support>();
  static {
    vectorDeserializeTextSupportSet.addAll(Arrays.asList(Support.values()));
  }

  // Input formats through which ACID tables may be read vectorized.
  private static final Set<String> supportedAcidInputFormats = new TreeSet<String>();
  static {
    supportedAcidInputFormats.add(OrcInputFormat.class.getName());
    // For metadataonly or empty rows optimizations, null/onerow input format can be selected.
supportedAcidInputFormats.add(NullRowsInputFormat.class.getName());
    supportedAcidInputFormats.add(OneNullRowInputFormat.class.getName());
    supportedAcidInputFormats.add(ZeroRowsInputFormat.class.getName());
  }

  private boolean isTestVectorizationSuppressExplainExecutionMode;

  // Mutable per-work state used while validating/vectorizing one BaseWork.
  private BaseWork currentBaseWork;
  private Operator<? extends OperatorDesc> currentOperator;
  private Collection<Class<?>> vectorizedInputFormatExcludes;
  // Parent operators whose child pointers must be swapped to the vectorized child
  // after the whole tree is built; see queueDelayedFixup/runDelayedFixups.
  // NOTE(review): the initializer's type arguments (Operator<?>) do not match the
  // declared value type (Operator<? extends OperatorDesc>) — looks like a
  // transcription artifact; confirm against the repository source.
  private Map<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>> delayedFixups =
      new IdentityHashMap<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<?>, Operator<?>>>>();

  // Test hook: lets unit tests install a BaseWork to receive not-vectorized reasons.
  public void testSetCurrentBaseWork(BaseWork testBaseWork) {
    currentBaseWork = testBaseWork;
  }

  // Record a work-level reason why vectorization was rejected (shown in EXPLAIN).
  private void setNodeIssue(String issue) {
    currentBaseWork.setNotVectorizedReason(
        VectorizerReason.createNodeIssue(issue));
  }

  // Record an operator-level rejection reason against currentOperator.
  private void setOperatorIssue(String issue) {
    currentBaseWork.setNotVectorizedReason(
        VectorizerReason.createOperatorIssue(currentOperator, issue));
  }

  // Record an expression-level rejection reason against currentOperator.
  private void setExpressionIssue(String expressionTitle, String issue) {
    currentBaseWork.setNotVectorizedReason(
        VectorizerReason.createExpressionIssue(currentOperator, expressionTitle, issue));
  }

  private void clearNotVectorizedReason() {
    currentBaseWork.setNotVectorizedReason(null);
  }

  private long vectorizedVertexNum = -1;

  private Set<VirtualColumn> availableVectorizedVirtualColumnSet = null;
  private Set<VirtualColumn> neededVirtualColumnSet = null;

  private PlanMapper planMapper;

  // Control-flow exception: aborts vectorization of the current tree; the
  // reason has already been recorded via setNodeIssue/setOperatorIssue.
  public class VectorizerCannotVectorizeException extends Exception {
  }

  public Vectorizer() {

    /*
     * We check UDFs against the supportedGenericUDFs when
     * hive.vectorized.adaptor.usage.mode=chosen or none.
     *
     * We allow all UDFs for hive.vectorized.adaptor.usage.mode=all.
*/ supportedGenericUDFs.add(GenericUDFOPPlus.class); supportedGenericUDFs.add(GenericUDFOPMinus.class); supportedGenericUDFs.add(GenericUDFOPMultiply.class); supportedGenericUDFs.add(GenericUDFOPDivide.class); supportedGenericUDFs.add(GenericUDFOPMod.class); supportedGenericUDFs.add(GenericUDFOPNegative.class); supportedGenericUDFs.add(GenericUDFOPPositive.class); supportedGenericUDFs.add(GenericUDFOPEqualOrLessThan.class); supportedGenericUDFs.add(GenericUDFOPEqualOrGreaterThan.class); supportedGenericUDFs.add(GenericUDFOPGreaterThan.class); supportedGenericUDFs.add(GenericUDFOPLessThan.class); supportedGenericUDFs.add(GenericUDFOPNot.class); supportedGenericUDFs.add(GenericUDFOPNotEqual.class); supportedGenericUDFs.add(GenericUDFOPNotNull.class); supportedGenericUDFs.add(GenericUDFOPNull.class); supportedGenericUDFs.add(GenericUDFOPOr.class); supportedGenericUDFs.add(GenericUDFOPAnd.class); supportedGenericUDFs.add(GenericUDFOPEqual.class); supportedGenericUDFs.add(GenericUDFLength.class); supportedGenericUDFs.add(GenericUDFCharacterLength.class); supportedGenericUDFs.add(GenericUDFOctetLength.class); supportedGenericUDFs.add(UDFYear.class); supportedGenericUDFs.add(UDFMonth.class); supportedGenericUDFs.add(UDFDayOfMonth.class); supportedGenericUDFs.add(UDFDayOfWeek.class); supportedGenericUDFs.add(UDFHour.class); supportedGenericUDFs.add(UDFMinute.class); supportedGenericUDFs.add(UDFSecond.class); supportedGenericUDFs.add(UDFWeekOfYear.class); supportedGenericUDFs.add(GenericUDFToUnixTimeStamp.class); supportedGenericUDFs.add(GenericUDFFromUnixTime.class); supportedGenericUDFs.add(GenericUDFDateAdd.class); supportedGenericUDFs.add(GenericUDFDateSub.class); supportedGenericUDFs.add(GenericUDFDate.class); supportedGenericUDFs.add(GenericUDFDateDiff.class); supportedGenericUDFs.add(UDFLike.class); supportedGenericUDFs.add(GenericUDFRegExp.class); supportedGenericUDFs.add(UDFRegExpExtract.class); supportedGenericUDFs.add(UDFRegExpReplace.class); 
supportedGenericUDFs.add(UDFSubstr.class); supportedGenericUDFs.add(GenericUDFLTrim.class); supportedGenericUDFs.add(GenericUDFRTrim.class); supportedGenericUDFs.add(GenericUDFTrim.class); supportedGenericUDFs.add(UDFSin.class); supportedGenericUDFs.add(UDFCos.class); supportedGenericUDFs.add(UDFTan.class); supportedGenericUDFs.add(UDFAsin.class); supportedGenericUDFs.add(UDFAcos.class); supportedGenericUDFs.add(UDFAtan.class); supportedGenericUDFs.add(UDFDegrees.class); supportedGenericUDFs.add(UDFRadians.class); supportedGenericUDFs.add(GenericUDFFloor.class); supportedGenericUDFs.add(GenericUDFCeil.class); supportedGenericUDFs.add(UDFExp.class); supportedGenericUDFs.add(UDFLn.class); supportedGenericUDFs.add(UDFLog2.class); supportedGenericUDFs.add(UDFLog10.class); supportedGenericUDFs.add(UDFLog.class); supportedGenericUDFs.add(GenericUDFPower.class); supportedGenericUDFs.add(GenericUDFRound.class); supportedGenericUDFs.add(GenericUDFBRound.class); supportedGenericUDFs.add(GenericUDFPosMod.class); supportedGenericUDFs.add(UDFSqrt.class); supportedGenericUDFs.add(UDFSign.class); supportedGenericUDFs.add(UDFRand.class); supportedGenericUDFs.add(UDFBin.class); supportedGenericUDFs.add(UDFHex.class); supportedGenericUDFs.add(UDFConv.class); supportedGenericUDFs.add(GenericUDFLower.class); supportedGenericUDFs.add(GenericUDFUpper.class); supportedGenericUDFs.add(GenericUDFConcat.class); supportedGenericUDFs.add(GenericUDFAbs.class); supportedGenericUDFs.add(GenericUDFBetween.class); supportedGenericUDFs.add(GenericUDFIn.class); supportedGenericUDFs.add(GenericUDFCase.class); supportedGenericUDFs.add(GenericUDFWhen.class); supportedGenericUDFs.add(GenericUDFCoalesce.class); supportedGenericUDFs.add(GenericUDFElt.class); supportedGenericUDFs.add(GenericUDFInitCap.class); supportedGenericUDFs.add(GenericUDFInBloomFilter.class); supportedGenericUDFs.add(GenericUDFMurmurHash.class); // For type casts supportedGenericUDFs.add(UDFToLong.class); 
supportedGenericUDFs.add(UDFToInteger.class);
    supportedGenericUDFs.add(UDFToShort.class);
    supportedGenericUDFs.add(UDFToByte.class);
    supportedGenericUDFs.add(UDFToBoolean.class);
    supportedGenericUDFs.add(UDFToFloat.class);
    supportedGenericUDFs.add(UDFToDouble.class);
    supportedGenericUDFs.add(GenericUDFToString.class);
    supportedGenericUDFs.add(GenericUDFTimestamp.class);
    supportedGenericUDFs.add(GenericUDFToDecimal.class);
    supportedGenericUDFs.add(GenericUDFToDate.class);
    supportedGenericUDFs.add(GenericUDFToChar.class);
    supportedGenericUDFs.add(GenericUDFToVarchar.class);
    supportedGenericUDFs.add(GenericUDFToIntervalYearMonth.class);
    supportedGenericUDFs.add(GenericUDFToIntervalDayTime.class);

    // For conditional expressions
    supportedGenericUDFs.add(GenericUDFIf.class);

    // Aggregation function names with vectorized implementations.
    supportedAggregationUdfs.add("min");
    supportedAggregationUdfs.add("max");
    supportedAggregationUdfs.add("count");
    supportedAggregationUdfs.add("sum");
    supportedAggregationUdfs.add("avg");
    supportedAggregationUdfs.add("variance");
    supportedAggregationUdfs.add("var_pop");
    supportedAggregationUdfs.add("var_samp");
    supportedAggregationUdfs.add("std");
    supportedAggregationUdfs.add("stddev");
    supportedAggregationUdfs.add("stddev_pop");
    supportedAggregationUdfs.add("stddev_samp");
    supportedAggregationUdfs.add("bloom_filter");
    supportedAggregationUdfs.add("compute_bit_vector_hll");
  }

  /*
   * Column metadata gathered while validating one task's operator tree;
   * transferred onto the BaseWork (as a VectorizedRowBatchCtx) via transferToBaseWork.
   */
  private class VectorTaskColumnInfo {

    // All column names/types for the task, in schema order.
    List<String> allColumnNames;
    List<TypeInfo> allTypeInfos;
    // Indices of the data (non-partition, non-virtual) columns, or null.
    List<Integer> dataColumnNums;

    int partitionColumnCount;
    List<VirtualColumn> availableVirtualColumnList;
    List<VirtualColumn> neededVirtualColumnList;

    //not to be confused with useVectorizedInputFileFormat at Vectorizer level
    //which represents the value of configuration hive.vectorized.use.vectorized.input.format
    private boolean useVectorizedInputFileFormat;

    Set<Support> inputFormatSupportSet;
    Set<Support> supportSetInUse;
    List<String> supportRemovedReasons;

    List<DataTypePhysicalVariation> allDataTypePhysicalVariations;

    // True when every vectorized operator in the task is "native" (no row-mode bridge).
    boolean
allNative;
    // True when any expression fell back to the row-mode VectorUDFAdaptor.
    boolean usesVectorUDFAdaptor;

    // Scratch-column types allocated by the VectorizationContext.
    String[] scratchTypeNameArray;
    DataTypePhysicalVariation[] scratchdataTypePhysicalVariations;

    // Reduce-side key sort/null ordering (ReduceWork only).
    String reduceColumnSortOrder;
    String reduceColumnNullOrder;

    VectorTaskColumnInfo() {
      partitionColumnCount = 0;
    }

    // Optimistic defaults; validation clears them as non-native or
    // adaptor-requiring operators are encountered.
    public void assume() {
      allNative = true;
      usesVectorUDFAdaptor =  false;
    }

    public void setAllColumnNames(List<String> allColumnNames) {
      this.allColumnNames = allColumnNames;
    }
    public void setAllTypeInfos(List<TypeInfo> allTypeInfos) {
      this.allTypeInfos = allTypeInfos;
    }
    public void setDataColumnNums(List<Integer> dataColumnNums) {
      this.dataColumnNums = dataColumnNums;
    }
    public void setPartitionColumnCount(int partitionColumnCount) {
      this.partitionColumnCount = partitionColumnCount;
    }
    public void setAvailableVirtualColumnList(List<VirtualColumn> availableVirtualColumnList) {
      this.availableVirtualColumnList = availableVirtualColumnList;
    }
    public void setNeededVirtualColumnList(List<VirtualColumn> neededVirtualColumnList) {
      this.neededVirtualColumnList = neededVirtualColumnList;
    }
    public void setSupportSetInUse(Set<Support> supportSetInUse) {
      this.supportSetInUse = supportSetInUse;
    }
    public void setSupportRemovedReasons(List<String> supportRemovedReasons) {
      this.supportRemovedReasons = supportRemovedReasons;
    }
    public void setAlldataTypePhysicalVariations(List<DataTypePhysicalVariation> allDataTypePhysicalVariations) {
      this.allDataTypePhysicalVariations = allDataTypePhysicalVariations;
    }
    public void setScratchTypeNameArray(String[] scratchTypeNameArray) {
      this.scratchTypeNameArray = scratchTypeNameArray;
    }
    public void setScratchdataTypePhysicalVariationsArray(DataTypePhysicalVariation[] scratchdataTypePhysicalVariations) {
      this.scratchdataTypePhysicalVariations = scratchdataTypePhysicalVariations;
    }
    public void setAllNative(boolean allNative) {
      this.allNative = allNative;
    }
    public void setUsesVectorUDFAdaptor(boolean usesVectorUDFAdaptor) {
      this.usesVectorUDFAdaptor = usesVectorUDFAdaptor;
    }
    public void setUseVectorizedInputFileFormat(boolean useVectorizedInputFileFormat) {
      this.useVectorizedInputFileFormat = useVectorizedInputFileFormat;
    }
    public void setInputFormatSupportSet(Set<Support> inputFormatSupportSet) {
      this.inputFormatSupportSet = inputFormatSupportSet;
    }
    public void setReduceColumnSortOrder(String reduceColumnSortOrder) {
      this.reduceColumnSortOrder = reduceColumnSortOrder;
    }
    public void setReduceColumnNullOrder(String reduceColumnNullOrder) {
      this.reduceColumnNullOrder = reduceColumnNullOrder;
    }

    // Build a VectorizedRowBatchCtx from the gathered metadata and attach it
    // (plus the native/adaptor flags) to the given BaseWork.
    public void transferToBaseWork(BaseWork baseWork) {

      final int virtualColumnCount =
          (availableVirtualColumnList == null ? 0 : availableVirtualColumnList.size());
      VirtualColumn[] neededVirtualColumns;
      if (neededVirtualColumnList != null && neededVirtualColumnList.size() > 0) {
        neededVirtualColumns = neededVirtualColumnList.toArray(new VirtualColumn[0]);
      } else {
        neededVirtualColumns = new VirtualColumn[0];
      }

      String[] allColumnNameArray = allColumnNames.toArray(new String[0]);
      TypeInfo[] allTypeInfoArray = allTypeInfos.toArray(new TypeInfo[0]);
      int[] dataColumnNumsArray;
      if (dataColumnNums != null) {
        dataColumnNumsArray = ArrayUtils.toPrimitive(dataColumnNums.toArray(new Integer[0]));
      } else {
        dataColumnNumsArray = null;
      }

      // Default every column to the NONE physical variation when none were recorded.
      DataTypePhysicalVariation[] allDataTypePhysicalVariationArray;
      if (allDataTypePhysicalVariations == null) {
        allDataTypePhysicalVariationArray = new DataTypePhysicalVariation[allTypeInfoArray.length];
        Arrays.fill(allDataTypePhysicalVariationArray, DataTypePhysicalVariation.NONE);
      } else {
        allDataTypePhysicalVariationArray =
            allDataTypePhysicalVariations.toArray(new DataTypePhysicalVariation[0]);
      }

      VectorizedRowBatchCtx vectorizedRowBatchCtx =
          new VectorizedRowBatchCtx(
            allColumnNameArray,
            allTypeInfoArray,
            allDataTypePhysicalVariationArray,
            dataColumnNumsArray,
            partitionColumnCount,
            virtualColumnCount,
            neededVirtualColumns,
            scratchTypeNameArray,
            scratchdataTypePhysicalVariations);
      baseWork.setVectorizedRowBatchCtx(vectorizedRowBatchCtx);

      if
(baseWork instanceof MapWork) {
        // MapWork additionally records how the input was vectorized and which
        // Support capabilities were used/removed (surfaced in EXPLAIN).
        MapWork mapWork = (MapWork) baseWork;
        mapWork.setUseVectorizedInputFileFormat(useVectorizedInputFileFormat);
        mapWork.setInputFormatSupportSet(inputFormatSupportSet);
        mapWork.setSupportSetInUse(supportSetInUse);
        mapWork.setSupportRemovedReasons(supportRemovedReasons);
      }

      if (baseWork instanceof ReduceWork) {
        ReduceWork reduceWork = (ReduceWork) baseWork;
        reduceWork.setVectorReduceColumnSortOrder(reduceColumnSortOrder);
        reduceWork.setVectorReduceColumnNullOrder(reduceColumnNullOrder);
      }

      baseWork.setAllNative(allNative);
      baseWork.setUsesVectorUDFAdaptor(usesVectorUDFAdaptor);

      baseWork.setIsTestForcedVectorizationEnable(isTestForcedVectorizationEnable);
      baseWork.setIsTestVectorizationSuppressExplainExecutionMode(
          isTestVectorizationSuppressExplainExecutionMode);
    }
  }

  /*
   * Used as a dummy root operator to attach vectorized operators that will be built in parallel
   * to the current non-vectorized operator tree.
   */
  private static class DummyRootVectorDesc extends AbstractOperatorDesc {

    public DummyRootVectorDesc() {
      super();
    }
  }

  // Placeholder operator; never executes (process throws), only anchors the tree.
  private static class DummyOperator extends Operator<DummyRootVectorDesc> {

    public DummyOperator() {
      super(new CompilationOpContext());
    }

    @Override
    public void process(Object row, int tag) throws HiveException {
      throw new RuntimeException("Not used");
    }

    @Override
    public String getName() {
      return "DUMMY";
    }

    @Override
    public OperatorType getType() {
      return null;
    }
  }

  // Dummy root that carries the task-level VectorizationContext for its children.
  private static class DummyVectorOperator extends DummyOperator
      implements VectorizationOperator {

    private VectorizationContext vContext;

    public DummyVectorOperator(VectorizationContext vContext) {
      super();
      this.conf = new DummyRootVectorDesc();
      this.vContext = vContext;
    }

    @Override
    public VectorizationContext getInputVectorizationContext() {
      return vContext;
    }

    @Override
    public VectorDesc getVectorDesc() {
      return null;
    }
  }

  private static List<Operator<? extends OperatorDesc>> newOperatorList() {
    return new ArrayList<Operator<?
extends OperatorDesc>>(); }

  /**
   * Debug helper: prints the operators above (parents) and below (children) of a join
   * operator, level by level, prefixed with a caller-supplied tag.
   *
   * Fix: the child traversal previously did {@code depth--} after starting at 1, so
   * successively deeper children were labeled 1, 0, -1, ...; it now increments so the
   * printed "child depth" grows with distance from the join, mirroring the parent walk
   * which counts 0, -1, -2, ... going upward.
   *
   * @param joinOperator the join operator at depth 0
   * @param prefix       tag prepended to every printed line
   */
  public static void debugDisplayJoinOperatorTree(Operator<? extends OperatorDesc> joinOperator,
      String prefix) {

    // Breadth-first walk upward through parents; depth 0, -1, -2, ...
    List<Operator<? extends OperatorDesc>> currentParentList = newOperatorList();
    currentParentList.add(joinOperator);
    int depth = 0;
    do {
      List<Operator<? extends OperatorDesc>> nextParentList = newOperatorList();
      final int count = currentParentList.size();
      for (int i = 0; i < count; i++) {
        Operator<? extends OperatorDesc> parent = currentParentList.get(i);
        System.out.println(prefix + " parent depth " + depth + " " +
            parent.getClass().getSimpleName() + " " + parent.toString());
        List<Operator<? extends OperatorDesc>> parentList = parent.getParentOperators();
        if (parentList == null || parentList.size() == 0) {
          continue;
        }
        nextParentList.addAll(parentList);
      }
      currentParentList = nextParentList;
      depth--;
    } while (currentParentList.size() > 0);

    // Breadth-first walk downward through children; depth 1, 2, 3, ...
    List<Operator<? extends OperatorDesc>> currentChildList = newOperatorList();
    currentChildList.addAll(joinOperator.getChildOperators());
    depth = 1;
    do {
      List<Operator<? extends OperatorDesc>> nextChildList = newOperatorList();
      final int count = currentChildList.size();
      for (int i = 0; i < count; i++) {
        Operator<? extends OperatorDesc> child = currentChildList.get(i);
        System.out.println(prefix + " child depth " + depth + " " +
            child.getClass().getSimpleName() + " " + child.toString());
        List<Operator<? extends OperatorDesc>> childList = child.getChildOperators();
        if (childList == null || childList.size() == 0) {
          continue;
        }
        nextChildList.addAll(childList);
      }
      currentChildList = nextChildList;
      depth++;  // FIX: was depth--, which printed non-increasing child depths
    } while (currentChildList.size() > 0);
  }

  private Operator<? extends OperatorDesc> validateAndVectorizeOperatorTree(
      Operator<?
extends OperatorDesc> nonVecRootOperator,
      boolean isReduce, boolean isTezOrSpark,
      VectorTaskColumnInfo vectorTaskColumnInfo)
          throws VectorizerCannotVectorizeException {

    // Task-level context seeded with the full column schema; per-operator
    // contexts are derived from it as the tree is walked.
    VectorizationContext taskVContext =
        new VectorizationContext(
            "Task",
            vectorTaskColumnInfo.allColumnNames,
            vectorTaskColumnInfo.allTypeInfos,
            vectorTaskColumnInfo.allDataTypePhysicalVariations,
            hiveConf);

    List<Operator<? extends OperatorDesc>> currentParentList = newOperatorList();
    currentParentList.add(nonVecRootOperator);

    // Start with dummy vector operator as the parent of the parallel vector operator tree we are
    // creating
    Operator<? extends OperatorDesc> dummyVectorOperator = new DummyVectorOperator(taskVContext);
    List<Operator<? extends OperatorDesc>> currentVectorParentList = newOperatorList();
    currentVectorParentList.add(dummyVectorOperator);

    delayedFixups.clear();

    // Breadth-first: walk the original tree and its vector twin in lockstep,
    // vectorizing each level's children before descending.
    do {
      List<Operator<? extends OperatorDesc>> nextParentList = newOperatorList();
      List<Operator<? extends OperatorDesc>> nextVectorParentList = newOperatorList();

      final int count = currentParentList.size();
      for (int i = 0; i < count; i++) {
        Operator<? extends OperatorDesc> parent = currentParentList.get(i);

        List<Operator<? extends OperatorDesc>> childrenList = parent.getChildOperators();
        if (childrenList == null || childrenList.size() == 0) {
          continue;
        }

        Operator<? extends OperatorDesc> vectorParent = currentVectorParentList.get(i);

        /*
         * Vectorize this parent's children. Plug them into vectorParent's children list.
         *
         * Add those children / vector children to nextParentList / nextVectorParentList.
         */
        doProcessChildren(
            parent, vectorParent, nextParentList, nextVectorParentList,
            isReduce, isTezOrSpark, vectorTaskColumnInfo);
      }
      currentParentList = nextParentList;
      currentVectorParentList = nextVectorParentList;
    } while (currentParentList.size() > 0);

    // Patch cross-edges (e.g. small-table parents of MapJoin) now that every
    // operator has a vector counterpart.
    runDelayedFixups();

    return dummyVectorOperator;
  }

  private void doProcessChildren(
      Operator<? extends OperatorDesc> parent,
      Operator<? extends OperatorDesc> vectorParent,
      List<Operator<?
extends OperatorDesc>> nextParentList,
      List<Operator<? extends OperatorDesc>> nextVectorParentList,
      boolean isReduce, boolean isTezOrSpark,
      VectorTaskColumnInfo vectorTaskColumnInfo)
          throws VectorizerCannotVectorizeException {

    List<Operator<? extends OperatorDesc>> children = parent.getChildOperators();
    final int childrenCount = children.size();
    for (int i = 0; i < childrenCount; i++) {
      Operator<? extends OperatorDesc> child = children.get(i);

      // Vectorize the child; throws VectorizerCannotVectorizeException on failure.
      Operator<? extends OperatorDesc> vectorChild =
          doProcessChild(
              child, vectorParent, isReduce, isTezOrSpark, vectorTaskColumnInfo);

      fixupNewVectorChild(
          parent,
          vectorParent,
          child,
          vectorChild);

      nextParentList.add(child);
      nextVectorParentList.add(vectorChild);
    }
  }

  /*
   * Fixup the children and parents of a new vector child.
   *
   * 1) Add new vector child to the vector parent's children list.
   *
   * 2) Copy and fixup the parent list of the original child instead of just assuming a 1:1
   *    relationship.
   *
   *    a) When the child is MapJoinOperator, it will have an extra parent HashTableDummyOperator
   *       for the MapJoinOperator's small table. It needs to be fixed up, too.
   */
  private void fixupNewVectorChild(
      Operator<? extends OperatorDesc> parent,
      Operator<? extends OperatorDesc> vectorParent,
      Operator<? extends OperatorDesc> child,
      Operator<? extends OperatorDesc> vectorChild) {

    // 1) Add new vector child to the vector parent's children list.
    vectorParent.getChildOperators().add(vectorChild);

    // 2) Copy and fixup the parent list of the original child instead of just assuming a 1:1
    //    relationship.
    List<Operator<? extends OperatorDesc>> childMultipleParents = newOperatorList();
    childMultipleParents.addAll(child.getParentOperators());
    final int childMultipleParentCount = childMultipleParents.size();
    for (int i = 0; i < childMultipleParentCount; i++) {
      Operator<?
extends OperatorDesc> childMultipleParent = childMultipleParents.get(i); if (childMultipleParent == parent) { childMultipleParents.set(i, vectorParent); } else { queueDelayedFixup(childMultipleParent, child, vectorChild); } } vectorChild.setParentOperators(childMultipleParents); } /* * The fix up is delayed so that the parent operators aren't modified until the entire operator * tree has been vectorized. */ private void queueDelayedFixup(Operator<? extends OperatorDesc> parent, Operator<? extends OperatorDesc> child, Operator<? extends OperatorDesc> vectorChild) { if (delayedFixups.get(parent) == null) { HashSet<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>> value = new HashSet<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>(1); delayedFixups.put(parent, value); } delayedFixups.get(parent).add( new ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>( child, vectorChild)); } private void runDelayedFixups() { for (Entry<Operator<? extends OperatorDesc>, Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>>> delayed : delayedFixups.entrySet()) { Operator<? extends OperatorDesc> key = delayed.getKey(); Set<ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>> value = delayed.getValue(); for (ImmutablePair<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>> swap : value) { fixupOtherParent(key, swap.getLeft(), swap.getRight()); } } delayedFixups.clear(); } private void fixupOtherParent( Operator<? extends OperatorDesc> childMultipleParent, Operator<? extends OperatorDesc> child, Operator<? extends OperatorDesc> vectorChild) { List<Operator<? extends OperatorDesc>> children = childMultipleParent.getChildOperators(); final int childrenCount = children.size(); for (int i = 0; i < childrenCount; i++) { Operator<? 
extends OperatorDesc> myChild = children.get(i); if (myChild == child) { children.set(i, vectorChild); } } } private Operator<? extends OperatorDesc> doProcessChild( Operator<? extends OperatorDesc> child, Operator<? extends OperatorDesc> vectorParent, boolean isReduce, boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws VectorizerCannotVectorizeException { // Use vector parent to get VectorizationContext. final VectorizationContext vContext; if (vectorParent instanceof VectorizationContextRegion) { vContext = ((VectorizationContextRegion) vectorParent).getOutputVectorizationContext(); } else { vContext = ((VectorizationOperator) vectorParent).getInputVectorizationContext(); } Operator<? extends OperatorDesc> vectorChild; try { vectorChild = validateAndVectorizeOperator(child, vContext, isReduce, isTezOrSpark, vectorTaskColumnInfo); } catch (HiveException e) { String issue = "exception: " + VectorizationContext.getStackTraceAsSingleLine(e); setNodeIssue(issue); throw new VectorizerCannotVectorizeException(); } return vectorChild; } class VectorizationDispatcher implements SemanticDispatcher { @Override public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException { Task<?> currTask = (Task<?>) nd; if (currTask instanceof MapRedTask) { MapredWork mapredWork = ((MapRedTask) currTask).getWork(); MapWork mapWork = mapredWork.getMapWork(); setMapWorkExplainConditions(mapWork); convertMapWork(mapredWork.getMapWork(), /* isTezOrSpark */ false); logMapWorkExplainVectorization(mapWork); ReduceWork reduceWork = mapredWork.getReduceWork(); if (reduceWork != null) { // Always set the EXPLAIN conditions. setReduceWorkExplainConditions(reduceWork); // We do not vectorize MR Reduce. 
logReduceWorkExplainVectorization(reduceWork); } } else if (currTask instanceof TezTask) { TezWork work = ((TezTask) currTask).getWork(); for (BaseWork baseWork: work.getAllWork()) { if (baseWork instanceof MapWork) { MapWork mapWork = (MapWork) baseWork; setMapWorkExplainConditions(mapWork); convertMapWork(mapWork, /* isTezOrSpark */ true); logMapWorkExplainVectorization(mapWork); } else if (baseWork instanceof ReduceWork) { ReduceWork reduceWork = (ReduceWork) baseWork; // Always set the EXPLAIN conditions. setReduceWorkExplainConditions(reduceWork); // We are only vectorizing Reduce under Tez/Spark. if (isReduceVectorizationEnabled) { convertReduceWork(reduceWork); } logReduceWorkExplainVectorization(reduceWork); } else if (baseWork instanceof MergeJoinWork){ MergeJoinWork mergeJoinWork = (MergeJoinWork) baseWork; // Always set the EXPLAIN conditions. setMergeJoinWorkExplainConditions(mergeJoinWork); logMergeJoinWorkExplainVectorization(mergeJoinWork); } } } else if (currTask instanceof SparkTask) { SparkWork sparkWork = (SparkWork) currTask.getWork(); for (BaseWork baseWork : sparkWork.getAllWork()) { if (baseWork instanceof MapWork) { MapWork mapWork = (MapWork) baseWork; setMapWorkExplainConditions(mapWork); convertMapWork(mapWork, /* isTezOrSpark */ true); logMapWorkExplainVectorization(mapWork); } else if (baseWork instanceof ReduceWork) { ReduceWork reduceWork = (ReduceWork) baseWork; // Always set the EXPLAIN conditions. setReduceWorkExplainConditions(reduceWork); if (isReduceVectorizationEnabled) { convertReduceWork(reduceWork); } logReduceWorkExplainVectorization(reduceWork); } } } else if (currTask instanceof FetchTask) { LOG.info("Vectorizing Fetch not supported"); } else { if (LOG.isDebugEnabled()) { LOG.debug("Ignoring vectorization of " + currTask.getClass().getSimpleName()); } } return null; } private void setExplainConditions(BaseWork baseWork) { // Global used when setting errors, etc. 
    currentBaseWork = baseWork;

    // Monotonically increasing vertex number distinguishes vertices in EXPLAIN output.
    baseWork.setVectorizedVertexNum(++vectorizedVertexNum);
    baseWork.setVectorizationExamined(true);
  }

  private void setMapWorkExplainConditions(MapWork mapWork) {
    setExplainConditions(mapWork);
  }

  private void setReduceWorkExplainConditions(ReduceWork reduceWork) {
    setExplainConditions(reduceWork);

    // Reduce-side EXPLAIN additionally records whether reduce vectorization is on and
    // which execution engine is in use.
    reduceWork.setReduceVectorizationEnabled(isReduceVectorizationEnabled);
    reduceWork.setVectorReduceEngine(
        HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
  }

  private void setMergeJoinWorkExplainConditions(MergeJoinWork mergeJoinWork) {
    setExplainConditions(mergeJoinWork);
  }

  // Log the common vectorization facts for one BaseWork.  Returns false (and logs nothing)
  // when the work was never examined, so callers can skip their own per-work logging.
  private boolean logExplainVectorization(BaseWork baseWork, String name) {

    if (!baseWork.getVectorizationExamined()) {
      return false;
    }

    LOG.info(name + " vectorization enabled: " + baseWork.getVectorizationEnabled());
    boolean isVectorized = baseWork.getVectorMode();
    LOG.info(name + " vectorized: " + isVectorized);
    if (!isVectorized) {
      VectorizerReason notVectorizedReason = baseWork.getNotVectorizedReason();
      if (notVectorizedReason != null) {
        LOG.info(name + " notVectorizedReason: " + notVectorizedReason.toString());
      }
    }
    LOG.info(name + " vectorizedVertexNum: " + baseWork.getVectorizedVertexNum());

    // Batch-context details are debug-only and only meaningful when vectorization succeeded.
    if (LOG.isDebugEnabled() && isVectorized) {
      VectorizedRowBatchCtx batchContext = baseWork.getVectorizedRowBatchCtx();
      LOG.debug(name + " dataColumnCount: " + batchContext.getDataColumnCount());
      int[] dataColumnNums = batchContext.getDataColumnNums();
      if (dataColumnNums != null) {
        LOG.debug(name + " includeColumns: " + Arrays.toString(dataColumnNums));
      }
      LOG.debug(name + " partitionColumnCount: " + batchContext.getPartitionColumnCount());
      LOG.debug(name + " dataColumns: " +
          BaseWork.BaseExplainVectorization.getColumns(
              batchContext, 0, batchContext.getDataColumnCount()));
      LOG.debug(name + " scratchColumnTypeNames: " +
          BaseWork.BaseExplainVectorization.getScratchColumns(batchContext));
      VirtualColumn[] neededVirtualColumns = batchContext.getNeededVirtualColumns();
      if (neededVirtualColumns != null && neededVirtualColumns.length != 0) {
        LOG.debug(name + " neededVirtualColumns: " + Arrays.toString(neededVirtualColumns));
      }
    }
    return true;
  }

  // Map-side logging: common facts plus the enable-condition sets and input file formats.
  private void logMapWorkExplainVectorization(MapWork mapWork) {

    if (!logExplainVectorization(mapWork, "Map")) {
      return;
    }

    // Conditions.
    List<String> enabledConditionsMet = mapWork.getVectorizationEnabledConditionsMet();
    if (enabledConditionsMet != null && !enabledConditionsMet.isEmpty()) {
      LOG.info("Map enabledConditionsMet: " + enabledConditionsMet.toString());
    }
    List<String> enabledConditionsNotMet = mapWork.getVectorizationEnabledConditionsNotMet();
    if (enabledConditionsNotMet != null && !enabledConditionsNotMet.isEmpty()) {
      LOG.info("Map enabledConditionsNotMet: " + enabledConditionsNotMet.toString());
    }
    Set<String> inputFileFormatClassNameSet =
        mapWork.getVectorizationInputFileFormatClassNameSet();
    if (inputFileFormatClassNameSet != null && !inputFileFormatClassNameSet.isEmpty()) {
      LOG.info("Map inputFileFormatClassNameSet: " + inputFileFormatClassNameSet.toString());
    }
  }

  // Reduce-side logging: common facts plus the reduce-enable flag and engine.
  private void logReduceWorkExplainVectorization(ReduceWork reduceWork) {

    if (!logExplainVectorization(reduceWork, "Reduce")) {
      return;
    }

    // Conditions.
    LOG.info("Reducer " + HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED.varname +
        ": " + reduceWork.getReduceVectorizationEnabled());
    LOG.info("Reducer engine: " + reduceWork.getVectorReduceEngine());
  }

  // MergeJoin logging: only the common facts apply.
  private void logMergeJoinWorkExplainVectorization(MergeJoinWork mergeJoinWork) {

    if (!logExplainVectorization(mergeJoinWork, "MergeJoin")) {
      return;
    }
  }

  // Entry point for Map-side vectorization of one MapWork.
  private void convertMapWork(MapWork mapWork, boolean isTezOrSpark) throws SemanticException {

    // We have to evaluate the input format to see if vectorization is enabled, so
    // we do not set it right here.
VectorTaskColumnInfo vectorTaskColumnInfo = new VectorTaskColumnInfo(); vectorTaskColumnInfo.assume(); validateAndVectorizeMapWork(mapWork, vectorTaskColumnInfo, isTezOrSpark); } /* * Determine if there is only one TableScanOperator. Currently in Map vectorization, we do not * try to vectorize multiple input trees. */ private ImmutablePair<String, TableScanOperator> verifyOnlyOneTableScanOperator(MapWork mapWork) { // Eliminate MR plans with more than one TableScanOperator. Map<String, Operator<? extends OperatorDesc>> aliasToWork = mapWork.getAliasToWork(); if ((aliasToWork == null) || (aliasToWork.size() == 0)) { setNodeIssue("Vectorized map work requires work"); return null; } int tableScanCount = 0; String alias = ""; TableScanOperator tableScanOperator = null; for (Entry<String, Operator<? extends OperatorDesc>> entry : aliasToWork.entrySet()) { Operator<?> op = entry.getValue(); if (op == null) { setNodeIssue("Vectorized map work requires a valid alias"); return null; } if (op instanceof TableScanOperator) { tableScanCount++; alias = entry.getKey(); tableScanOperator = (TableScanOperator) op; } } if (tableScanCount > 1) { setNodeIssue("Vectorized map work only works with 1 TableScanOperator"); return null; } return new ImmutablePair<>(alias, tableScanOperator); } private void getTableScanOperatorSchemaInfo(TableScanOperator tableScanOperator, List<String> logicalColumnNameList, List<TypeInfo> logicalTypeInfoList, List<VirtualColumn> availableVirtualColumnList) { // Add all columns to make a vectorization context for // the TableScan operator. RowSchema rowSchema = tableScanOperator.getSchema(); for (ColumnInfo c : rowSchema.getSignature()) { // Validation will later exclude vectorization of virtual columns usage if necessary. String columnName = c.getInternalName(); // Turns out partition columns get marked as virtual in ColumnInfo, so we need to // check the VirtualColumn directly. 
VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName); if (virtualColumn != null) { // The planner gives us a subset virtual columns available for this table scan. // AND // We only support some virtual columns in vectorization. // // So, create the intersection. Note these are available vectorizable virtual columns. // Later we remember which virtual columns were *actually used* in the query so // just those will be included in the Map VectorizedRowBatchCtx that has the // information for creating the Map VectorizedRowBatch. // if (!vectorizableVirtualColumns.contains(virtualColumn)) { continue; } if (virtualColumn == VirtualColumn.ROWID && !isVectorizedRowIdentifierEnabled) { continue; } availableVirtualColumnList.add(virtualColumn); } // All columns: data, partition, and virtual are added. logicalColumnNameList.add(columnName); logicalTypeInfoList.add(TypeInfoUtils.getTypeInfoFromTypeString(c.getTypeName())); } } private void determineDataColumnNums(TableScanOperator tableScanOperator, List<String> allColumnNameList, int dataColumnCount, List<Integer> dataColumnNums) { /* * The TableScanOperator's needed columns are just the data columns. */ Set<String> neededColumns = new HashSet<String>(tableScanOperator.getNeededColumns()); for (int dataColumnNum = 0; dataColumnNum < dataColumnCount; dataColumnNum++) { String columnName = allColumnNameList.get(dataColumnNum); if (neededColumns.contains(columnName)) { dataColumnNums.add(dataColumnNum); } } } private Support[] getVectorizedInputFormatSupports( Class<? extends InputFormat> inputFileFormatClass) { try { InputFormat inputFormat = FetchOperator.getInputFormatFromCache(inputFileFormatClass, hiveConf); if (inputFormat instanceof VectorizedInputFormatInterface) { return ((VectorizedInputFormatInterface) inputFormat).getSupportedFeatures(); } } catch (IOException e) { LOG.error("Unable to instantiate {} input format class. 
Cannot determine vectorization support.", e); } // FUTURE: Decide how to ask an input file format what vectorization features it supports. return null; } /* * Add the support of the VectorizedInputFileFormatInterface. */ private void addVectorizedInputFileFormatSupport( Set<Support> newSupportSet, boolean isInputFileFormatVectorized, Class<? extends InputFormat>inputFileFormatClass) { final Support[] supports; if (isInputFileFormatVectorized) { supports = getVectorizedInputFormatSupports(inputFileFormatClass); } else { supports = null; } if (supports == null) { // No support. } else { for (Support support : supports) { newSupportSet.add(support); } } } private void handleSupport( boolean isFirstPartition, Set<Support> inputFormatSupportSet, Set<Support> newSupportSet) { if (isFirstPartition) { inputFormatSupportSet.addAll(newSupportSet); } else if (!newSupportSet.equals(inputFormatSupportSet)){ // Do the intersection so only support in both is kept. inputFormatSupportSet.retainAll(newSupportSet); } } /* * Add a vector partition descriptor to partition descriptor, removing duplicate object. * * If the same vector partition descriptor has already been allocated, share that object. */ private void addVectorPartitionDesc(PartitionDesc pd, VectorPartitionDesc vpd, Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap) { VectorPartitionDesc existingEle = vectorPartitionDescMap.get(vpd); if (existingEle != null) { // Use the object we already have. vpd = existingEle; } else { vectorPartitionDescMap.put(vpd, vpd); } pd.setVectorPartitionDesc(vpd); } /* * There are 3 modes of reading for vectorization: * * 1) One for the Vectorized Input File Format which returns VectorizedRowBatch as the row. * * 2) One for using VectorDeserializeRow to deserialize each row into the VectorizedRowBatch. 
* Currently, these Input File Formats: * TEXTFILE * SEQUENCEFILE * * 3) And one using the regular partition deserializer to get the row object and assigning * the row object into the VectorizedRowBatch with VectorAssignRow. * This picks up Input File Format not supported by the other two. */ private boolean verifyAndSetVectorPartDesc( PartitionDesc pd, boolean isFullAcidTable, List<TypeInfo> allTypeInfoList, Set<String> inputFileFormatClassNameSet, Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap, Set<String> enabledConditionsMetSet, List<String> enabledConditionsNotMetList, Set<Support> newSupportSet, List<TypeInfo> dataTypeInfoList) { Class<? extends InputFormat> inputFileFormatClass = pd.getInputFileFormatClass(); String inputFileFormatClassName = inputFileFormatClass.getName(); final TypeInfo[] dataTypeInfos; if (dataTypeInfoList == null) { dataTypeInfos = EMPTY_TYPEINFO_ARRAY; } else { dataTypeInfos = dataTypeInfoList.toArray(new TypeInfo[dataTypeInfoList.size()]); } // Always collect input file formats. inputFileFormatClassNameSet.add(inputFileFormatClassName); boolean isInputFileFormatVectorized = Utilities.isInputFileFormatVectorized(pd); if (isFullAcidTable) { // Today, ACID tables are only ORC and that format is vectorizable. Verify these // assumptions. 
Preconditions.checkState(isInputFileFormatVectorized); Preconditions.checkState(supportedAcidInputFormats.contains(inputFileFormatClassName)); if (!useVectorizedInputFileFormat) { enabledConditionsNotMetList.add("Vectorizing ACID tables requires " + HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); return false; } addVectorizedInputFileFormatSupport( newSupportSet, isInputFileFormatVectorized, inputFileFormatClass); addVectorPartitionDesc( pd, VectorPartitionDesc.createVectorizedInputFileFormat( inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd), dataTypeInfos), vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); return true; } // Look for Pass-Thru case where InputFileFormat has VectorizedInputFormatInterface // and reads VectorizedRowBatch as a "row". if (useVectorizedInputFileFormat) { if (isInputFileFormatVectorized && !isInputFormatExcluded( inputFileFormatClassName, vectorizedInputFormatExcludes) && !hasUnsupportedVectorizedParquetDataType( inputFileFormatClass, allTypeInfoList)) { addVectorizedInputFileFormatSupport( newSupportSet, isInputFileFormatVectorized, inputFileFormatClass); addVectorPartitionDesc( pd, VectorPartitionDesc.createVectorizedInputFileFormat( inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd), dataTypeInfos), vectorPartitionDescMap); enabledConditionsMetSet.add( HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); return true; } // Fall through and look for other options... 
} if (!isSchemaEvolution) { enabledConditionsNotMetList.add( "Vectorizing tables without Schema Evolution requires " + HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); return false; } String deserializerClassName = pd.getDeserializerClassName(); // Look for InputFileFormat / Serde combinations we can deserialize more efficiently // using VectorDeserializeRow and a deserialize class with the DeserializeRead interface. // // Do the "vectorized" row-by-row deserialization into a VectorizedRowBatch in the // VectorMapOperator. boolean isTextFormat = inputFileFormatClassName.equals(TextInputFormat.class.getName()) && deserializerClassName.equals(LazySimpleSerDe.class.getName()); boolean isSequenceFormat = inputFileFormatClassName.equals(SequenceFileInputFormat.class.getName()) && (deserializerClassName.equals(LazyBinarySerDe.class.getName()) || deserializerClassName.equals(LazyBinarySerDe2.class.getName())); boolean isVectorDeserializeEligable = isTextFormat || isSequenceFormat; if (useVectorDeserialize) { // Currently, we support LazySimple deserialization: // // org.apache.hadoop.mapred.TextInputFormat // org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe // // AND // // org.apache.hadoop.mapred.SequenceFileInputFormat // org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe if (isTextFormat) { Properties properties = pd.getTableDesc().getProperties(); String lastColumnTakesRestString = properties.getProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST); boolean lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString.equalsIgnoreCase("true")); if (lastColumnTakesRest) { // If row mode will not catch this input file format, then not enabled. 
if (useRowDeserialize && !isInputFormatExcluded(inputFileFormatClassName, rowDeserializeInputFormatExcludes)) { enabledConditionsNotMetList.add( inputFileFormatClassName + " " + serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST + " must be disabled "); return false; } } else { // Add the support for read variations in Vectorized Text. newSupportSet.addAll(vectorDeserializeTextSupportSet); addVectorPartitionDesc( pd, VectorPartitionDesc.createVectorDeserialize( inputFileFormatClassName, VectorDeserializeType.LAZY_SIMPLE, dataTypeInfos), vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname); return true; } } else if (isSequenceFormat) { addVectorPartitionDesc( pd, VectorPartitionDesc.createVectorDeserialize( inputFileFormatClassName, VectorDeserializeType.LAZY_BINARY, dataTypeInfos), vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname); return true; } // Fall through and look for other options... } // Otherwise, if enabled, deserialize rows using regular Serde and add the object // inspect-able Object[] row to a VectorizedRowBatch in the VectorMapOperator. if (useRowDeserialize) { boolean isRowDeserializeExcluded = isInputFormatExcluded(inputFileFormatClassName, rowDeserializeInputFormatExcludes); if (!isRowDeserializeExcluded && !isInputFileFormatVectorized) { addVectorPartitionDesc( pd, VectorPartitionDesc.createRowDeserialize( inputFileFormatClassName, Utilities.isInputFileFormatSelfDescribing(pd), deserializerClassName, dataTypeInfos), vectorPartitionDescMap); enabledConditionsMetSet.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname); return true; } else if (isInputFileFormatVectorized) { /* * Vectorizer does not vectorize in row deserialize mode if the input format has * VectorizedInputFormat so input formats will be clear if the isVectorized flag * is on, they are doing VRB work. 
*/ enabledConditionsNotMetList.add("Row deserialization of vectorized input format not supported"); } else { enabledConditionsNotMetList.add(ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname + " IS true AND " + ConfVars.HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES.varname + " NOT CONTAINS " + inputFileFormatClassName); } } if (isInputFileFormatVectorized) { if(useVectorizedInputFileFormat) { enabledConditionsNotMetList.add( ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname + " IS true AND " + ConfVars.HIVE_VECTORIZATION_VECTORIZED_INPUT_FILE_FORMAT_EXCLUDES.varname + " NOT CONTAINS " + inputFileFormatClassName); } else { enabledConditionsNotMetList .add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT.varname); } } else { // Only offer these when the input file format is not the fast vectorized formats. if (isVectorDeserializeEligable) { Preconditions.checkState(!useVectorDeserialize); enabledConditionsNotMetList.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE.varname); } else { // Since row mode takes everyone. 
enabledConditionsNotMetList.add(HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE.varname); } } return false; } private boolean shouldUseVectorizedInputFormat(Set<String> inputFileFormatClassNames) { if (inputFileFormatClassNames == null || inputFileFormatClassNames.isEmpty() || !useVectorizedInputFileFormat) { return useVectorizedInputFileFormat; } //Global config of vectorized input format is enabled; check if these inputformats are excluded for (String inputFormat : inputFileFormatClassNames) { if(isInputFormatExcluded(inputFormat, vectorizedInputFormatExcludes)) { return false; } } return true; } private boolean isInputFormatExcluded(String inputFileFormatClassName, Collection<Class<?>> excludes) { Class<?> ifClass = null; try { ifClass = Class.forName(inputFileFormatClassName); } catch (ClassNotFoundException e) { LOG.warn("Cannot verify class for " + inputFileFormatClassName, e); return true; } if(excludes == null || excludes.isEmpty()) { return false; } for (Class<?> badClass : excludes) { if (badClass.isAssignableFrom(ifClass)) { return true; } } return false; } private boolean hasUnsupportedVectorizedParquetDataType( Class<? extends InputFormat> inputFileFormatClass, List<TypeInfo> allTypeInfoList) { if (!inputFileFormatClass.equals(org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat.class)) { return false; } /* * Currently, VectorizedParquetRecordReader cannot handle nested complex types. 
*/ for (TypeInfo typeInfo : allTypeInfoList) { if (!(typeInfo instanceof PrimitiveTypeInfo)) { switch (typeInfo.getCategory()) { case LIST: if (!(((ListTypeInfo) typeInfo).getListElementTypeInfo() instanceof PrimitiveTypeInfo)) { return true; } break; case MAP: { MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo; if (!(mapTypeInfo.getMapKeyTypeInfo() instanceof PrimitiveTypeInfo)) { return true; } if (!(mapTypeInfo.getMapValueTypeInfo() instanceof PrimitiveTypeInfo)) { return true; } } break; case STRUCT: { StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos(); for (TypeInfo fieldTypeInfo : fieldTypeInfos) { if (!(fieldTypeInfo instanceof PrimitiveTypeInfo)) { return true; } } } break; case UNION: // Not supported at all. return false; default: throw new RuntimeException( "Unsupported complex type category " + typeInfo.getCategory()); } } } return false; } private void setValidateInputFormatAndSchemaEvolutionExplain(MapWork mapWork, Set<String> inputFileFormatClassNameSet, Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap, Collection<String> enabledConditionsMetSet, Collection<String> enabledConditionsNotMetList) { mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet); ArrayList<VectorPartitionDesc> vectorPartitionDescList = new ArrayList<VectorPartitionDesc>(); vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet()); mapWork.setVectorPartitionDescList(vectorPartitionDescList); mapWork.setVectorizationEnabledConditionsMet(enabledConditionsMetSet); mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList); } private ImmutablePair<Boolean, Boolean> validateInputFormatAndSchemaEvolution(MapWork mapWork, String alias, TableScanOperator tableScanOperator, VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException { boolean isFullAcidTable = tableScanOperator.getConf().isFullAcidTable(); // These names/types are 
the data columns plus partition columns. final List<String> allColumnNameList = new ArrayList<String>(); final List<TypeInfo> allTypeInfoList = new ArrayList<TypeInfo>(); final List<VirtualColumn> availableVirtualColumnList = new ArrayList<VirtualColumn>(); getTableScanOperatorSchemaInfo( tableScanOperator, allColumnNameList, allTypeInfoList, availableVirtualColumnList); final int virtualColumnCount = availableVirtualColumnList.size(); final List<Integer> dataColumnNums = new ArrayList<Integer>(); final int dataAndPartColumnCount = allColumnNameList.size() - virtualColumnCount; /* * Validate input formats of all the partitions can be vectorized. */ boolean isFirst = true; int dataColumnCount = 0; int partitionColumnCount = 0; List<String> tableDataColumnList = null; List<TypeInfo> tableDataTypeInfoList = null; Map<Path, List<String>> pathToAliases = mapWork.getPathToAliases(); Map<Path, PartitionDesc> pathToPartitionInfo = mapWork.getPathToPartitionInfo(); // Remember the input file formats we validated and why. Set<String> inputFileFormatClassNameSet = new HashSet<String>(); Map<VectorPartitionDesc, VectorPartitionDesc> vectorPartitionDescMap = new LinkedHashMap<VectorPartitionDesc, VectorPartitionDesc>(); Set<String> enabledConditionsMetSet = new HashSet<String>(); List<String> enabledConditionsNotMetList = new ArrayList<String>(); Set<Support> inputFormatSupportSet = new TreeSet<Support>(); boolean outsideLoopIsFirstPartition = true; for (Entry<Path, List<String>> entry: pathToAliases.entrySet()) { final boolean isFirstPartition = outsideLoopIsFirstPartition; outsideLoopIsFirstPartition = false; Path path = entry.getKey(); List<String> aliases = entry.getValue(); boolean isPresent = (aliases != null && aliases.indexOf(alias) != -1); if (!isPresent) { setOperatorIssue("Alias " + alias + " not present in aliases " + aliases); return new ImmutablePair<Boolean,Boolean>(false, false); } // TODO: should this use getPartitionDescFromPathRecursively? 
That's what other code uses. PartitionDesc partDesc = pathToPartitionInfo.get(path); if (partDesc.getVectorPartitionDesc() != null) { // We've seen this already. continue; } Set<Support> newSupportSet = new TreeSet<Support>(); final List<TypeInfo> nextDataTypeInfoList; final Deserializer deserializer; final StructObjectInspector partObjectInspector; try { deserializer = partDesc.getDeserializer(hiveConf); partObjectInspector = (StructObjectInspector) deserializer.getObjectInspector(); } catch (Exception e) { throw new SemanticException(e); } if (isFirst) { /* * Determine the data and partition columns using the first partition descriptor's * partition count. In other words, how to split the schema columns -- the * allColumnNameList and allTypeInfoList variables -- into the data and partition columns. */ LinkedHashMap<String, String> partSpec = partDesc.getPartSpec(); if (partSpec != null && partSpec.size() > 0) { partitionColumnCount = partSpec.size(); dataColumnCount = dataAndPartColumnCount - partitionColumnCount; } else { partitionColumnCount = 0; dataColumnCount = dataAndPartColumnCount; } determineDataColumnNums(tableScanOperator, allColumnNameList, dataColumnCount, dataColumnNums); tableDataColumnList = allColumnNameList.subList(0, dataColumnCount); tableDataTypeInfoList = allTypeInfoList.subList(0, dataColumnCount); isFirst = false; } if (Utilities.isInputFileFormatSelfDescribing(partDesc)) { /* * Self-Describing Input Format will convert its data to the table schema. So, there * will be no VectorMapOperator conversion needed. */ nextDataTypeInfoList = tableDataTypeInfoList; } else { String nextDataTypesString = ObjectInspectorUtils.getFieldTypes(partObjectInspector); /* * We convert to an array of TypeInfo using a library routine since it parses the * information and can handle use of different separators, etc. We cannot use the * raw type string for comparison in the map because of the different separators used. 
 */
// NOTE(review): this is the tail of validateInputFormatAndSchemaEvolution(), whose beginning
// lies above this chunk.  Code is unchanged here; comments only.
nextDataTypeInfoList = TypeInfoUtils.getTypeInfosFromTypeString(nextDataTypesString);
}

// HIVE-20419: Vectorization: Prevent mutation of VectorPartitionDesc after being used in a
// hashmap key
final boolean isVerifiedVectorPartDesc =
    verifyAndSetVectorPartDesc(
        partDesc, isFullAcidTable,
        allTypeInfoList,
        inputFileFormatClassNameSet,
        vectorPartitionDescMap,
        enabledConditionsMetSet, enabledConditionsNotMetList,
        newSupportSet,
        nextDataTypeInfoList);

final VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();

if (!isVerifiedVectorPartDesc) {

  // Always set these so EXPLAIN can see.
  setValidateInputFormatAndSchemaEvolutionExplain(
      mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
      enabledConditionsMetSet, enabledConditionsNotMetList);

  // We consider this an enable issue, not a not vectorized issue.
  return new ImmutablePair<Boolean,Boolean>(false, true);
}

handleSupport(isFirstPartition, inputFormatSupportSet, newSupportSet);

// We need to get the partition's column names from the partition serde.
// (e.g. Avro provides the table schema and ignores the partition schema..).
//
String nextDataColumnsString = ObjectInspectorUtils.getFieldNames(partObjectInspector);
String[] nextDataColumns = nextDataColumnsString.split(",");
List<String> nextDataColumnList = Arrays.asList(nextDataColumns);

/*
 * Validate the column names that are present are the same.  Missing columns will be
 * implicitly defaulted to null.
 */
if (nextDataColumnList.size() > tableDataColumnList.size()) {
  enabledConditionsNotMetList.add(
      String.format(
          "Could not enable vectorization due to " +
          "partition column names size %d is greater than the number of table column names size %d",
          nextDataColumnList.size(), tableDataColumnList.size()));

  // Always set these so EXPLAIN can see.
  setValidateInputFormatAndSchemaEvolutionExplain(
      mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
      enabledConditionsMetSet, enabledConditionsNotMetList);
  return new ImmutablePair<Boolean,Boolean>(false, true);
}

if (!(deserializer instanceof NullStructSerDe)) {

  // (Don't insist NullStructSerDe produce correct column names).
  for (int i = 0; i < nextDataColumnList.size(); i++) {
    String nextColumnName = nextDataColumnList.get(i);
    String tableColumnName = tableDataColumnList.get(i);
    if (!nextColumnName.equals(tableColumnName)) {
      enabledConditionsNotMetList.add(
          String.format(
              "Could not enable vectorization due to " +
              "partition column name %s does not match table column name %s",
              nextColumnName, tableColumnName));

      // Always set these so EXPLAIN can see.
      setValidateInputFormatAndSchemaEvolutionExplain(
          mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
          enabledConditionsMetSet, enabledConditionsNotMetList);
      return new ImmutablePair<Boolean,Boolean>(false, true);
    }
  }
}

boolean isPartitionRowConversion = false;
if (!vectorPartDesc.getIsInputFileFormatSelfDescribing()) {
  final int nextDataTypeInfoSize = nextDataTypeInfoList.size();
  if (nextDataTypeInfoSize > tableDataTypeInfoList.size()) {
    enabledConditionsNotMetList.add(
        String.format(
            "Could not enable vectorization due to " +
            "partition column types size %d is greater than the number of table column types size %d",
            nextDataTypeInfoSize, tableDataTypeInfoList.size()));

    // Always set these so EXPLAIN can see.
    setValidateInputFormatAndSchemaEvolutionExplain(
        mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
        enabledConditionsMetSet, enabledConditionsNotMetList);
    return new ImmutablePair<Boolean,Boolean>(false, true);
  }
  for (int i = 0; i < nextDataTypeInfoSize; i++) {
    TypeInfo tableDataTypeInfo = tableDataTypeInfoList.get(i);
    TypeInfo nextDataTypeInfo = nextDataTypeInfoList.get(i);

    // FUTURE: We could be more sophisticated in our conversion check.
    if (!tableDataTypeInfo.equals(nextDataTypeInfo)) {
      isPartitionRowConversion = true;
      break;
    }
  }
}

if (isPartitionRowConversion && isLlapIoEnabled) {
  // NOTE(review): "wbich" typo below is inside a user-visible message string; left as-is
  // because this edit changes comments only.
  enabledConditionsNotMetList.add(
      "Could not enable vectorization. " +
      "LLAP I/O is enabled wbich automatically deserializes into " +
      "VECTORIZED_INPUT_FILE_FORMAT. " +
      "A partition requires data type conversion and that is not supported");

  // Always set these so EXPLAIN can see.
  setValidateInputFormatAndSchemaEvolutionExplain(
      mapWork, inputFileFormatClassNameSet, vectorPartitionDescMap,
      enabledConditionsMetSet, enabledConditionsNotMetList);
  return new ImmutablePair<Boolean,Boolean>(false, true);
}
}

// For now, we don't know which virtual columns are going to be included.  We'll add them
// later...
vectorTaskColumnInfo.setAllColumnNames(allColumnNameList);
vectorTaskColumnInfo.setAllTypeInfos(allTypeInfoList);
vectorTaskColumnInfo.setDataColumnNums(dataColumnNums);
vectorTaskColumnInfo.setPartitionColumnCount(partitionColumnCount);
vectorTaskColumnInfo.setAvailableVirtualColumnList(availableVirtualColumnList);
vectorTaskColumnInfo.setUseVectorizedInputFileFormat(
    shouldUseVectorizedInputFormat(inputFileFormatClassNameSet));

vectorTaskColumnInfo.setInputFormatSupportSet(inputFormatSupportSet);

// Always set these so EXPLAIN can see.
mapWork.setVectorizationInputFileFormatClassNameSet(inputFileFormatClassNameSet);
ArrayList<VectorPartitionDesc> vectorPartitionDescList = new ArrayList<VectorPartitionDesc>();
vectorPartitionDescList.addAll(vectorPartitionDescMap.keySet());
mapWork.setVectorPartitionDescList(vectorPartitionDescList);
mapWork.setVectorizationEnabledConditionsMet(enabledConditionsMetSet);
mapWork.setVectorizationEnabledConditionsNotMet(enabledConditionsNotMetList);

// Success: input format validated and schema evolution checked.
return new ImmutablePair<Boolean,Boolean>(true, false);
}

/*
 * Validate and vectorize one MapWork vertex: find the single TableScanOperator, validate the
 * input format / schema evolution, intersect the input format's Support features with the
 * enabled ones, then vectorize the map-side operator tree.  On failure the "not vectorized"
 * reason is recorded on the MapWork (for EXPLAIN) and the vertex is left non-vectorized.
 */
private void validateAndVectorizeMapWork(MapWork mapWork, VectorTaskColumnInfo vectorTaskColumnInfo,
    boolean isTezOrSpark) throws SemanticException {

  //--------------------------------------------------------------------------------------------

  LOG.info("Examining input format to see if vectorization is enabled.");

  ImmutablePair<String,TableScanOperator> onlyOneTableScanPair = verifyOnlyOneTableScanOperator(mapWork);
  if (onlyOneTableScanPair == null) {
    // verifyOnlyOneTableScanOperator recorded the reason on currentBaseWork.
    VectorizerReason notVectorizedReason = currentBaseWork.getNotVectorizedReason();
    Preconditions.checkState(notVectorizedReason != null);
    mapWork.setVectorizationEnabledConditionsNotMet(Collections.singleton(notVectorizedReason.toString()));
    return;
  }
  String alias = onlyOneTableScanPair.left;
  TableScanOperator tableScanOperator = onlyOneTableScanPair.right;

  // This call fills in the column names, types, and partition column count in
  // vectorTaskColumnInfo.
  currentOperator = tableScanOperator;
  ImmutablePair<Boolean, Boolean> validateInputFormatAndSchemaEvolutionPair =
      validateInputFormatAndSchemaEvolution(
          mapWork, alias, tableScanOperator, vectorTaskColumnInfo);
  if (!validateInputFormatAndSchemaEvolutionPair.left) {
    // Have we already set the enabled conditions not met?
    if (!validateInputFormatAndSchemaEvolutionPair.right) {
      VectorizerReason notVectorizedReason = currentBaseWork.getNotVectorizedReason();
      Preconditions.checkState(notVectorizedReason != null);
      mapWork.setVectorizationEnabledConditionsNotMet(Collections.singleton(notVectorizedReason.toString()));
    }
    return;
  }

  final int dataColumnCount =
      vectorTaskColumnInfo.allColumnNames.size() - vectorTaskColumnInfo.partitionColumnCount;

  /*
   * Take what all input formats support and eliminate any of them not enabled by
   * the Hive variable.
   */
  List<String> supportRemovedReasons = new ArrayList<String>();
  Set<Support> supportSet = new TreeSet<Support>();
  if (vectorTaskColumnInfo.inputFormatSupportSet != null) {
    supportSet.addAll(vectorTaskColumnInfo.inputFormatSupportSet);
  }
  // The retainAll method does set intersection.
  supportSet.retainAll(vectorizedInputFormatSupportEnabledSet);
  if (!supportSet.equals(vectorTaskColumnInfo.inputFormatSupportSet)) {
    // Record which Support features were dropped and why, for diagnostics.
    Set<Support> removedSet = new TreeSet<Support>();
    removedSet.addAll(vectorizedInputFormatSupportEnabledSet);
    removedSet.removeAll(supportSet);
    String removeString =
        removedSet.toString() + " is disabled because it is not in " +
        HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED.varname +
        " " + vectorizedInputFormatSupportEnabledSet.toString();
    supportRemovedReasons.add(removeString);
  }

  // Now remember what is supported for this query and any support that was
  // removed.
  vectorTaskColumnInfo.setSupportSetInUse(supportSet);
  vectorTaskColumnInfo.setSupportRemovedReasons(supportRemovedReasons);

  final boolean isSupportDecimal64 = supportSet.contains(Support.DECIMAL_64);
  List<DataTypePhysicalVariation> dataTypePhysicalVariations = new ArrayList<DataTypePhysicalVariation>();
  for (int i = 0; i < dataColumnCount; i++) {
    DataTypePhysicalVariation dataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
    if (isSupportDecimal64) {
      // Decimals that fit in 64 bits can use the faster DECIMAL_64 representation.
      TypeInfo typeInfo = vectorTaskColumnInfo.allTypeInfos.get(i);
      if (typeInfo instanceof DecimalTypeInfo) {
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        if (HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision())) {
          dataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
        }
      }
    }
    dataTypePhysicalVariations.add(dataTypePhysicalVariation);
  }
  // It simplifies things to just add default ones for partitions.
  for (int i = 0; i < vectorTaskColumnInfo.partitionColumnCount; i++) {
    dataTypePhysicalVariations.add(DataTypePhysicalVariation.NONE);
  }
  vectorTaskColumnInfo.setAlldataTypePhysicalVariations(dataTypePhysicalVariations);

  // Set global member indicating which virtual columns are possible to be used by
  // the Map vertex.
  availableVectorizedVirtualColumnSet = new HashSet<VirtualColumn>();
  availableVectorizedVirtualColumnSet.addAll(vectorTaskColumnInfo.availableVirtualColumnList);

  // And, use set to remember which virtual columns were actually referenced.
  neededVirtualColumnSet = new HashSet<VirtualColumn>();

  mapWork.setVectorizationEnabled(true);
  LOG.info("Vectorization is enabled for input format(s) " +
      mapWork.getVectorizationInputFileFormatClassNameSet().toString());

  //--------------------------------------------------------------------------------------------

  /*
   * Validate and vectorize the Map operator tree.
   */
  if (!validateAndVectorizeMapOperators(mapWork, tableScanOperator, isTezOrSpark, vectorTaskColumnInfo)) {
    return;
  }

  //--------------------------------------------------------------------------------------------

  vectorTaskColumnInfo.transferToBaseWork(mapWork);

  mapWork.setVectorMode(true);
}

/*
 * Validate the TableScanOperator and vectorize the map-side operator tree.  Vectorization
 * failures (and, in test mode, suppressed fatal exceptions) are converted into a false
 * return after the "not vectorized" reason has been recorded on the MapWork vertex.
 */
private boolean validateAndVectorizeMapOperators(MapWork mapWork, TableScanOperator tableScanOperator,
    boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {

  LOG.info("Validating and vectorizing MapWork... (vectorizedVertexNum " + vectorizedVertexNum + ")");

  // Set "global" member indicating where to store "not vectorized" information if necessary.
  currentBaseWork = mapWork;

  if (!validateTableScanOperator(tableScanOperator, mapWork)) {

    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  }
  try {
    validateAndVectorizeMapOperators(tableScanOperator, isTezOrSpark, vectorTaskColumnInfo);
  } catch (VectorizerCannotVectorizeException e) {

    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  } catch (NullPointerException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      // Re-throw without losing original stack trace.
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  } catch (ClassCastException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  } catch (RuntimeException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }

  vectorTaskColumnInfo.setNeededVirtualColumnList(
      new ArrayList<VirtualColumn>(neededVirtualColumnSet));

  /*
   * The scratch column information was collected by the task VectorizationContext.  Go get it.
 */
VectorizationContext vContext =
    ((VectorizationContextRegion) tableScanOperator).getOutputVectorizationContext();
vectorTaskColumnInfo.setScratchTypeNameArray(
    vContext.getScratchColumnTypeNames());
vectorTaskColumnInfo.setScratchdataTypePhysicalVariationsArray(
    vContext.getScratchDataTypePhysicalVariations());

return true;
}

/*
 * Vectorize the operator tree below the TableScanOperator and splice the vectorized children
 * back onto it, fixing up parent/child links and saving the task-level VectorizationContext
 * on the TableScanOperator.  Throws VectorizerCannotVectorizeException when the tree cannot
 * be vectorized.
 */
private void validateAndVectorizeMapOperators(TableScanOperator tableScanOperator,
    boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo)
        throws VectorizerCannotVectorizeException {

  Operator<? extends OperatorDesc> dummyVectorOperator =
      validateAndVectorizeOperatorTree(tableScanOperator, false, isTezOrSpark, vectorTaskColumnInfo);

  // Fixup parent and child relations.
  List<Operator<? extends OperatorDesc>> vectorChildren = dummyVectorOperator.getChildOperators();
  tableScanOperator.setChildOperators(vectorChildren);

  final int vectorChildCount = vectorChildren.size();
  for (int i = 0; i < vectorChildCount; i++) {

    Operator<? extends OperatorDesc> vectorChild = vectorChildren.get(i);

    // Replace any occurrence of dummyVectorOperator with our TableScanOperator.
    List<Operator<? extends OperatorDesc>> vectorChildParents = vectorChild.getParentOperators();
    final int vectorChildParentCount = vectorChildParents.size();
    for (int p = 0; p < vectorChildParentCount; p++) {
      Operator<? extends OperatorDesc> vectorChildParent = vectorChildParents.get(p);
      if (vectorChildParent == dummyVectorOperator) {
        vectorChildParents.set(p, tableScanOperator);
      }
    }
  }

  // And, finally, save the VectorizationContext.
  tableScanOperator.setTaskVectorizationContext(
      ((VectorizationOperator) dummyVectorOperator).getInputVectorizationContext());

  // Modify TableScanOperator in-place so it knows to operate vectorized.
  vectorizeTableScanOperatorInPlace(tableScanOperator, vectorTaskColumnInfo);
}

/*
 * We are "committing" this vertex to be vectorized.  Installs a VectorTableScanDesc with the
 * projected column names/types/variations and marks the TableScanDesc vectorized, then walks
 * the child operators level by level propagating their VectorDesc (for EXPLAIN).
 */
private void vectorizeTableScanOperatorInPlace(TableScanOperator tableScanOperator,
    VectorTaskColumnInfo vectorTaskColumnInfo) {

  TableScanDesc tableScanDesc = tableScanOperator.getConf();
  VectorTableScanDesc vectorTableScanDesc = new VectorTableScanDesc();
  tableScanDesc.setVectorDesc(vectorTableScanDesc);

  VectorizationContext vContext =
      ((VectorizationContextRegion) tableScanOperator).getOutputVectorizationContext();
  List<Integer> projectedColumns = vContext.getProjectedColumns();
  vectorTableScanDesc.setProjectedColumns(
      ArrayUtils.toPrimitive(projectedColumns.toArray(new Integer[0])));

  List<String> allColumnNameList = vectorTaskColumnInfo.allColumnNames;
  List<TypeInfo> allTypeInfoList = vectorTaskColumnInfo.allTypeInfos;
  List<DataTypePhysicalVariation> allDataTypePhysicalVariationList =
      vectorTaskColumnInfo.allDataTypePhysicalVariations;

  final int projectedColumnCount = projectedColumns.size();
  String[] projectedDataColumnNames = new String[projectedColumnCount];
  TypeInfo[] projectedDataColumnTypeInfos = new TypeInfo[projectedColumnCount];
  DataTypePhysicalVariation[] projectedDataColumnDataTypePhysicalVariation =
      new DataTypePhysicalVariation[projectedColumnCount];
  for (int i = 0; i < projectedColumnCount; i++) {
    final int projectedColumnNum = projectedColumns.get(i);
    projectedDataColumnNames[i] = allColumnNameList.get(projectedColumnNum);
    projectedDataColumnTypeInfos[i] = allTypeInfoList.get(projectedColumnNum);
    projectedDataColumnDataTypePhysicalVariation[i] =
        allDataTypePhysicalVariationList.get(projectedColumnNum);
  }
  vectorTableScanDesc.setProjectedColumnNames(projectedDataColumnNames);
  vectorTableScanDesc.setProjectedColumnTypeInfos(projectedDataColumnTypeInfos);
  vectorTableScanDesc.setProjectedColumnDataTypePhysicalVariations(projectedDataColumnDataTypePhysicalVariation);

  tableScanOperator.getConf().setVectorized(true);

  // Breadth-first walk: push VectorDesc into each generation of children.
  List<Operator<? extends OperatorDesc>> children = tableScanOperator.getChildOperators();
  while (children.size() > 0) {
    children = dosetVectorDesc(children);
  }
}

/*
 * Copy each child operator's VectorDesc onto its AbstractOperatorDesc (so EXPLAIN can show
 * it) and return the next generation of children.
 */
private List<Operator<? extends OperatorDesc>> dosetVectorDesc(
    List<Operator<? extends OperatorDesc>> children) {

  List<Operator<? extends OperatorDesc>> newChildren =
      new ArrayList<Operator<? extends OperatorDesc>>();

  for (Operator<? extends OperatorDesc> child : children) {

    // Get the vector description from the operator.
    VectorDesc vectorDesc = ((VectorizationOperator) child).getVectorDesc();

    // Save the vector description for the EXPLAIN.
    AbstractOperatorDesc desc = (AbstractOperatorDesc) child.getConf();
    desc.setVectorDesc(vectorDesc);

    List<Operator<? extends OperatorDesc>> childChildren = child.getChildOperators();
    if (childChildren != null) {
      newChildren.addAll(childChildren);
    }
  }

  return newChildren;
}

// Mark the ReduceWork vectorization-enabled and attempt to validate/vectorize it.
private void convertReduceWork(ReduceWork reduceWork) throws SemanticException {

  reduceWork.setVectorizationEnabled(true);

  VectorTaskColumnInfo vectorTaskColumnInfo = new VectorTaskColumnInfo();
  vectorTaskColumnInfo.assume();

  reduceWork.setVectorizedTestingReducerBatchSize(vectorizedTestingReducerBatchSize);

  validateAndVectorizeReduceWork(reduceWork, vectorTaskColumnInfo);
}

private void validateAndVectorizeReduceWork(ReduceWork reduceWork,
    VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {

  // Validate input to ReduceWork.
  if (!getOnlyStructObjectInspectors(reduceWork, vectorTaskColumnInfo)) {
    return;
  }

  //--------------------------------------------------------------------------------------------

  /*
   * Validate and vectorize the Reduce operator tree.
 */
if (!validateAndVectorizeReduceOperators(reduceWork, vectorTaskColumnInfo)) {
  return;
}

//--------------------------------------------------------------------------------------------

vectorTaskColumnInfo.transferToBaseWork(reduceWork);

reduceWork.setVectorMode(true);

return;
}

/*
 * Vectorize the reduce-side operator tree.  On success the ReduceWork's reducer is replaced
 * with the fully vectorized tree and true is returned; on failure the "not vectorized"
 * reason is recorded and false is returned.  In test mode, fatal runtime exceptions may be
 * suppressed and converted into a node issue instead of propagating.
 */
private boolean validateAndVectorizeReduceOperators(ReduceWork reduceWork,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws SemanticException {

  LOG.info("Validating and vectorizing ReduceWork... (vectorizedVertexNum " + vectorizedVertexNum + ")");

  Operator<? extends OperatorDesc> newVectorReducer;
  try {
    newVectorReducer =
        validateAndVectorizeReduceOperators(reduceWork.getReducer(), vectorTaskColumnInfo);
  } catch (VectorizerCannotVectorizeException e) {

    // The "not vectorized" information has been stored in the MapWork vertex.
    return false;
  } catch (NullPointerException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      // Re-throw without losing original stack trace.
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  } catch (ClassCastException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  } catch (RuntimeException e) {
    if (!isTestVectorizerSuppressFatalExceptions) {
      throw e;
    }
    setNodeIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }

  /*
   * The scratch column information was collected by the task VectorizationContext.  Go get it.
   */
  VectorizationContext vContext =
      ((VectorizationOperator) newVectorReducer).getInputVectorizationContext();
  vectorTaskColumnInfo.setScratchTypeNameArray(
      vContext.getScratchColumnTypeNames());
  vectorTaskColumnInfo.setScratchdataTypePhysicalVariationsArray(
      vContext.getScratchDataTypePhysicalVariations());

  // Replace the reducer with our fully vectorized reduce operator tree.
  reduceWork.setReducer(newVectorReducer);

  return true;
}

/*
 * Vectorize the operator tree rooted at the reducer.  A DummyOperator is used as an
 * artificial parent so validateAndVectorizeOperatorTree has a single root to work from;
 * the vectorized reducer is then unhooked from the dummy and returned.
 */
private Operator<? extends OperatorDesc> validateAndVectorizeReduceOperators(
    Operator<? extends OperatorDesc> reducerOperator,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws VectorizerCannotVectorizeException {

  Operator<? extends OperatorDesc> dummyOperator = new DummyOperator();
  dummyOperator.getChildOperators().add(reducerOperator);

  Operator<? extends OperatorDesc> dummyVectorOperator =
      validateAndVectorizeOperatorTree(dummyOperator, true, true, vectorTaskColumnInfo);

  Operator<? extends OperatorDesc> newVectorReducer =
      dummyVectorOperator.getChildOperators().get(0);

  // Propagate each operator's VectorDesc for EXPLAIN, level by level.
  List<Operator<? extends OperatorDesc>> children =
      new ArrayList<Operator<? extends OperatorDesc>>();
  children.add(newVectorReducer);
  while (children.size() > 0) {
    children = dosetVectorDesc(children);
  }

  return newVectorReducer;
}

/*
 * Collect the reduce-shuffle KEY and VALUE column names/types (and DECIMAL_64 variations)
 * from the key/value serdes into vectorTaskColumnInfo.  Returns false (with a node issue
 * recorded) when the inspectors are not plain StructObjectInspectors or tagging is needed.
 */
private boolean getOnlyStructObjectInspectors(ReduceWork reduceWork,
    VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException {

  ArrayList<String> reduceColumnNames = new ArrayList<String>();
  ArrayList<TypeInfo> reduceTypeInfos = new ArrayList<TypeInfo>();
  ArrayList<DataTypePhysicalVariation> reduceDataTypePhysicalVariations =
      new ArrayList<DataTypePhysicalVariation>();

  if (reduceWork.getNeedsTagging()) {
    setNodeIssue("Tagging not supported");
    return false;
  }

  String columnSortOrder;
  String columnNullOrder;
  try {
    TableDesc keyTableDesc = reduceWork.getKeyDesc();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Using reduce tag " + reduceWork.getTag());
    }
    TableDesc valueTableDesc = reduceWork.getTagToValueDesc().get(reduceWork.getTag());

    Properties keyTableProperties = keyTableDesc.getProperties();
    AbstractSerDe keyDeserializer =
        ReflectionUtils.newInstance(
            keyTableDesc.getSerDeClass(), null);
    keyDeserializer.initialize(null, keyTableProperties, null);
    ObjectInspector keyObjectInspector = keyDeserializer.getObjectInspector();
    if (keyObjectInspector == null) {
      setNodeIssue("Key object inspector null");
      return false;
    }
    if (!(keyObjectInspector instanceof StructObjectInspector)) {
      setNodeIssue("Key object inspector not StructObjectInspector");
      return false;
    }
    StructObjectInspector keyStructObjectInspector = (StructObjectInspector) keyObjectInspector;
    List<? extends StructField> keyFields = keyStructObjectInspector.getAllStructFieldRefs();

    for (StructField field: keyFields) {
      reduceColumnNames.add(Utilities.ReduceField.KEY.toString() + "." + field.getFieldName());
      reduceTypeInfos.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName()));
      reduceDataTypePhysicalVariations.add(DataTypePhysicalVariation.NONE);
    }

    columnSortOrder = keyTableProperties.getProperty(serdeConstants.SERIALIZATION_SORT_ORDER);
    columnNullOrder = keyTableProperties.getProperty(serdeConstants.SERIALIZATION_NULL_SORT_ORDER);

    AbstractSerDe valueDeserializer =
        ReflectionUtils.newInstance(
            valueTableDesc.getSerDeClass(), null);
    valueDeserializer.initialize(null, valueTableDesc.getProperties(), null);

    ObjectInspector valueObjectInspector = valueDeserializer.getObjectInspector();
    if (valueObjectInspector != null) {
      if (!(valueObjectInspector instanceof StructObjectInspector)) {
        setNodeIssue("Value object inspector not StructObjectInspector");
        return false;
      }
      StructObjectInspector valueStructObjectInspector = (StructObjectInspector) valueObjectInspector;
      List<? extends StructField> valueFields = valueStructObjectInspector.getAllStructFieldRefs();

      for (StructField field: valueFields) {
        reduceColumnNames.add(Utilities.ReduceField.VALUE.toString() + "." + field.getFieldName());
        TypeInfo reduceTypeInfo =
            TypeInfoUtils.getTypeInfoFromTypeString(
                field.getFieldObjectInspector().getTypeName());
        reduceTypeInfos.add(reduceTypeInfo);
        // Small-precision decimals can use the DECIMAL_64 physical variation.
        if (reduceTypeInfo instanceof DecimalTypeInfo &&
            HiveDecimalWritable.isPrecisionDecimal64(((DecimalTypeInfo)reduceTypeInfo).getPrecision())) {
          reduceDataTypePhysicalVariations.add(DataTypePhysicalVariation.DECIMAL_64);
        } else {
          reduceDataTypePhysicalVariations.add(DataTypePhysicalVariation.NONE);
        }
      }
    }
  } catch (Exception e) {
    throw new SemanticException(e);
  }
  vectorTaskColumnInfo.setAllColumnNames(reduceColumnNames);
  vectorTaskColumnInfo.setAllTypeInfos(reduceTypeInfos);
  vectorTaskColumnInfo.setAlldataTypePhysicalVariations(reduceDataTypePhysicalVariations);
  vectorTaskColumnInfo.setReduceColumnSortOrder(columnSortOrder);
  vectorTaskColumnInfo.setReduceColumnNullOrder(columnNullOrder);

  return true;
}
// NOTE(review): closes an inner class whose declaration lies above this chunk.
}

/*
 * Entry point of this physical resolver: read all vectorization-related configuration,
 * decide whether vectorization may be attempted, and walk the task tree with a
 * VectorizationDispatcher.
 */
@Override
public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {

  hiveConf = physicalContext.getConf();
  planMapper = physicalContext.getContext().getPlanMapper();

  String vectorizationEnabledOverrideString =
      HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_ENABLED_OVERRIDE);
  vectorizationEnabledOverride =
      EnabledOverride.nameMap.get(vectorizationEnabledOverrideString);

  isVectorizationEnabled = HiveConf.getBoolVar(hiveConf,
      HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED);

  final boolean weCanAttemptVectorization;
  isTestForcedVectorizationEnable = false;
  switch (vectorizationEnabledOverride) {
  case NONE:
    weCanAttemptVectorization = isVectorizationEnabled;
    break;
  case DISABLE:
    weCanAttemptVectorization = false;
    break;
  case ENABLE:
    weCanAttemptVectorization = true;
    isTestForcedVectorizationEnable = !isVectorizationEnabled;

    // Different parts of the code rely on this being set...
    HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
    isVectorizationEnabled = true;
    break;
  default:
    throw new RuntimeException("Unexpected vectorization enabled override " +
        vectorizationEnabledOverride);
  }
  if (!weCanAttemptVectorization) {
    LOG.info("Vectorization is disabled");
    return physicalContext;
  }

  useVectorizedInputFileFormat =
      HiveConf.getBoolVar(hiveConf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTORIZED_INPUT_FILE_FORMAT);
  if (useVectorizedInputFileFormat) {
    initVectorizedInputFormatExcludeClasses();
  }
  useVectorDeserialize =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_USE_VECTOR_DESERIALIZE);
  useRowDeserialize =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_USE_ROW_DESERIALIZE);
  if (useRowDeserialize) {
    initRowDeserializeExcludeClasses();
  }

  // TODO: we could also vectorize some formats based on hive.llap.io.encode.formats if LLAP IO
  // is enabled and we are going to run in LLAP. However, we don't know if we end up in
  // LLAP or not at this stage, so don't do this now. We may need to add a 'force' option.
  isReduceVectorizationEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED);
  isPtfVectorizationEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_ENABLED);

  isVectorizationComplexTypesEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_COMPLEX_TYPES_ENABLED);
  isVectorizationGroupByComplexTypesEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_COMPLEX_TYPES_ENABLED);

  isVectorizedRowIdentifierEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_ROW_IDENTIFIER_ENABLED);

  vectorizedPTFMaxMemoryBufferingBatchCount =
      HiveConf.getIntVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_MAX_MEMORY_BUFFERING_BATCH_COUNT);
  vectorizedTestingReducerBatchSize =
      HiveConf.getIntVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_TESTING_REDUCER_BATCH_SIZE);

  isTestVectorizerSuppressFatalExceptions =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_TEST_VECTORIZER_SUPPRESS_FATAL_EXCEPTIONS);

  // Parse the comma-separated Support feature names (case-insensitive); unknown names
  // are silently ignored.
  vectorizedInputFormatSupportEnabled =
      HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZED_INPUT_FORMAT_SUPPORTS_ENABLED);
  String[] supportEnabledStrings = vectorizedInputFormatSupportEnabled.toLowerCase().split(",");
  vectorizedInputFormatSupportEnabledSet = new TreeSet<Support>();
  for (String supportEnabledString : supportEnabledStrings) {
    Support support = Support.nameToSupportMap.get(supportEnabledString);

    // Known?
    if (support != null) {
      vectorizedInputFormatSupportEnabledSet.add(support);
    }
  }

  /*
   * Notice the default value for LLAP_IO_ENABLED is overridden to be whether we are
   * executing under LLAP.
   */
  isLlapIoEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.LLAP_IO_ENABLED, LlapProxy.isDaemon());

  isSchemaEvolution =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SCHEMA_EVOLUTION);

  hiveVectorAdaptorUsageMode = HiveVectorAdaptorUsageMode.getHiveConfValue(hiveConf);

  isTestVectorizationSuppressExplainExecutionMode =
      HiveConf.getBoolVar(hiveConf,
          HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_SUPPRESS_EXPLAIN_EXECUTION_MODE);

  // create dispatcher and graph walker
  SemanticDispatcher disp = new VectorizationDispatcher();
  TaskGraphWalker ogw = new TaskGraphWalker(disp);

  // get all the tasks nodes from root task
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(physicalContext.getRootTasks());

  // begin to walk through the task tree.
  ogw.startWalking(topNodes, null);
  return physicalContext;
}

// Load the input-format class names excluded from VECTORIZED_INPUT_FILE_FORMAT mode.
private void initVectorizedInputFormatExcludeClasses() {
  vectorizedInputFormatExcludes = Utilities.getClassNamesFromConfig(hiveConf,
      ConfVars.HIVE_VECTORIZATION_VECTORIZED_INPUT_FILE_FORMAT_EXCLUDES);
}

// Load the input-format class names excluded from row-deserialize mode.
private void initRowDeserializeExcludeClasses() {
  rowDeserializeInputFormatExcludes = Utilities.getClassNamesFromConfig(hiveConf,
      ConfVars.HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES);
}

/*
 * Record a "not supported" node issue for the operator, using its @Explain display name
 * when one is declared.
 */
private void setOperatorNotSupported(Operator<?
    extends OperatorDesc> op) {
  OperatorDesc desc = op.getConf();
  Annotation note = AnnotationUtils.getAnnotation(desc.getClass(), Explain.class);
  if (note != null) {
    Explain explainNote = (Explain) note;
    setNodeIssue(explainNote.displayName() + " (" + op.getType() + ") not supported");
  } else {
    setNodeIssue("Operator " + op.getType() + " not supported");
  }
}

private boolean validateSMBMapJoinOperator(SMBMapJoinOperator op) {
  SMBJoinDesc desc = op.getConf();
  // Validation is the same as for map join, since the 'small' tables are not vectorized
  return validateMapJoinDesc(desc);
}

private boolean validateTableScanOperator(TableScanOperator op, MapWork mWork) {
  TableScanDesc desc = op.getConf();

  if (desc.isGatherStats()) {
    setOperatorIssue("gather stats not supported");
    return false;
  }

  return true;
}

private boolean validateMapJoinOperator(MapJoinOperator op) {
  MapJoinDesc desc = op.getConf();
  return validateMapJoinDesc(desc);
}

/*
 * Validate the filter, key, value, and small-table expressions of a map join, and reject
 * residual (non-equi) join filters.
 */
private boolean validateMapJoinDesc(MapJoinDesc desc) {
  byte posBigTable = (byte) desc.getPosBigTable();
  List<ExprNodeDesc> filterExprs = desc.getFilters().get(posBigTable);
  if (!validateExprNodeDesc(
      filterExprs, "Filter", VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true)) {
    return false;
  }
  List<ExprNodeDesc> keyExprs = desc.getKeys().get(posBigTable);
  if (!validateExprNodeDescNoComplex(keyExprs, "Key")) {
    // Vectorization for join keys of complex type is not supported.
    // https://issues.apache.org/jira/browse/HIVE-24989
    return false;
  }
  List<ExprNodeDesc> valueExprs = desc.getExprs().get(posBigTable);
  if (!validateExprNodeDesc(valueExprs, "Value")) {
    return false;
  }
  Byte[] order = desc.getTagOrder();
  Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ?
      order[1] : order[0]);
  List<ExprNodeDesc> smallTableExprs = desc.getExprs().get(posSingleVectorMapJoinSmallTable);
  if (!validateExprNodeDesc(smallTableExprs, "Small Table")) {
    return false;
  }
  if (desc.getResidualFilterExprs() != null && !desc.getResidualFilterExprs().isEmpty()) {
    setOperatorIssue("Non-equi joins not supported");
    return false;
  }
  return true;
}

private boolean validateReduceSinkOperator(ReduceSinkOperator op) {
  List<ExprNodeDesc> keyDescs = op.getConf().getKeyCols();
  List<ExprNodeDesc> partitionDescs = op.getConf().getPartitionCols();
  List<ExprNodeDesc> valueDesc = op.getConf().getValueCols();
  return validateExprNodeDesc(keyDescs, "Key") &&
      validateExprNodeDesc(partitionDescs, "Partition") &&
      validateExprNodeDesc(valueDesc, "Value");
}

private boolean validateSelectOperator(SelectOperator op) {
  List<ExprNodeDesc> descList = op.getConf().getColList();
  for (ExprNodeDesc desc : descList) {
    boolean ret =
        validateExprNodeDesc(
            desc, "Select",
            VectorExpressionDescriptor.Mode.PROJECTION,
            /* allowComplex */ true, /* allowVoidProjection */ true);
    if (!ret) {
      return false;
    }
  }
  return true;
}

private boolean validateFilterOperator(FilterOperator op) {
  ExprNodeDesc desc = op.getConf().getPredicate();
  return validateExprNodeDesc(
      desc, "Predicate", VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true);
}

private boolean validateTopNKeyOperator(TopNKeyOperator op) {
  List<ExprNodeDesc> keyColumns = op.getConf().getKeyColumns();
  return validateExprNodeDesc(keyColumns, "Key columns");
}

/*
 * Validate a GROUP BY operator for vectorization: reject non-HASH DISTINCT, validate keys
 * and aggregations, and derive the VectorGroupByDesc.ProcessingMode (see the long
 * explanatory comment below).
 */
private boolean validateGroupByOperator(GroupByOperator op, boolean isReduce,
    boolean isTezOrSpark, VectorGroupByDesc vectorGroupByDesc) {

  GroupByDesc desc = op.getConf();

  if (desc.getMode() != GroupByDesc.Mode.HASH && desc.isDistinct()) {
    setOperatorIssue("DISTINCT not supported");
    return false;
  }
  boolean ret = validateExprNodeDescNoComplex(desc.getKeys(), "Key");
  if (!ret) {
    return false;
  }

  /**
   *
   * GROUP BY DEFINITIONS:
   *
   * GroupByDesc.Mode enumeration:
   *
   *    The different
 *    modes of a GROUP BY operator.
   *
   *    These descriptions are hopefully less cryptic than the comments for GroupByDesc.Mode.
   *
   *        COMPLETE       Aggregates original rows into full aggregation row(s).
   *
   *                       If the key length is 0, this is also called Global aggregation and
   *                       1 output row is produced.
   *
   *                       When the key length is > 0, the original rows come in ALREADY GROUPED.
   *
   *                       An example for key length > 0 is a GROUP BY being applied to the
   *                       ALREADY GROUPED rows coming from an upstream JOIN operator.  Or,
   *                       ALREADY GROUPED rows coming from upstream MERGEPARTIAL GROUP BY
   *                       operator.
   *
   *        PARTIAL1       The first of 2 (or more) phases that aggregates ALREADY GROUPED
   *                       original rows into partial aggregations.
   *
   *                       Subsequent phases PARTIAL2 (optional) and MERGEPARTIAL will merge
   *                       the partial aggregations and output full aggregations.
   *
   *        PARTIAL2       Accept ALREADY GROUPED partial aggregations and merge them into another
   *                       partial aggregation.  Output the merged partial aggregations.
   *
   *                       (Haven't seen this one used)
   *
   *        PARTIALS       (Behaves for non-distinct the same as PARTIAL2; and behaves for
   *                       distinct the same as PARTIAL1.)
   *
   *        FINAL          Accept ALREADY GROUPED original rows and aggregate them into
   *                       full aggregations.
   *
   *                       Example is a GROUP BY being applied to rows from a sorted table, where
   *                       the group key is the table sort key (or a prefix).
   *
   *        HASH           Accept UNORDERED original rows and aggregate them into a memory table.
   *                       Output the partial aggregations on closeOp (or low memory).
   *
   *                       Similar to PARTIAL1 except original rows are UNORDERED.
   *
   *                       Commonly used in both Mapper and Reducer nodes.  Always followed by
   *                       a Reducer with MERGEPARTIAL GROUP BY.
   *
   *        MERGEPARTIAL   Always first operator of a Reducer.  Data is grouped by reduce-shuffle.
   *
   *                       (Behaves for non-distinct aggregations the same as FINAL; and behaves
   *                       for distinct aggregations the same as COMPLETE.)
   *
   *                       The output is full aggregation(s).
   *
   *                       Used in Reducers after a stage with a HASH GROUP BY operator.
   *
   *
   *  VectorGroupByDesc.ProcessingMode for VectorGroupByOperator:
   *
   *     GLOBAL         No key.  All rows --> 1 full aggregation on end of input
   *
   *     HASH           Rows aggregated in to hash table on group key -->
   *                        1 partial aggregation per key (normally, unless there is spilling)
   *
   *     MERGE_PARTIAL  As first operator in a REDUCER, partial aggregations come grouped from
   *                    reduce-shuffle -->
   *                        aggregate the partial aggregations and emit full aggregation on
   *                        endGroup / closeOp
   *
   *     STREAMING      Rows come from PARENT operator ALREADY GROUPED -->
   *                        aggregate the rows and emit full aggregation on key change / closeOp
   *
   *     NOTE: Hash can spill partial result rows prematurely if it runs low on memory.
   *     NOTE: Streaming has to compare keys where MergePartial gets an endGroup call.
   *
   *
   *  DECIDER: Which VectorGroupByDesc.ProcessingMode for VectorGroupByOperator?
   *
   *     Decides using GroupByDesc.Mode and whether there are keys with the
   *     VectorGroupByDesc.groupByDescModeToVectorProcessingMode method.
   *
   *         Mode.COMPLETE      --> (numKeys == 0 ? ProcessingMode.GLOBAL : ProcessingMode.STREAMING)
   *
   *         Mode.HASH          --> ProcessingMode.HASH
   *
   *         Mode.MERGEPARTIAL  --> (numKeys == 0 ? ProcessingMode.GLOBAL : ProcessingMode.MERGE_PARTIAL)
   *
   *         Mode.PARTIAL1,
   *         Mode.PARTIAL2,
   *         Mode.PARTIALS,
   *         Mode.FINAL         --> ProcessingMode.STREAMING
   *
   */
  boolean hasKeys = (desc.getKeys().size() > 0);

  ProcessingMode processingMode =
      VectorGroupByDesc.groupByDescModeToVectorProcessingMode(desc.getMode(), hasKeys);
  if (desc.isGroupingSetsPresent() &&
      (processingMode != ProcessingMode.HASH && processingMode != ProcessingMode.STREAMING)) {
    setOperatorIssue("Vectorized GROUPING SETS only expected for HASH and STREAMING processing modes");
    return false;
  }

  //TODO: isGroupingSetsPresent() is returning false, even though
  // ListGroupingSets is present. Need to check if there is hidden bug.
  boolean isGroupingSetsPresent =
      (desc.getListGroupingSets() != null && !desc.getListGroupingSets().isEmpty());
  if (!validateAggregationDescs(desc.getAggregators(), desc.getMode(), isGroupingSetsPresent, hasKeys)) {
    return false;
  }

  vectorGroupByDesc.setProcessingMode(processingMode);

  vectorGroupByDesc.setIsVectorizationComplexTypesEnabled(isVectorizationComplexTypesEnabled);
  vectorGroupByDesc.setIsVectorizationGroupByComplexTypesEnabled(isVectorizationGroupByComplexTypesEnabled);

  LOG.info("Vector GROUP BY operator will use processing mode " + processingMode.name());

  return true;
}

// File sinks have no expressions of their own to validate.
private boolean validateFileSinkOperator(FileSinkOperator op) {
  return true;
}

/*
 * Determine recursively if the PTF LEAD or LAG function is being used in an expression.
 */
private boolean containsLeadLag(ExprNodeDesc exprNodeDesc) {
  if (exprNodeDesc instanceof ExprNodeGenericFuncDesc) {
    ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) exprNodeDesc;
    GenericUDF genFuncClass = genericFuncDesc.getGenericUDF();
    if (genFuncClass instanceof GenericUDFLag ||
        genFuncClass instanceof GenericUDFLead) {
      return true;
    }
    return containsLeadLag(genericFuncDesc.getChildren());
  } else {
    // ExprNodeColumnDesc, ExprNodeConstantDesc, ExprNodeDynamicValueDesc, etc do not have
    // LEAD/LAG inside.
    return false;
  }
}

private boolean containsLeadLag(List<ExprNodeDesc> exprNodeDescList) {
  for (ExprNodeDesc exprNodeDesc : exprNodeDescList) {
    if (containsLeadLag(exprNodeDesc)) {
      return true;
    }
  }
  return false;
}

/*
 * Validate a PTF operator for vectorization: PTF must be reduce-side windowing directly
 * under a reduce-shuffle (optionally through a SELECT), not NOOP, and a
 * WindowTableFunctionDef.  NOTE: this method continues past the end of this chunk.
 */
private boolean validatePTFOperator(PTFOperator op, VectorizationContext vContext,
    VectorPTFDesc vectorPTFDesc)
        throws HiveException {

  if (!isPtfVectorizationEnabled) {
    setNodeIssue("Vectorization of PTF is not enabled (" +
        HiveConf.ConfVars.HIVE_VECTORIZATION_PTF_ENABLED.varname + " IS false)");
    return false;
  }
  PTFDesc ptfDesc = op.getConf();
  boolean isMapSide = ptfDesc.isMapSide();
  if (isMapSide) {
    setOperatorIssue("PTF Mapper not supported");
    return false;
  }
  List<Operator<? extends OperatorDesc>> ptfParents = op.getParentOperators();
  if (ptfParents != null && ptfParents.size() > 0) {
    Operator<? extends OperatorDesc> ptfParent = op.getParentOperators().get(0);
    if (!(ptfParent instanceof ReduceSinkOperator)) {
      boolean isReduceShufflePtf = false;
      if (ptfParent instanceof SelectOperator) {
        ptfParents = ptfParent.getParentOperators();
        if (ptfParents == null || ptfParents.size() == 0) {
          isReduceShufflePtf = true;
        } else {
          ptfParent = ptfParent.getParentOperators().get(0);
          isReduceShufflePtf = (ptfParent instanceof ReduceSinkOperator);
        }
      }
      if (!isReduceShufflePtf) {
        setOperatorIssue("Only PTF directly under reduce-shuffle is supported");
        return false;
      }
    }
  }
  boolean forNoop = ptfDesc.forNoop();
  if (forNoop) {
    setOperatorIssue("NOOP not supported");
    return false;
  }
  boolean forWindowing = ptfDesc.forWindowing();
  if (!forWindowing) {
    setOperatorIssue("Windowing required");
    return false;
  }
  PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();
  boolean isWindowTableFunctionDef = (funcDef instanceof WindowTableFunctionDef);
  if (!isWindowTableFunctionDef) {
    setOperatorIssue("Must be a WindowTableFunctionDef");
    return false;
  }

  // We collect information in VectorPTFDesc that doesn't need the VectorizationContext.
  // We use this information for validation.  Later when creating the vector operator
  // we create an additional object VectorPTFInfo.
  try {
    createVectorPTFDesc(
        op, ptfDesc, vContext, vectorPTFDesc, vectorizedPTFMaxMemoryBufferingBatchCount);
  } catch (HiveException e) {
    setOperatorIssue("exception: " + VectorizationContext.getStackTraceAsSingleLine(e));
    return false;
  }

  // Output columns ok?
String[] outputColumnNames = vectorPTFDesc.getOutputColumnNames(); TypeInfo[] outputTypeInfos = vectorPTFDesc.getOutputTypeInfos(); final int outputCount = outputColumnNames.length; for (int i = 0; i < outputCount; i++) { String typeName = outputTypeInfos[i].getTypeName(); boolean ret = validateDataType(typeName, VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ false); if (!ret) { setExpressionIssue("PTF Output Columns", "Data type " + typeName + " of column " + outputColumnNames[i] + " not supported"); return false; } } boolean[] distinctEvaluator = vectorPTFDesc.getEvaluatorsAreDistinct(); String[] evaluatorFunctionNames = vectorPTFDesc.getEvaluatorFunctionNames(); final int count = evaluatorFunctionNames.length; WindowFrameDef[] evaluatorWindowFrameDefs = vectorPTFDesc.getEvaluatorWindowFrameDefs(); List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists = vectorPTFDesc.getEvaluatorInputExprNodeDescLists(); for (int i = 0; i < count; i++) { String functionName = evaluatorFunctionNames[i]; SupportedFunctionType supportedFunctionType = VectorPTFDesc.supportedFunctionsMap.get(functionName); if (supportedFunctionType == null) { setOperatorIssue(functionName + " not in supported functions " + VectorPTFDesc.supportedFunctionNames); return false; } if (distinctEvaluator[i] && !supportedFunctionType.isSupportDistinct()) { setOperatorIssue(functionName + " distinct is not supported "); return false; } WindowFrameDef windowFrameDef = evaluatorWindowFrameDefs[i]; List<ExprNodeDesc> exprNodeDescList = evaluatorInputExprNodeDescLists[i]; final boolean isSingleParameter = (exprNodeDescList != null && exprNodeDescList.size() == 1); final ExprNodeDesc singleExprNodeDesc = (isSingleParameter ? exprNodeDescList.get(0) : null); final TypeInfo singleTypeInfo = (isSingleParameter ? singleExprNodeDesc.getTypeInfo() : null); final PrimitiveCategory singlePrimitiveCategory = (singleTypeInfo instanceof PrimitiveTypeInfo ? 
((PrimitiveTypeInfo) singleTypeInfo).getPrimitiveCategory() : null); switch (windowFrameDef.getWindowType()) { case RANGE: if (!windowFrameDef.getEnd().isCurrentRow()) { setOperatorIssue(functionName + " only CURRENT ROW end frame is supported for RANGE"); return false; } break; case ROWS: { boolean isRowEndCurrent = (windowFrameDef.getEnd().isCurrentRow() && (supportedFunctionType == SupportedFunctionType.AVG || supportedFunctionType == SupportedFunctionType.MAX || supportedFunctionType == SupportedFunctionType.MIN || supportedFunctionType == SupportedFunctionType.SUM) && isSingleParameter && singlePrimitiveCategory != null); if (!isRowEndCurrent && !windowFrameDef.isEndUnbounded()) { setOperatorIssue( functionName + " UNBOUNDED end frame is required for ROWS window type"); return false; } } break; default: throw new RuntimeException("Unexpected window type " + windowFrameDef.getWindowType()); } // RANK/DENSE_RANK don't care about columns. if (supportedFunctionType != SupportedFunctionType.RANK && supportedFunctionType != SupportedFunctionType.DENSE_RANK) { if (exprNodeDescList != null) { if (exprNodeDescList.size() > 1) { setOperatorIssue("More than 1 argument expression of aggregation function " + functionName); return false; } ExprNodeDesc exprNodeDesc = exprNodeDescList.get(0); if (containsLeadLag(exprNodeDesc)) { setOperatorIssue("lead and lag function not supported in argument expression of aggregation function " + functionName); return false; } if (supportedFunctionType != SupportedFunctionType.COUNT) { // COUNT does not care about column types. The rest do. 
TypeInfo typeInfo = exprNodeDesc.getTypeInfo(); Category category = typeInfo.getCategory(); boolean isSupportedType; if (category != Category.PRIMITIVE) { isSupportedType = false; } else { ColumnVector.Type colVecType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo); switch (colVecType) { case LONG: case DOUBLE: case DECIMAL: isSupportedType = true; break; default: isSupportedType = false; break; } } if (!isSupportedType) { setOperatorIssue(typeInfo.getTypeName() + " data type not supported in argument expression of aggregation function " + functionName); return false; } } } } if (vectorPTFDesc.getOrderExprNodeDescs().length > 1) { /* * Currently, we need to rule out here all cases where a range boundary scanner can run, * basically: 1. bounded start 2. bounded end which is not current row */ if (windowFrameDef.getWindowType() == WindowType.RANGE && (!windowFrameDef.isStartUnbounded() || !(windowFrameDef.getEnd().isCurrentRow() || windowFrameDef.isEndUnbounded()))) { setOperatorIssue( "Multi-column ordered RANGE boundary scanner is not supported in vectorized mode (window: " + windowFrameDef + ")"); return false; } } } return true; } private boolean validateExprNodeDesc(List<ExprNodeDesc> descs, String expressionTitle) { return validateExprNodeDesc( descs, expressionTitle, VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ true); } private boolean validateExprNodeDescNoComplex(List<ExprNodeDesc> descs, String expressionTitle) { return validateExprNodeDesc( descs, expressionTitle, VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ false); } private boolean validateExprNodeDesc(List<ExprNodeDesc> descs, String expressionTitle, VectorExpressionDescriptor.Mode mode, boolean allowComplex) { for (ExprNodeDesc d : descs) { boolean ret = validateExprNodeDesc(d, expressionTitle, mode, allowComplex); if (!ret) { return false; } } return true; } private boolean validateAggregationDescs(List<AggregationDesc> descs, GroupByDesc.Mode 
groupByMode, boolean isGroupingSetsPresent, boolean hasKeys) { for (AggregationDesc d : descs) { if (!validateAggregationDesc(d, groupByMode, isGroupingSetsPresent, hasKeys)) { return false; } } return true; } private boolean validateExprNodeDescRecursive(ExprNodeDesc desc, String expressionTitle, VectorExpressionDescriptor.Mode mode, boolean allowComplex) { return validateExprNodeDescRecursive(desc, expressionTitle, mode, allowComplex, /* allowVoidProjection */ false); } private boolean validateExprNodeDescRecursive(ExprNodeDesc desc, String expressionTitle, VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean allowVoidProjection) { if (desc instanceof ExprNodeColumnDesc) { ExprNodeColumnDesc c = (ExprNodeColumnDesc) desc; String columnName = c.getColumn(); if (availableVectorizedVirtualColumnSet != null) { // For Map, check for virtual columns. VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName); if (virtualColumn != null) { // We support some virtual columns in vectorization for this table scan. if (!availableVectorizedVirtualColumnSet.contains(virtualColumn)) { setExpressionIssue(expressionTitle, "Virtual column " + columnName + " is not supported"); return false; } // Remember we used this one in the query. 
neededVirtualColumnSet.add(virtualColumn); } } } String typeName = desc.getTypeInfo().getTypeName(); boolean ret = validateDataType( typeName, mode, allowComplex && isVectorizationComplexTypesEnabled, allowVoidProjection); if (!ret) { setExpressionIssue(expressionTitle, getValidateDataTypeErrorMsg( typeName, mode, allowComplex, isVectorizationComplexTypesEnabled)); return false; } boolean isInExpression = false; if (desc instanceof ExprNodeGenericFuncDesc) { ExprNodeGenericFuncDesc d = (ExprNodeGenericFuncDesc) desc; boolean r = validateGenericUdf(d); if (!r) { setExpressionIssue(expressionTitle, "UDF " + d + " not supported"); return false; } GenericUDF genericUDF = d.getGenericUDF(); isInExpression = (genericUDF instanceof GenericUDFIn); } if (desc.getChildren() != null) { if (isInExpression && desc.getChildren().get(0).getTypeInfo().getCategory() == Category.STRUCT) { // Don't restrict child expressions for projection. // Always use loose FILTER mode. if (!validateStructInExpression( desc, expressionTitle, VectorExpressionDescriptor.Mode.FILTER)) { return false; } } else { for (ExprNodeDesc d : desc.getChildren()) { // Don't restrict child expressions for projection. // Always use loose FILTER mode. 
if (!validateExprNodeDescRecursive( d, expressionTitle, VectorExpressionDescriptor.Mode.FILTER, /* allowComplex */ true, allowVoidProjection)) { return false; } } } } return true; } private boolean validateStructInExpression(ExprNodeDesc desc, String expressionTitle, VectorExpressionDescriptor.Mode mode) { for (ExprNodeDesc d : desc.getChildren()) { TypeInfo typeInfo = d.getTypeInfo(); if (typeInfo.getCategory() != Category.STRUCT) { return false; } StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos(); List<String> fieldNames = structTypeInfo.getAllStructFieldNames(); final int fieldCount = fieldTypeInfos.size(); for (int f = 0; f < fieldCount; f++) { TypeInfo fieldTypeInfo = fieldTypeInfos.get(f); Category category = fieldTypeInfo.getCategory(); if (category != Category.PRIMITIVE) { setExpressionIssue(expressionTitle, "Cannot vectorize struct field " + fieldNames.get(f) + " of type " + fieldTypeInfo.getTypeName()); return false; } PrimitiveTypeInfo fieldPrimitiveTypeInfo = (PrimitiveTypeInfo) fieldTypeInfo; InConstantType inConstantType = VectorizationContext .getInConstantTypeFromPrimitiveCategory(fieldPrimitiveTypeInfo .getPrimitiveCategory()); // For now, limit the data types we support for Vectorized Struct IN(). 
if (inConstantType != InConstantType.INT_FAMILY && inConstantType != InConstantType.FLOAT_FAMILY && inConstantType != InConstantType.STRING_FAMILY) { setExpressionIssue(expressionTitle, "Cannot vectorize struct field " + fieldNames.get(f) + " of type " + fieldTypeInfo.getTypeName()); return false; } } } return true; } private boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle) { return validateExprNodeDesc( desc, expressionTitle, VectorExpressionDescriptor.Mode.PROJECTION, /* allowComplex */ true, /* allowVoidProjection */ false); } boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle, VectorExpressionDescriptor.Mode mode, boolean allowComplex) { return validateExprNodeDescRecursive(desc, expressionTitle, mode, allowComplex); } boolean validateExprNodeDesc(ExprNodeDesc desc, String expressionTitle, VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean allowVoidProjection) { return validateExprNodeDescRecursive( desc, expressionTitle, mode, allowComplex, allowVoidProjection); } private boolean validateGenericUdf(ExprNodeGenericFuncDesc genericUDFExpr) { if (VectorizationContext.isCustomUDF(genericUDFExpr)) { return true; } if (hiveVectorAdaptorUsageMode == HiveVectorAdaptorUsageMode.NONE || hiveVectorAdaptorUsageMode == HiveVectorAdaptorUsageMode.CHOSEN) { GenericUDF genericUDF = genericUDFExpr.getGenericUDF(); if (genericUDF instanceof GenericUDFBridge) { Class<? 
extends UDF> udf = ((GenericUDFBridge) genericUDF).getUdfClass(); return supportedGenericUDFs.contains(udf); } else { return supportedGenericUDFs.contains(genericUDF.getClass()); } } return true; } private boolean validateAggregationDesc(AggregationDesc aggDesc, GroupByDesc.Mode groupByMode, boolean isGroupingSetsPresent, boolean hasKeys) { String udfName = aggDesc.getGenericUDAFName().toLowerCase(); if (!supportedAggregationUdfs.contains(udfName)) { setExpressionIssue("Aggregation Function", "UDF " + udfName + " not supported"); return false; } // The planner seems to pull this one out. if (groupByMode != GroupByDesc.Mode.HASH && aggDesc.getDistinct()) { setExpressionIssue("Aggregation Function", "DISTINCT not supported"); return false; } if (isGroupingSetsPresent && aggDesc.getDistinct()) { setExpressionIssue("Aggregation Function", "DISTINCT with Groupingsets not supported"); return false; } List<ExprNodeDesc> parameters = aggDesc.getParameters(); if (parameters != null && !validateExprNodeDesc(parameters, "Aggregation Function UDF " + udfName + " parameter")) { return false; } return true; } public static boolean validateDataType(String type, VectorExpressionDescriptor.Mode mode, boolean allowComplex) { return validateDataType(type, mode, allowComplex, /* allowVoidProjection */ false); } public static boolean validateDataType(String type, VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean allowVoidProjection) { type = type.toLowerCase(); boolean result = supportedDataTypesPattern.matcher(type).matches(); if (result && !allowVoidProjection && mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals("void")) { return false; } if (!result) { TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type); if (typeInfo.getCategory() != Category.PRIMITIVE) { if (allowComplex) { return true; } } } return result; } public static String getValidateDataTypeErrorMsg(String type, VectorExpressionDescriptor.Mode mode, boolean allowComplex, 
boolean isVectorizationComplexTypesEnabled) { return getValidateDataTypeErrorMsg( type, mode, allowComplex, isVectorizationComplexTypesEnabled, false); } public static String getValidateDataTypeErrorMsg(String type, VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean isVectorizationComplexTypesEnabled, boolean allowVoidProjection) { type = type.toLowerCase(); boolean result = supportedDataTypesPattern.matcher(type).matches(); if (result && !allowVoidProjection && mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals("void")) { return "Vectorizing data type void not supported when mode = PROJECTION"; } if (!result) { TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type); if (typeInfo.getCategory() != Category.PRIMITIVE) { if (allowComplex && isVectorizationComplexTypesEnabled) { return null; } else if (!allowComplex) { return "Vectorizing complex type " + typeInfo.getCategory() + " not supported"; } else { return "Vectorizing complex type " + typeInfo.getCategory() + " not enabled (" + type + ") since " + GroupByDesc.getComplexTypeEnabledCondition( isVectorizationComplexTypesEnabled); } } } return (result ? null : "Vectorizing data type " + type + " not supported"); } private void fixupParentChildOperators(Operator<? extends OperatorDesc> op, Operator<? extends OperatorDesc> vectorOp) { if (op.getParentOperators() != null) { vectorOp.setParentOperators(op.getParentOperators()); for (Operator<? extends OperatorDesc> p : op.getParentOperators()) { p.replaceChild(op, vectorOp); } } if (op.getChildOperators() != null) { vectorOp.setChildOperators(op.getChildOperators()); for (Operator<? extends OperatorDesc> c : op.getChildOperators()) { c.replaceParent(op, vectorOp); } } } private boolean isBigTableOnlyResults(MapJoinDesc desc) { Byte[] order = desc.getTagOrder(); byte posBigTable = (byte) desc.getPosBigTable(); Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? 
order[1] : order[0]); int[] smallTableIndices; int smallTableIndicesSize; if (desc.getValueIndices() != null && desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) { smallTableIndices = desc.getValueIndices().get(posSingleVectorMapJoinSmallTable); LOG.info("Vectorizer isBigTableOnlyResults smallTableIndices " + Arrays.toString(smallTableIndices)); smallTableIndicesSize = smallTableIndices.length; } else { smallTableIndices = null; LOG.info("Vectorizer isBigTableOnlyResults smallTableIndices EMPTY"); smallTableIndicesSize = 0; } List<Integer> smallTableRetainList = desc.getRetainList().get(posSingleVectorMapJoinSmallTable); LOG.info("Vectorizer isBigTableOnlyResults smallTableRetainList " + smallTableRetainList); int smallTableRetainSize = smallTableRetainList.size(); if (smallTableIndicesSize > 0) { // Small table indices has priority over retain. for (int i = 0; i < smallTableIndicesSize; i++) { if (smallTableIndices[i] < 0) { // Negative numbers indicate a column to be (deserialize) read from the small table's // LazyBinary value row. setOperatorIssue("Vectorizer isBigTableOnlyResults smallTableIndices[i] < 0 returning false"); return false; } } } else if (smallTableRetainSize > 0) { setOperatorIssue("Vectorizer isBigTableOnlyResults smallTableRetainSize > 0 returning false"); return false; } LOG.info("Vectorizer isBigTableOnlyResults returning true"); return true; } Operator<? extends OperatorDesc> specializeMapJoinOperator(Operator<? extends OperatorDesc> op, VectorizationContext vContext, MapJoinDesc desc, VectorMapJoinDesc vectorDesc) throws HiveException { Operator<? extends OperatorDesc> vectorOp = null; Class<? 
extends Operator<?>> opClass = null; VectorMapJoinInfo vectorMapJoinInfo = vectorDesc.getVectorMapJoinInfo(); HashTableImplementationType hashTableImplementationType = HashTableImplementationType.NONE; HashTableKind hashTableKind = HashTableKind.NONE; HashTableKeyType hashTableKeyType = HashTableKeyType.NONE; VectorMapJoinVariation vectorMapJoinVariation = null; if (vectorDesc.getIsFastHashTableEnabled()) { hashTableImplementationType = HashTableImplementationType.FAST; } else { hashTableImplementationType = HashTableImplementationType.OPTIMIZED; } int joinType = desc.getConds()[0].getType(); boolean isInnerBigOnly = false; if (joinType == JoinDesc.INNER_JOIN && isBigTableOnlyResults(desc)) { isInnerBigOnly = true; } // By default, we can always use the multi-key class. hashTableKeyType = HashTableKeyType.MULTI_KEY; if (!HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MULTIKEY_ONLY_ENABLED)) { // Look for single column optimization. byte posBigTable = (byte) desc.getPosBigTable(); Map<Byte, List<ExprNodeDesc>> keyExprs = desc.getKeys(); List<ExprNodeDesc> bigTableKeyExprs = keyExprs.get(posBigTable); if (bigTableKeyExprs.size() == 1) { TypeInfo typeInfo = bigTableKeyExprs.get(0).getTypeInfo(); LOG.info("Vectorizer vectorizeOperator map join typeName " + typeInfo.getTypeName()); switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) { case BOOLEAN: hashTableKeyType = HashTableKeyType.BOOLEAN; break; case BYTE: hashTableKeyType = HashTableKeyType.BYTE; break; case SHORT: hashTableKeyType = HashTableKeyType.SHORT; break; case INT: hashTableKeyType = HashTableKeyType.INT; break; case DATE: hashTableKeyType = HashTableKeyType.DATE; break; case LONG: hashTableKeyType = HashTableKeyType.LONG; break; case STRING: case CHAR: case VARCHAR: case BINARY: hashTableKeyType = HashTableKeyType.STRING; default: // Stay with multi-key. 
} } } switch (joinType) { case JoinDesc.INNER_JOIN: if (!isInnerBigOnly) { vectorMapJoinVariation = VectorMapJoinVariation.INNER; hashTableKind = HashTableKind.HASH_MAP; } else { vectorMapJoinVariation = VectorMapJoinVariation.INNER_BIG_ONLY; hashTableKind = HashTableKind.HASH_MULTISET; } break; case JoinDesc.LEFT_OUTER_JOIN: case JoinDesc.RIGHT_OUTER_JOIN: vectorMapJoinVariation = VectorMapJoinVariation.OUTER; hashTableKind = HashTableKind.HASH_MAP; break; case JoinDesc.FULL_OUTER_JOIN: vectorMapJoinVariation = VectorMapJoinVariation.FULL_OUTER; hashTableKind = HashTableKind.HASH_MAP; break; case JoinDesc.LEFT_SEMI_JOIN: vectorMapJoinVariation = VectorMapJoinVariation.LEFT_SEMI; hashTableKind = HashTableKind.HASH_SET; break; case JoinDesc.ANTI_JOIN: vectorMapJoinVariation = VectorMapJoinVariation.LEFT_ANTI; hashTableKind = HashTableKind.HASH_SET; break; default: throw new HiveException("Unknown join type " + joinType); } LOG.info("Vectorizer vectorizeOperator map join hashTableKind " + hashTableKind.name() + " hashTableKeyType " + hashTableKeyType.name()); switch (hashTableKeyType) { case BOOLEAN: case BYTE: case SHORT: case INT: case DATE: case LONG: switch (vectorMapJoinVariation) { case INNER: opClass = VectorMapJoinInnerLongOperator.class; break; case INNER_BIG_ONLY: opClass = VectorMapJoinInnerBigOnlyLongOperator.class; break; case LEFT_SEMI: opClass = VectorMapJoinLeftSemiLongOperator.class; break; case LEFT_ANTI: opClass = VectorMapJoinAntiJoinLongOperator.class; break; case OUTER: opClass = VectorMapJoinOuterLongOperator.class; break; case FULL_OUTER: opClass = VectorMapJoinFullOuterLongOperator.class; break; default: throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } break; case STRING: switch (vectorMapJoinVariation) { case INNER: opClass = VectorMapJoinInnerStringOperator.class; break; case INNER_BIG_ONLY: opClass = VectorMapJoinInnerBigOnlyStringOperator.class; break; case LEFT_SEMI: opClass = 
VectorMapJoinLeftSemiStringOperator.class; break; case LEFT_ANTI: opClass = VectorMapJoinAntiJoinStringOperator.class; break; case OUTER: opClass = VectorMapJoinOuterStringOperator.class; break; case FULL_OUTER: opClass = VectorMapJoinFullOuterStringOperator.class; break; default: throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } break; case MULTI_KEY: switch (vectorMapJoinVariation) { case INNER: opClass = VectorMapJoinInnerMultiKeyOperator.class; break; case INNER_BIG_ONLY: opClass = VectorMapJoinInnerBigOnlyMultiKeyOperator.class; break; case LEFT_SEMI: opClass = VectorMapJoinLeftSemiMultiKeyOperator.class; break; case LEFT_ANTI: opClass = VectorMapJoinAntiJoinMultiKeyOperator.class; break; case OUTER: opClass = VectorMapJoinOuterMultiKeyOperator.class; break; case FULL_OUTER: opClass = VectorMapJoinFullOuterMultiKeyOperator.class; break; default: throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } break; default: throw new RuntimeException("Unexpected hash table key type " + hashTableKeyType.name()); } boolean minMaxEnabled = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MINMAX_ENABLED); vectorDesc.setHashTableImplementationType(hashTableImplementationType); vectorDesc.setHashTableKind(hashTableKind); vectorDesc.setHashTableKeyType(hashTableKeyType); vectorDesc.setVectorMapJoinVariation(vectorMapJoinVariation); if (vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { vectorDesc.setIsFullOuter(true); } vectorDesc.setMinMaxEnabled(minMaxEnabled); vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo); vectorOp = OperatorFactory.getVectorOperator( opClass, op.getCompilationOpContext(), op.getConf(), vContext, vectorDesc); LOG.info("Vectorizer vectorizeOperator map join class " + vectorOp.getClass().getSimpleName()); return vectorOp; } public static boolean onExpressionHasNullSafes(MapJoinDesc desc) { boolean[] nullSafes = desc.getNullSafes(); if (nullSafes == 
null) { return false; } for (boolean nullSafe : nullSafes) { if (nullSafe) { return true; } } return false; } private boolean canSpecializeMapJoin(Operator<? extends OperatorDesc> op, MapJoinDesc desc, boolean isTezOrSpark, VectorizationContext vContext, VectorMapJoinDesc vectorDesc) throws HiveException { Preconditions.checkState(op instanceof MapJoinOperator); VectorMapJoinInfo vectorMapJoinInfo = new VectorMapJoinInfo(); boolean isVectorizationMapJoinNativeEnabled = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_ENABLED); String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE); boolean oneMapJoinCondition = (desc.getConds().length == 1); boolean hasNullSafes = onExpressionHasNullSafes(desc); byte posBigTable = (byte) desc.getPosBigTable(); // Since we want to display all the met and not met conditions in EXPLAIN, we determine all // information first.... List<ExprNodeDesc> keyDesc = desc.getKeys().get(posBigTable); boolean outerJoinHasNoKeys = (!desc.isNoOuterJoin() && keyDesc.size() == 0); // For now, we don't support joins on or using DECIMAL_64. VectorExpression[] allBigTableKeyExpressions = vContext.getVectorExpressionsUpConvertDecimal64(keyDesc); final int allBigTableKeyExpressionsLength = allBigTableKeyExpressions.length; boolean supportsKeyTypes = true; // Assume. HashSet<String> notSupportedKeyTypes = new HashSet<String>(); // Since a key expression can be a calculation and the key will go into a scratch column, // we need the mapping and type information. 
int[] bigTableKeyColumnMap = new int[allBigTableKeyExpressionsLength]; String[] bigTableKeyColumnNames = new String[allBigTableKeyExpressionsLength]; TypeInfo[] bigTableKeyTypeInfos = new TypeInfo[allBigTableKeyExpressionsLength]; ArrayList<VectorExpression> bigTableKeyExpressionsList = new ArrayList<VectorExpression>(); VectorExpression[] slimmedBigTableKeyExpressions; for (int i = 0; i < allBigTableKeyExpressionsLength; i++) { VectorExpression ve = allBigTableKeyExpressions[i]; if (!IdentityExpression.isColumnOnly(ve)) { bigTableKeyExpressionsList.add(ve); } bigTableKeyColumnMap[i] = ve.getOutputColumnNum(); ExprNodeDesc exprNode = keyDesc.get(i); bigTableKeyColumnNames[i] = exprNode.toString(); TypeInfo typeInfo = exprNode.getTypeInfo(); // Verify we handle the key column types for an optimized table. This is the effectively the // same check used in HashTableLoader. if (!MapJoinKey.isSupportedField(typeInfo)) { supportsKeyTypes = false; Category category = typeInfo.getCategory(); notSupportedKeyTypes.add( (category != Category.PRIMITIVE ? category.toString() : ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory().toString())); } bigTableKeyTypeInfos[i] = typeInfo; } if (bigTableKeyExpressionsList.size() == 0) { slimmedBigTableKeyExpressions = null; } else { slimmedBigTableKeyExpressions = bigTableKeyExpressionsList.toArray(new VectorExpression[0]); } List<ExprNodeDesc> bigTableExprs = desc.getExprs().get(posBigTable); // For now, we don't support joins on or using DECIMAL_64. VectorExpression[] allBigTableValueExpressions = vContext.getVectorExpressions(bigTableExprs); boolean isFastHashTableEnabled = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_FAST_HASHTABLE_ENABLED); // Especially since LLAP is prone to turn it off in the MapJoinDesc in later // physical optimizer stages... boolean isHybridHashJoin = desc.isHybridHashJoin(); /* * Populate vectorMapJoininfo. 
*/ /* * Similarly, we need a mapping since a value expression can be a calculation and the value * will go into a scratch column. * * Value expressions include keys? YES. */ boolean supportsValueTypes = true; // Assume. HashSet<String> notSupportedValueTypes = new HashSet<String>(); int[] bigTableValueColumnMap = new int[allBigTableValueExpressions.length]; String[] bigTableValueColumnNames = new String[allBigTableValueExpressions.length]; TypeInfo[] bigTableValueTypeInfos = new TypeInfo[allBigTableValueExpressions.length]; ArrayList<VectorExpression> bigTableValueExpressionsList = new ArrayList<VectorExpression>(); VectorExpression[] slimmedBigTableValueExpressions; for (int i = 0; i < bigTableValueColumnMap.length; i++) { VectorExpression ve = allBigTableValueExpressions[i]; if (!IdentityExpression.isColumnOnly(ve)) { bigTableValueExpressionsList.add(ve); } bigTableValueColumnMap[i] = ve.getOutputColumnNum(); ExprNodeDesc exprNode = bigTableExprs.get(i); bigTableValueColumnNames[i] = exprNode.toString(); TypeInfo typeInfo = exprNode.getTypeInfo(); if (!(typeInfo instanceof PrimitiveTypeInfo)) { supportsValueTypes = false; Category category = typeInfo.getCategory(); notSupportedValueTypes.add(category.toString()); } bigTableValueTypeInfos[i] = typeInfo; } if (bigTableValueExpressionsList.size() == 0) { slimmedBigTableValueExpressions = null; } else { slimmedBigTableValueExpressions = bigTableValueExpressionsList.toArray(new VectorExpression[0]); } vectorMapJoinInfo.setBigTableKeyColumnMap(bigTableKeyColumnMap); vectorMapJoinInfo.setBigTableKeyColumnNames(bigTableKeyColumnNames); vectorMapJoinInfo.setBigTableKeyTypeInfos(bigTableKeyTypeInfos); vectorMapJoinInfo.setSlimmedBigTableKeyExpressions(slimmedBigTableKeyExpressions); vectorDesc.setAllBigTableKeyExpressions(allBigTableKeyExpressions); vectorMapJoinInfo.setBigTableValueColumnMap(bigTableValueColumnMap); vectorMapJoinInfo.setBigTableValueColumnNames(bigTableValueColumnNames); 
vectorMapJoinInfo.setBigTableValueTypeInfos(bigTableValueTypeInfos); vectorMapJoinInfo.setSlimmedBigTableValueExpressions(slimmedBigTableValueExpressions); vectorDesc.setAllBigTableValueExpressions(allBigTableValueExpressions); /* * Column mapping. */ VectorColumnOutputMapping bigTableRetainMapping = new VectorColumnOutputMapping("Big Table Retain Mapping"); VectorColumnOutputMapping nonOuterSmallTableKeyMapping = new VectorColumnOutputMapping("Non Outer Small Table Key Key Mapping"); VectorColumnOutputMapping outerSmallTableKeyMapping = new VectorColumnOutputMapping("Outer Small Table Key Mapping"); VectorColumnSourceMapping fullOuterSmallTableKeyMapping = new VectorColumnSourceMapping("Full Outer Small Table Key Mapping"); // The order of the fields in the LazyBinary small table value must be used, so // we use the source ordering flavor for the mapping. VectorColumnSourceMapping smallTableValueMapping = new VectorColumnSourceMapping("Small Table Value Mapping"); Byte[] order = desc.getTagOrder(); Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? order[1] : order[0]); boolean isOuterJoin = !desc.getNoOuterJoin(); /* * Gather up big and small table output result information from the MapJoinDesc. 
*/ List<Integer> bigTableRetainList = desc.getRetainList().get(posBigTable); int[] smallTableIndices; int smallTableIndicesSize; List<ExprNodeDesc> smallTableExprs = desc.getExprs().get(posSingleVectorMapJoinSmallTable); if (desc.getValueIndices() != null && desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) { smallTableIndices = desc.getValueIndices().get(posSingleVectorMapJoinSmallTable); smallTableIndicesSize = smallTableIndices.length; } else { smallTableIndices = null; smallTableIndicesSize = 0; } List<Integer> smallTableRetainList = desc.getRetainList().get(posSingleVectorMapJoinSmallTable); int smallTableRetainSize = smallTableRetainList.size(); int smallTableResultSize = 0; if (smallTableIndicesSize > 0) { smallTableResultSize = smallTableIndicesSize; } else if (smallTableRetainSize > 0) { smallTableResultSize = smallTableRetainSize; } /* * Determine the big table retained mapping first so we can optimize out (with * projection) copying inner join big table keys in the subsequent small table results section. */ // We use a mapping object here so we can build the projection in any order and // get the ordered by 0 to n-1 output columns at the end. // // Also, to avoid copying a big table key into the small table result area for inner joins, // we reference it with the projection so there can be duplicate output columns // in the projection. VectorColumnSourceMapping projectionMapping = new VectorColumnSourceMapping("Projection Mapping"); int nextOutputColumn = (order[0] == posBigTable ? 0 : smallTableResultSize); final int bigTableRetainSize = bigTableRetainList.size(); for (int i = 0; i < bigTableRetainSize; i++) { // Since bigTableValueExpressions may do a calculation and produce a scratch column, we // need to map to the right batch column. 
int retainColumn = bigTableRetainList.get(i); int batchColumnIndex = bigTableValueColumnMap[retainColumn]; TypeInfo typeInfo = bigTableValueTypeInfos[i]; // With this map we project the big table batch to make it look like an output batch. projectionMapping.add(nextOutputColumn, batchColumnIndex, typeInfo); // Collect columns we copy from the big table batch to the overflow batch. if (!bigTableRetainMapping.containsOutputColumn(batchColumnIndex)) { // Tolerate repeated use of a big table column. bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo); } nextOutputColumn++; } /* * Now determine the small table results. */ boolean smallTableExprVectorizes = true; int firstSmallTableOutputColumn; firstSmallTableOutputColumn = (order[0] == posBigTable ? bigTableRetainSize : 0); nextOutputColumn = firstSmallTableOutputColumn; // Small table indices has more information (i.e. keys) than retain, so use it if it exists... if (smallTableIndicesSize > 0) { for (int i = 0; i < smallTableIndicesSize; i++) { if (smallTableIndices[i] >= 0) { // Zero and above numbers indicate a big table key is needed for // small table result "area". int keyIndex = smallTableIndices[i]; // Since bigTableKeyExpressions may do a calculation and produce a scratch column, we // need to map the right column. int bigTableKeyColumn = bigTableKeyColumnMap[keyIndex]; TypeInfo typeInfo = bigTableKeyTypeInfos[keyIndex]; if (!isOuterJoin) { // Optimize inner join keys of small table results. // Project the big table key into the small table result "area". projectionMapping.add(nextOutputColumn, bigTableKeyColumn, typeInfo); if (!bigTableRetainMapping.containsOutputColumn(bigTableKeyColumn)) { // When the Big Key is not retained in the output result, we do need to copy the // Big Table key into the overflow batch so the projection of it (Big Table key) to // the Small Table key will work properly... 
// nonOuterSmallTableKeyMapping.add(bigTableKeyColumn, bigTableKeyColumn, typeInfo); } } else { // For outer joins, since the small table key can be null when there for NOMATCH, // we must have a physical (scratch) column for those keys. We cannot use the // projection optimization used by non-[FULL} OUTER joins above. int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); outerSmallTableKeyMapping.add(bigTableKeyColumn, scratchColumn, typeInfo); // For FULL OUTER MapJoin, we need to be able to deserialize a Small Table key // into the output result. fullOuterSmallTableKeyMapping.add(keyIndex, scratchColumn, typeInfo); } } else { // Negative numbers indicate a column to be (deserialize) read from the small table's // LazyBinary value row. int smallTableValueIndex = -smallTableIndices[i] - 1; ExprNodeDesc smallTableExprNode = smallTableExprs.get(i); if (!validateExprNodeDesc(smallTableExprNode, "Small Table")) { clearNotVectorizedReason(); smallTableExprVectorizes = false; } TypeInfo typeInfo = smallTableExprNode.getTypeInfo(); // Make a new big table scratch column for the small table value. int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo); } nextOutputColumn++; } } else if (smallTableRetainSize > 0) { // Only small table values appear in join output result. for (int i = 0; i < smallTableRetainSize; i++) { int smallTableValueIndex = smallTableRetainList.get(i); ExprNodeDesc smallTableExprNode = smallTableExprs.get(i); if (!validateExprNodeDesc(smallTableExprNode, "Small Table")) { clearNotVectorizedReason(); smallTableExprVectorizes = false; } // Make a new big table scratch column for the small table value. 
TypeInfo typeInfo = smallTableExprNode.getTypeInfo(); int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo); nextOutputColumn++; } } Map<Byte, List<ExprNodeDesc>> filterExpressions = desc.getFilters(); VectorExpression[] bigTableFilterExpressions = vContext.getVectorExpressions( filterExpressions.get(posBigTable), VectorExpressionDescriptor.Mode.FILTER); vectorMapJoinInfo.setBigTableFilterExpressions(bigTableFilterExpressions); boolean useOptimizedTable = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEMAPJOINUSEOPTIMIZEDTABLE); // Remember the condition variables for EXPLAIN regardless of whether we specialize or not. vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo); vectorDesc.setUseOptimizedTable(useOptimizedTable); vectorDesc.setIsVectorizationMapJoinNativeEnabled(isVectorizationMapJoinNativeEnabled); vectorDesc.setEngine(engine); vectorDesc.setOneMapJoinCondition(oneMapJoinCondition); vectorDesc.setHasNullSafes(hasNullSafes); vectorDesc.setSmallTableExprVectorizes(smallTableExprVectorizes); vectorDesc.setOuterJoinHasNoKeys(outerJoinHasNoKeys); vectorDesc.setIsFastHashTableEnabled(isFastHashTableEnabled); vectorDesc.setIsHybridHashJoin(isHybridHashJoin); vectorDesc.setSupportsKeyTypes(supportsKeyTypes); if (!supportsKeyTypes) { vectorDesc.setNotSupportedKeyTypes(new ArrayList<>(notSupportedKeyTypes)); } vectorDesc.setSupportsValueTypes(supportsValueTypes); if (!supportsValueTypes) { vectorDesc.setNotSupportedValueTypes(new ArrayList<>(notSupportedValueTypes)); } // Check common conditions for both Optimized and Fast Hash Tables. boolean result = true; // Assume. 
if (!useOptimizedTable || !isVectorizationMapJoinNativeEnabled || !isTezOrSpark || !oneMapJoinCondition || hasNullSafes || !smallTableExprVectorizes || outerJoinHasNoKeys || !supportsValueTypes) { result = false; } // supportsKeyTypes if (!isFastHashTableEnabled) { // Check optimized-only hash table restrictions. if (!supportsKeyTypes) { result = false; } } else { // With the fast hash table implementation, we currently do not support // Hybrid Grace Hash Join. if (isHybridHashJoin) { result = false; } } // Convert dynamic arrays and maps to simple arrays. bigTableRetainMapping.finalize(); vectorMapJoinInfo.setBigTableRetainColumnMap(bigTableRetainMapping.getOutputColumns()); vectorMapJoinInfo.setBigTableRetainTypeInfos(bigTableRetainMapping.getTypeInfos()); nonOuterSmallTableKeyMapping.finalize(); vectorMapJoinInfo.setNonOuterSmallTableKeyColumnMap(nonOuterSmallTableKeyMapping.getOutputColumns()); vectorMapJoinInfo.setNonOuterSmallTableKeyTypeInfos(nonOuterSmallTableKeyMapping.getTypeInfos()); outerSmallTableKeyMapping.finalize(); fullOuterSmallTableKeyMapping.finalize(); vectorMapJoinInfo.setOuterSmallTableKeyMapping(outerSmallTableKeyMapping); vectorMapJoinInfo.setFullOuterSmallTableKeyMapping(fullOuterSmallTableKeyMapping); smallTableValueMapping.finalize(); vectorMapJoinInfo.setSmallTableValueMapping(smallTableValueMapping); projectionMapping.finalize(); // Verify we added an entry for each output. assert projectionMapping.isSourceSequenceGood(); vectorMapJoinInfo.setProjectionMapping(projectionMapping); return result; } private Operator<? extends OperatorDesc> specializeReduceSinkOperator( Operator<? 
extends OperatorDesc> op, VectorizationContext vContext, ReduceSinkDesc desc, VectorReduceSinkDesc vectorDesc) throws HiveException { VectorReduceSinkInfo vectorReduceSinkInfo = vectorDesc.getVectorReduceSinkInfo(); Type[] reduceSinkKeyColumnVectorTypes = vectorReduceSinkInfo.getReduceSinkKeyColumnVectorTypes(); // By default, we can always use the multi-key class. VectorReduceSinkDesc.ReduceSinkKeyType reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.MULTI_KEY; // Look for single column optimization. if (reduceSinkKeyColumnVectorTypes != null && reduceSinkKeyColumnVectorTypes.length == 1) { LOG.info("Vectorizer vectorizeOperator groupby typeName " + vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]); Type columnVectorType = reduceSinkKeyColumnVectorTypes[0]; switch (columnVectorType) { case LONG: { PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]).getPrimitiveCategory(); switch (primitiveCategory) { case BOOLEAN: case BYTE: case SHORT: case INT: case DATE: case LONG: reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.LONG; break; default: // For any remaining Long CV types use default multi-key LOG.warn("Unsupported Long-CV key type {} defaulted to multi-key ReduceSink", primitiveCategory); break; } } break; case BYTES: reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.STRING; default: // Stay with multi-key. break; } } Class<? 
extends Operator<?>> opClass = null; if (vectorReduceSinkInfo.getUseUniformHash()) { if (vectorDesc.getIsEmptyKey()) { opClass = VectorReduceSinkEmptyKeyOperator.class; } else { switch (reduceSinkKeyType) { case LONG: opClass = VectorReduceSinkLongOperator.class; break; case STRING: opClass = VectorReduceSinkStringOperator.class; break; case MULTI_KEY: opClass = VectorReduceSinkMultiKeyOperator.class; break; default: throw new HiveException("Unknown reduce sink key type " + reduceSinkKeyType); } } } else { if (vectorDesc.getIsEmptyKey() && vectorDesc.getIsEmptyBuckets() && vectorDesc.getIsEmptyPartitions()) { opClass = VectorReduceSinkEmptyKeyOperator.class; } else { opClass = VectorReduceSinkObjectHashOperator.class; } } vectorDesc.setReduceSinkKeyType(reduceSinkKeyType); vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo); LOG.info("Vectorizer vectorizeOperator reduce sink class " + opClass.getSimpleName()); Operator<? extends OperatorDesc> vectorOp = null; try { vectorOp = OperatorFactory.getVectorOperator( opClass, op.getCompilationOpContext(), op.getConf(), vContext, vectorDesc); } catch (Exception e) { LOG.info("Vectorizer vectorizeOperator reduce sink class exception " + opClass.getSimpleName() + " exception " + e); throw new HiveException(e); } Preconditions.checkArgument(vectorOp instanceof VectorReduceSinkCommonOperator); return vectorOp; } private boolean canSpecializeReduceSink(ReduceSinkDesc desc, boolean isTezOrSpark, VectorizationContext vContext, VectorReduceSinkDesc vectorDesc) throws HiveException { VectorReduceSinkInfo vectorReduceSinkInfo = new VectorReduceSinkInfo(); // Various restrictions. // Set this if we encounter a condition we were not expecting. 
  boolean isUnexpectedCondition = false;
  boolean isVectorizationReduceSinkNativeEnabled =
      HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCESINK_NEW_ENABLED);
  String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
  // Top-N (LIMIT pushdown) for PTF reduce sinks is not supported natively.
  int limit = desc.getTopN();
  float memUsage = desc.getTopNMemoryUsage();
  boolean hasPTFTopN = (limit >= 0 && memUsage > 0 && desc.isPTFReduceSink());
  boolean hasDistinctColumns = (desc.getDistinctColumnIndices().size() > 0);
  // Native ReduceSink requires BinarySortable keys and LazyBinary values.
  TableDesc keyTableDesc = desc.getKeySerializeInfo();
  Class<? extends Deserializer> keySerializerClass = keyTableDesc.getSerDeClass();
  boolean isKeyBinarySortable =
      (keySerializerClass == org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe.class);
  TableDesc valueTableDesc = desc.getValueSerializeInfo();
  Class<? extends Deserializer> valueDeserializerClass = valueTableDesc.getSerDeClass();
  boolean isValueLazyBinary =
      (valueDeserializerClass == org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe.class) ||
      (valueDeserializerClass == org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe2.class);
  // We are doing work here we'd normally do in VectorGroupByCommonOperator's constructor.
  // So if we later decide not to specialize, we'll just waste any scratch columns allocated...
  List<ExprNodeDesc> keysDescs = desc.getKeyCols();
  final boolean isEmptyKey = (keysDescs.size() == 0);
  if (!isEmptyKey) {
    VectorExpression[] allKeyExpressions = vContext.getVectorExpressions(keysDescs);
    final int[] reduceSinkKeyColumnMap = new int[allKeyExpressions.length];
    final TypeInfo[] reduceSinkKeyTypeInfos = new TypeInfo[allKeyExpressions.length];
    final Type[] reduceSinkKeyColumnVectorTypes = new Type[allKeyExpressions.length];
    final VectorExpression[] reduceSinkKeyExpressions;
    // Since a key expression can be a calculation and the key will go into a scratch column,
    // we need the mapping and type information.
    ArrayList<VectorExpression> groupByKeyExpressionsList = new ArrayList<VectorExpression>();
    for (int i = 0; i < reduceSinkKeyColumnMap.length; i++) {
      VectorExpression ve = allKeyExpressions[i];
      reduceSinkKeyColumnMap[i] = ve.getOutputColumnNum();
      reduceSinkKeyTypeInfos[i] = keysDescs.get(i).getTypeInfo();
      reduceSinkKeyColumnVectorTypes[i] =
          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkKeyTypeInfos[i]);
      // Only non-identity expressions (real computations) need to be evaluated at run time.
      if (!IdentityExpression.isColumnOnly(ve)) {
        groupByKeyExpressionsList.add(ve);
      }
    }
    if (groupByKeyExpressionsList.size() == 0) {
      reduceSinkKeyExpressions = null;
    } else {
      reduceSinkKeyExpressions = groupByKeyExpressionsList.toArray(new VectorExpression[0]);
    }
    vectorReduceSinkInfo.setReduceSinkKeyColumnMap(reduceSinkKeyColumnMap);
    vectorReduceSinkInfo.setReduceSinkKeyTypeInfos(reduceSinkKeyTypeInfos);
    vectorReduceSinkInfo.setReduceSinkKeyColumnVectorTypes(reduceSinkKeyColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkKeyExpressions(reduceSinkKeyExpressions);
  }
  // Same gathering for the value columns: map, types, and any computed expressions.
  List<ExprNodeDesc> valueDescs = desc.getValueCols();
  final boolean isEmptyValue = (valueDescs.size() == 0);
  if (!isEmptyValue) {
    VectorExpression[] allValueExpressions = vContext.getVectorExpressions(valueDescs);
    final int[] reduceSinkValueColumnMap = new int[allValueExpressions.length];
    final TypeInfo[] reduceSinkValueTypeInfos = new TypeInfo[allValueExpressions.length];
    final Type[] reduceSinkValueColumnVectorTypes = new Type[allValueExpressions.length];
    VectorExpression[] reduceSinkValueExpressions;
    ArrayList<VectorExpression> reduceSinkValueExpressionsList =
        new ArrayList<VectorExpression>();
    for (int i = 0; i < valueDescs.size(); ++i) {
      VectorExpression ve = allValueExpressions[i];
      reduceSinkValueColumnMap[i] = ve.getOutputColumnNum();
      reduceSinkValueTypeInfos[i] = valueDescs.get(i).getTypeInfo();
      reduceSinkValueColumnVectorTypes[i] =
          VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkValueTypeInfos[i]);
      if (!IdentityExpression.isColumnOnly(ve)) {
        reduceSinkValueExpressionsList.add(ve);
      }
    }
    if (reduceSinkValueExpressionsList.size() == 0) {
      reduceSinkValueExpressions = null;
    } else {
      reduceSinkValueExpressions =
          reduceSinkValueExpressionsList.toArray(new VectorExpression[0]);
    }
    vectorReduceSinkInfo.setReduceSinkValueColumnMap(reduceSinkValueColumnMap);
    vectorReduceSinkInfo.setReduceSinkValueTypeInfos(reduceSinkValueTypeInfos);
    vectorReduceSinkInfo.setReduceSinkValueColumnVectorTypes(reduceSinkValueColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkValueExpressions(reduceSinkValueExpressions);
  }
  boolean useUniformHash = desc.getReducerTraits().contains(UNIFORM);
  vectorReduceSinkInfo.setUseUniformHash(useUniformHash);
  List<ExprNodeDesc> bucketDescs = desc.getBucketCols();
  final boolean isEmptyBuckets = (bucketDescs == null || bucketDescs.size() == 0);
  List<ExprNodeDesc> partitionDescs = desc.getPartitionCols();
  final boolean isEmptyPartitions = (partitionDescs == null || partitionDescs.size() == 0);
  if (useUniformHash || (isEmptyKey && isEmptyBuckets && isEmptyPartitions)) {
    // NOTE: For Uniform Hash or no buckets/partitions, when the key is empty, we will use the
    // VectorReduceSinkEmptyKeyOperator instead.
  } else {
    // Collect bucket and/or partition information for object hashing.
    int[] reduceSinkBucketColumnMap = null;
    TypeInfo[] reduceSinkBucketTypeInfos = null;
    Type[] reduceSinkBucketColumnVectorTypes = null;
    VectorExpression[] reduceSinkBucketExpressions = null;
    if (!isEmptyBuckets) {
      VectorExpression[] allBucketExpressions = vContext.getVectorExpressions(bucketDescs);
      reduceSinkBucketColumnMap = new int[bucketDescs.size()];
      reduceSinkBucketTypeInfos = new TypeInfo[bucketDescs.size()];
      reduceSinkBucketColumnVectorTypes = new Type[bucketDescs.size()];
      ArrayList<VectorExpression> reduceSinkBucketExpressionsList =
          new ArrayList<VectorExpression>();
      for (int i = 0; i < bucketDescs.size(); ++i) {
        VectorExpression ve = allBucketExpressions[i];
        reduceSinkBucketColumnMap[i] = ve.getOutputColumnNum();
        reduceSinkBucketTypeInfos[i] = bucketDescs.get(i).getTypeInfo();
        reduceSinkBucketColumnVectorTypes[i] =
            VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkBucketTypeInfos[i]);
        if (!IdentityExpression.isColumnOnly(ve)) {
          reduceSinkBucketExpressionsList.add(ve);
        }
      }
      if (reduceSinkBucketExpressionsList.size() == 0) {
        reduceSinkBucketExpressions = null;
      } else {
        reduceSinkBucketExpressions =
            reduceSinkBucketExpressionsList.toArray(new VectorExpression[0]);
      }
    }
    int[] reduceSinkPartitionColumnMap = null;
    TypeInfo[] reduceSinkPartitionTypeInfos = null;
    Type[] reduceSinkPartitionColumnVectorTypes = null;
    VectorExpression[] reduceSinkPartitionExpressions = null;
    if (!isEmptyPartitions) {
      VectorExpression[] allPartitionExpressions =
          vContext.getVectorExpressions(partitionDescs);
      reduceSinkPartitionColumnMap = new int[partitionDescs.size()];
      reduceSinkPartitionTypeInfos = new TypeInfo[partitionDescs.size()];
      reduceSinkPartitionColumnVectorTypes = new Type[partitionDescs.size()];
      ArrayList<VectorExpression> reduceSinkPartitionExpressionsList =
          new ArrayList<VectorExpression>();
      for (int i = 0; i < partitionDescs.size(); ++i) {
        VectorExpression ve = allPartitionExpressions[i];
        reduceSinkPartitionColumnMap[i] = ve.getOutputColumnNum();
        reduceSinkPartitionTypeInfos[i] = partitionDescs.get(i).getTypeInfo();
        reduceSinkPartitionColumnVectorTypes[i] =
            VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkPartitionTypeInfos[i]);
        if (!IdentityExpression.isColumnOnly(ve)) {
          reduceSinkPartitionExpressionsList.add(ve);
        }
      }
      if (reduceSinkPartitionExpressionsList.size() == 0) {
        reduceSinkPartitionExpressions = null;
      } else {
        reduceSinkPartitionExpressions =
            reduceSinkPartitionExpressionsList.toArray(new VectorExpression[0]);
      }
    }
    vectorReduceSinkInfo.setReduceSinkBucketColumnMap(reduceSinkBucketColumnMap);
    vectorReduceSinkInfo.setReduceSinkBucketTypeInfos(reduceSinkBucketTypeInfos);
    vectorReduceSinkInfo.setReduceSinkBucketColumnVectorTypes(reduceSinkBucketColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkBucketExpressions(reduceSinkBucketExpressions);
    vectorReduceSinkInfo.setReduceSinkPartitionColumnMap(reduceSinkPartitionColumnMap);
    vectorReduceSinkInfo.setReduceSinkPartitionTypeInfos(reduceSinkPartitionTypeInfos);
    vectorReduceSinkInfo.setReduceSinkPartitionColumnVectorTypes(reduceSinkPartitionColumnVectorTypes);
    vectorReduceSinkInfo.setReduceSinkPartitionExpressions(reduceSinkPartitionExpressions);
  }
  // Remember the condition variables for EXPLAIN regardless.
  vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo);
  vectorDesc.setIsVectorizationReduceSinkNativeEnabled(isVectorizationReduceSinkNativeEnabled);
  vectorDesc.setEngine(engine);
  vectorDesc.setIsEmptyKey(isEmptyKey);
  vectorDesc.setIsEmptyValue(isEmptyValue);
  vectorDesc.setIsEmptyBuckets(isEmptyBuckets);
  vectorDesc.setIsEmptyPartitions(isEmptyPartitions);
  vectorDesc.setHasPTFTopN(hasPTFTopN);
  vectorDesc.setHasDistinctColumns(hasDistinctColumns);
  vectorDesc.setIsKeyBinarySortable(isKeyBinarySortable);
  vectorDesc.setIsValueLazyBinary(isValueLazyBinary);
  vectorDesc.setIsAcidChange(desc.getWriteType() == AcidUtils.Operation.DELETE ||
      desc.getWriteType() == AcidUtils.Operation.UPDATE);
  // This indicates we logged an inconsistency (from our point-of-view) and will not make this
  // operator native...
  vectorDesc.setIsUnexpectedCondition(isUnexpectedCondition);
  // Many restrictions.
  if (!isVectorizationReduceSinkNativeEnabled ||
      !isTezOrSpark ||
      hasPTFTopN ||
      hasDistinctColumns ||
      !isKeyBinarySortable ||
      !isValueLazyBinary ||
      isUnexpectedCondition) {
    return false;
  }
  return true;
}

/**
 * Returns true when this FileSink writes through the Arrow columnar SerDe, native Arrow
 * file sinks are enabled, and the execution engine is Tez.
 */
private boolean checkForArrowFileSink(FileSinkDesc fileSinkDesc,
    boolean isTezOrSpark, VectorizationContext vContext,
    VectorFileSinkDesc vectorDesc)
        throws HiveException {
  // Various restrictions.
  boolean isVectorizationFileSinkArrowNativeEnabled =
      HiveConf.getBoolVar(hiveConf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_FILESINK_ARROW_NATIVE_ENABLED);
  String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
  String serdeClassName = fileSinkDesc.getTableInfo().getSerdeClassName();
  boolean isOkArrowFileSink =
      serdeClassName.equals("org.apache.hadoop.hive.ql.io.arrow.ArrowColumnarBatchSerDe") &&
      isVectorizationFileSinkArrowNativeEnabled &&
      engine.equalsIgnoreCase("tez");
  return isOkArrowFileSink;
}

private Operator<? extends OperatorDesc> specializeArrowFileSinkOperator(
    Operator<?
extends OperatorDesc> op, VectorizationContext vContext, FileSinkDesc desc, VectorFileSinkDesc vectorDesc) throws HiveException { Class<? extends Operator<?>> opClass = VectorFileSinkArrowOperator.class; Operator<? extends OperatorDesc> vectorOp = null; try { vectorOp = OperatorFactory.getVectorOperator( opClass, op.getCompilationOpContext(), op.getConf(), vContext, vectorDesc); } catch (Exception e) { LOG.info("Vectorizer vectorizeOperator file sink class exception " + opClass.getSimpleName() + " exception " + e); throw new HiveException(e); } return vectorOp; } private boolean usesVectorUDFAdaptor(VectorExpression vecExpr) { if (vecExpr == null) { return false; } if (vecExpr instanceof VectorUDFAdaptor) { return true; } if (usesVectorUDFAdaptor(vecExpr.getChildExpressions())) { return true; } return false; } private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { if (vecExprs == null) { return false; } for (VectorExpression vecExpr : vecExprs) { if (usesVectorUDFAdaptor(vecExpr)) { return true; } } return false; } public static Operator<? extends OperatorDesc> vectorizeFilterOperator( Operator<? extends OperatorDesc> filterOp, VectorizationContext vContext, VectorFilterDesc vectorFilterDesc) throws HiveException { FilterDesc filterDesc = (FilterDesc) filterOp.getConf(); ExprNodeDesc predicateExpr = filterDesc.getPredicate(); VectorExpression vectorPredicateExpr = vContext.getVectorExpression(predicateExpr, VectorExpressionDescriptor.Mode.FILTER); vectorFilterDesc.setPredicateExpression(vectorPredicateExpr); return OperatorFactory.getVectorOperator( filterOp.getCompilationOpContext(), filterDesc, vContext, vectorFilterDesc); } private static Operator<? extends OperatorDesc> vectorizeTopNKeyOperator( Operator<? 
extends OperatorDesc> topNKeyOperator, VectorizationContext vContext,
    VectorTopNKeyDesc vectorTopNKeyDesc)
        throws HiveException {
  TopNKeyDesc topNKeyDesc = (TopNKeyDesc) topNKeyOperator.getConf();
  VectorExpression[] keyExpressions = getVectorExpressions(vContext, topNKeyDesc.getKeyColumns());
  VectorExpression[] partitionKeyExpressions =
      getVectorExpressions(vContext, topNKeyDesc.getPartitionKeyColumns());
  vectorTopNKeyDesc.setKeyExpressions(keyExpressions);
  vectorTopNKeyDesc.setPartitionKeyColumns(partitionKeyExpressions);
  return OperatorFactory.getVectorOperator(
      topNKeyOperator.getCompilationOpContext(), topNKeyDesc, vContext, vectorTopNKeyDesc);
}

/**
 * Vectorizes {@code keyColumns}, up-converting DECIMAL_64 and fixing decimal physical
 * variations, while releasing any temporarily marked scratch columns afterwards.
 */
private static VectorExpression[] getVectorExpressions(VectorizationContext vContext,
    List<ExprNodeDesc> keyColumns) throws HiveException {
  VectorExpression[] keyExpressions;
  vContext.markActualScratchColumns();
  try {
    keyExpressions = vContext.getVectorExpressionsUpConvertDecimal64(keyColumns);
    fixDecimalDataTypePhysicalVariations(vContext, keyExpressions);
  } finally {
    // Always release scratch columns marked above, even if vectorization fails.
    vContext.freeMarkedScratchColumns();
  }
  return keyExpressions;
}

/**
 * Finds the first candidate VectorAggregateExpression class whose matches() accepts the
 * given aggregate name, input/output column-vector types, and UDAF evaluator mode.
 *
 * @return the matching class, or null when no candidate matches
 * @throws HiveException if a candidate class cannot be instantiated for the probe
 */
private static Class<? extends VectorAggregateExpression> findVecAggrClass(
    Class<? extends VectorAggregateExpression>[] vecAggrClasses,
    String aggregateName, ColumnVector.Type inputColVectorType,
    ColumnVector.Type outputColumnVecType, GenericUDAFEvaluator.Mode udafEvaluatorMode)
        throws HiveException {
  for (Class<? extends VectorAggregateExpression> vecAggrClass : vecAggrClasses) {
    VectorAggregateExpression vecAggrExprCheck;
    try {
      // Use getDeclaredConstructor().newInstance() instead of the deprecated
      // Class.newInstance(): identical behavior for these no-arg classes, but constructor
      // exceptions are properly wrapped instead of being rethrown unchecked.
      vecAggrExprCheck = vecAggrClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      throw new HiveException(
          vecAggrClass.getSimpleName() + "() failed to initialize", e);
    }
    if (vecAggrExprCheck.matches(
        aggregateName, inputColVectorType, outputColumnVecType, udafEvaluatorMode)) {
      return vecAggrClass;
    }
  }
  return null;
}

private static ImmutablePair<VectorAggregationDesc,String> getVectorAggregationDesc(
    AggregationDesc aggrDesc, VectorizationContext vContext)
        throws HiveException {
  String aggregateName = aggrDesc.getGenericUDAFName();
  List<ExprNodeDesc> parameterList = aggrDesc.getParameters();
  final int parameterCount = parameterList.size();
  final GenericUDAFEvaluator.Mode udafEvaluatorMode = aggrDesc.getMode();
  /*
   * Look at evaluator to get output type info.
   */
  GenericUDAFEvaluator evaluator = aggrDesc.getGenericUDAFEvaluator();
  ObjectInspector[] parameterObjectInspectors = new ObjectInspector[parameterCount];
  for (int i = 0; i < parameterCount; i++) {
    TypeInfo typeInfo = parameterList.get(i).getTypeInfo();
    parameterObjectInspectors[i] = TypeInfoUtils
        .getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
  }
  // The only way to get the return object inspector (and its return type) is to
  // initialize it...
ObjectInspector returnOI = evaluator.init( aggrDesc.getMode(), parameterObjectInspectors); final TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(returnOI.getTypeName()); return getVectorAggregationDesc( aggregateName, parameterList, evaluator, outputTypeInfo, udafEvaluatorMode, vContext); } public static ImmutablePair<VectorAggregationDesc,String> getVectorAggregationDesc( String aggregationName, List<ExprNodeDesc> parameterList, GenericUDAFEvaluator evaluator, TypeInfo outputTypeInfo, GenericUDAFEvaluator.Mode udafEvaluatorMode, VectorizationContext vContext) throws HiveException { VectorizedUDAFs annotation = AnnotationUtils.getAnnotation(evaluator.getClass(), VectorizedUDAFs.class); if (annotation == null) { String issue = "Evaluator " + evaluator.getClass().getSimpleName() + " does not have a " + "vectorized UDAF annotation (aggregation: \"" + aggregationName + "\"). " + "Vectorization not supported"; return new ImmutablePair<VectorAggregationDesc,String>(null, issue); } final Class<? extends VectorAggregateExpression>[] vecAggrClasses = annotation.value(); // Not final since it may change later due to DECIMAL_64. ColumnVector.Type outputColVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(outputTypeInfo); /* * Determine input type info. */ final TypeInfo inputTypeInfo; // Not final since it may change later due to DECIMAL_64. 
VectorExpression inputExpression; ColumnVector.Type inputColVectorType; final int parameterCount = parameterList.size(); if (parameterCount == 0) { // COUNT(*) inputTypeInfo = null; inputColVectorType = null; inputExpression = null; } else if (parameterCount == 1) { ExprNodeDesc exprNodeDesc = parameterList.get(0); inputTypeInfo = exprNodeDesc.getTypeInfo(); if (inputTypeInfo == null) { String issue ="Aggregations with null parameter type not supported " + aggregationName + "(" + parameterList.toString() + ")"; return new ImmutablePair<VectorAggregationDesc,String>(null, issue); } /* * Determine an *initial* input vector expression. * * Note: we may have to convert it later from DECIMAL_64 to regular decimal. */ inputExpression = vContext.getVectorExpression( exprNodeDesc, VectorExpressionDescriptor.Mode.PROJECTION); if (inputExpression == null) { String issue ="Parameter expression " + exprNodeDesc.toString() + " not supported " + aggregationName + "(" + parameterList.toString() + ")"; return new ImmutablePair<VectorAggregationDesc,String>(null, issue); } if (inputExpression.getOutputTypeInfo() == null) { String issue ="Parameter expression " + exprNodeDesc.toString() + " with null type not supported " + aggregationName + "(" + parameterList.toString() + ")"; return new ImmutablePair<VectorAggregationDesc,String>(null, issue); } inputColVectorType = inputExpression.getOutputColumnVectorType(); } else { // No multi-parameter aggregations supported. String issue ="Aggregations with > 1 parameter are not supported " + aggregationName + "(" + parameterList.toString() + ")"; return new ImmutablePair<VectorAggregationDesc,String>(null, issue); } /* * When we have DECIMAL_64 as the input parameter then we have to see if there is a special * vector UDAF for it. If not we will need to convert the input parameter. 
*/ if (inputTypeInfo != null && inputColVectorType == ColumnVector.Type.DECIMAL_64) { if (outputColVectorType == ColumnVector.Type.DECIMAL) { DecimalTypeInfo outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo; if (HiveDecimalWritable.isPrecisionDecimal64(outputDecimalTypeInfo.getPrecision())) { // Try with DECIMAL_64 input and DECIMAL_64 output. final Class<? extends VectorAggregateExpression> vecAggrClass = findVecAggrClass( vecAggrClasses, aggregationName, inputColVectorType, ColumnVector.Type.DECIMAL_64, udafEvaluatorMode); if (vecAggrClass != null) { final VectorAggregationDesc vecAggrDesc = new VectorAggregationDesc( aggregationName, evaluator, udafEvaluatorMode, inputTypeInfo, inputColVectorType, inputExpression, outputTypeInfo, ColumnVector.Type.DECIMAL_64, vecAggrClass); return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null); } } // Try with regular DECIMAL output type. final Class<? extends VectorAggregateExpression> vecAggrClass = findVecAggrClass( vecAggrClasses, aggregationName, inputColVectorType, outputColVectorType, udafEvaluatorMode); if (vecAggrClass != null) { final VectorAggregationDesc vecAggrDesc = new VectorAggregationDesc( aggregationName, evaluator, udafEvaluatorMode, inputTypeInfo, inputColVectorType, inputExpression, outputTypeInfo, outputColVectorType, vecAggrClass); return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null); } // No support for DECIMAL_64 input. We must convert. inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression); inputColVectorType = ColumnVector.Type.DECIMAL; // Fall through... } else { // Try with with DECIMAL_64 input and desired output type. final Class<? 
extends VectorAggregateExpression> vecAggrClass =
    findVecAggrClass(
        vecAggrClasses, aggregationName, inputColVectorType, outputColVectorType,
        udafEvaluatorMode);
if (vecAggrClass != null) {

  // for now, disable operating on decimal64 column vectors for semijoin reduction as
  // we have to make sure same decimal type should be used during bloom filter creation
  // and bloom filter probing
  if (aggregationName.equals("bloom_filter")) {
    inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression);
    inputColVectorType = ColumnVector.Type.DECIMAL;
  }

  // DECIMAL_64 match found: build the vector aggregation descriptor and return success
  // (right side of the pair is the "issue" string, null here).
  final VectorAggregationDesc vecAggrDesc =
      new VectorAggregationDesc(
          aggregationName, evaluator, udafEvaluatorMode, inputTypeInfo, inputColVectorType,
          inputExpression, outputTypeInfo, outputColVectorType, vecAggrClass);
  return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}

// No support for DECIMAL_64 input. We must convert.
inputExpression = vContext.wrapWithDecimal64ToDecimalConversion(inputExpression);
inputColVectorType = ColumnVector.Type.DECIMAL;

// Fall through...
}
}

/*
 * Look for normal match.
 */
Class<? extends VectorAggregateExpression> vecAggrClass =
    findVecAggrClass(
        vecAggrClasses, aggregationName, inputColVectorType, outputColVectorType,
        udafEvaluatorMode);
if (vecAggrClass != null) {
  final VectorAggregationDesc vecAggrDesc =
      new VectorAggregationDesc(
          aggregationName, evaluator, udafEvaluatorMode, inputTypeInfo, inputColVectorType,
          inputExpression, outputTypeInfo, outputColVectorType, vecAggrClass);
  return new ImmutablePair<VectorAggregationDesc,String>(vecAggrDesc, null);
}

// No match? Return a null descriptor plus a human-readable reason for the failure.
String issue =
    "Vector aggregation : \"" + aggregationName + "\" " +
    "for input type: " +
    (inputColVectorType == null ?
"any" : "\"" + inputColVectorType) + "\" " +
    "and output type: \"" + outputColVectorType + "\" " +
    "and mode: " + udafEvaluatorMode + " not supported for " +
    "evaluator " + evaluator.getClass().getSimpleName();
return new ImmutablePair<VectorAggregationDesc,String>(null, issue);
}

/**
 * Vectorizes a GROUP BY operator.
 *
 * Thin wrapper around doVectorizeGroupByOperator that discards the failure-reason
 * string and returns only the (possibly null) vectorized operator.
 */
public static Operator<? extends OperatorDesc> vectorizeGroupByOperator(
    Operator<? extends OperatorDesc> groupByOp, VectorizationContext vContext,
    VectorGroupByDesc vectorGroupByDesc) throws HiveException {
  ImmutablePair<Operator<? extends OperatorDesc>,String> pair =
      doVectorizeGroupByOperator(
          groupByOp, vContext, vectorGroupByDesc);
  return pair.left;
}

/*
 * NOTE: The VectorGroupByDesc has already been allocated and will be updated here.
 *
 * Returns either (vectorized operator, null) on success or (null, reason string)
 * when some aggregation cannot be vectorized.
 */
private static ImmutablePair<Operator<? extends OperatorDesc>,String> doVectorizeGroupByOperator(
    Operator<? extends OperatorDesc> groupByOp, VectorizationContext vContext,
    VectorGroupByDesc vectorGroupByDesc) throws HiveException {

  GroupByDesc groupByDesc = (GroupByDesc) groupByOp.getConf();
  List<ExprNodeDesc> keysDesc = groupByDesc.getKeys();

  // For now, we don't support group by on DECIMAL_64 keys.
  VectorExpression[] vecKeyExpressions =
      vContext.getVectorExpressionsUpConvertDecimal64(keysDesc);
  List<AggregationDesc> aggrDesc = groupByDesc.getAggregators();
  final int size = aggrDesc.size();

  VectorAggregationDesc[] vecAggrDescs = new VectorAggregationDesc[size];
  int[] projectedOutputColumns = new int[size];
  for (int i = 0; i < size; ++i) {
    AggregationDesc aggDesc = aggrDesc.get(i);
    ImmutablePair<VectorAggregationDesc,String> pair =
        getVectorAggregationDesc(aggDesc, vContext);
    if (pair.left == null) {
      // Propagate the per-aggregation "not vectorizable" reason to the caller.
      return new ImmutablePair<Operator<? extends OperatorDesc>, String>(null, pair.right);
    }
    vecAggrDescs[i] = pair.left;

    // GroupBy generates a new vectorized row batch...
    projectedOutputColumns[i] = i;
  }

  vectorGroupByDesc.setKeyExpressions(vecKeyExpressions);
  vectorGroupByDesc.setVecAggrDescs(vecAggrDescs);
  vectorGroupByDesc.setProjectedOutputColumns(projectedOutputColumns);

  Operator<GroupByDesc> vectorOp =
      OperatorFactory.getVectorOperator(
          groupByOp.getCompilationOpContext(), groupByDesc, vContext, vectorGroupByDesc);

  return new ImmutablePair<Operator<? extends OperatorDesc>, String>(vectorOp, null);
}

/**
 * Vectorizes a SELECT operator: translates every projected column expression into a
 * VectorExpression, suppressing identity projections, then fixes up DECIMAL_64/DECIMAL
 * physical-variation mismatches before creating the vector operator.
 */
public static Operator<? extends OperatorDesc> vectorizeSelectOperator(
    Operator<? extends OperatorDesc> selectOp, VectorizationContext vContext,
    VectorSelectDesc vectorSelectDesc) throws HiveException {

  SelectDesc selectDesc = (SelectDesc) selectOp.getConf();
  List<ExprNodeDesc> colList = selectDesc.getColList();
  int index = 0;
  final int size = colList.size();

  // this will mark all actual computed columns
  vContext.markActualScratchColumns();

  VectorExpression[] vectorSelectExprs = new VectorExpression[size];
  int[] projectedOutputColumns = new int[size];
  for (int i = 0; i < size; i++) {
    ExprNodeDesc expr = colList.get(i);
    VectorExpression ve = vContext.getVectorExpression(expr);
    projectedOutputColumns[i] = ve.getOutputColumnNum();
    if (ve instanceof IdentityExpression) {
      // Suppress useless evaluation.
      continue;
    }
    vectorSelectExprs[index++] = ve;
  }
  if (index < size) {
    // Some identity expressions were skipped; trim the array to the real count.
    vectorSelectExprs = Arrays.copyOf(vectorSelectExprs, index);
  }

  // Fix up the case where parent expression's output data type physical variations is DECIMAL whereas
  // at least one of its children is DECIMAL_64. Some expressions like x % y for example only accepts DECIMAL
  // for x and y (at this time there is only DecimalColModuloDecimalColumn so both x and y has to be DECIMAL).
  // The following method introduces a cast if x or y is DECIMAL_64 and parent expression (x % y) is DECIMAL.
  try {
    fixDecimalDataTypePhysicalVariations(vContext, vectorSelectExprs);
  } finally {
    // Always release scratch columns marked above, even if fix-up throws.
    vContext.freeMarkedScratchColumns();
  }

  vectorSelectDesc.setSelectExpressions(vectorSelectExprs);
  vectorSelectDesc.setProjectedOutputColumns(projectedOutputColumns);

  return OperatorFactory.getVectorOperator(
      selectOp.getCompilationOpContext(), selectDesc, vContext, vectorSelectDesc);
}

/**
 * Applies the recursive DECIMAL_64 fix-up to each top-level select expression,
 * replacing an expression in place when the fix-up produced a new instance of the
 * same class.
 */
private static void fixDecimalDataTypePhysicalVariations(final VectorizationContext vContext,
    final VectorExpression[] vectorSelectExprs) throws HiveException {
  for (int i = 0; i < vectorSelectExprs.length; i++) {
    VectorExpression parent = vectorSelectExprs[i];
    VectorExpression newParent =
        fixDecimalDataTypePhysicalVariations(parent, parent.getChildExpressions(), vContext);
    if (parent.getClass() == newParent.getClass() && parent != newParent) {
      vectorSelectExprs[i] = newParent;
    }
  }
}

/**
 * Recursively walks an expression tree. When a parent whose output physical variation
 * is plain DECIMAL has a child producing DECIMAL_64, the child is wrapped with a
 * ConvertDecimal64ToDecimal cast and the parent is re-instantiated with the updated
 * input column numbers. Returns the (possibly new) parent expression.
 */
private static VectorExpression fixDecimalDataTypePhysicalVariations(final VectorExpression parent,
    final VectorExpression[] children, final VectorizationContext vContext)
        throws HiveException {
  if (children == null || children.length == 0) {
    return parent;
  }

  // Depth-first: fix the children before considering the parent.
  for (int i = 0; i < children.length; i++) {
    VectorExpression child = children[i];
    VectorExpression newChild =
        fixDecimalDataTypePhysicalVariations(child, child.getChildExpressions(), vContext);
    if (child.getClass() == newChild.getClass() && child != newChild) {
      children[i] = newChild;
    }
  }

  if (parent.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.NONE &&
      !(parent instanceof ConvertDecimal64ToDecimal)) {
    boolean inputArgsChanged = false;
    DataTypePhysicalVariation[] dataTypePhysicalVariations =
        parent.getInputDataTypePhysicalVariations();
    for (int i = 0; i < children.length; i++) {
      // we found at least one children with mismatch
      if (children[i].getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
        children[i] = vContext.wrapWithDecimal64ToDecimalConversion(children[i]);
        inputArgsChanged = true;
        dataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE;
      }
    }
    // fix up the input column numbers and output column numbers
    if (inputArgsChanged) {
      if (parent instanceof VectorUDFAdaptor) {
        // Adaptors carry their own argument descriptors; patch column numbers in place.
        VectorUDFAdaptor parentAdaptor = (VectorUDFAdaptor) parent;
        VectorUDFArgDesc[] argDescs = parentAdaptor.getArgDescs();
        for (int i = 0; i < argDescs.length; ++i) {
          if (argDescs[i].getColumnNum() != children[i].getOutputColumnNum()) {
            argDescs[i].setColumnNum(children[i].getOutputColumnNum());
            break;
          }
        }
      } else {
        Object[] arguments;
        int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ? 0 : 1);
        // VectorCoalesce receives arguments as an array.
        // Need to handle it as a special case to avoid instantiation failure.
        if (parent instanceof VectorCoalesce) {
          arguments = new Object[2];
          arguments[0] = new int[children.length];
          for (int i = 0; i < children.length; i++) {
            VectorExpression vce = children[i];
            ((int[]) arguments[0])[i] = vce.getOutputColumnNum();
          }
          arguments[1] = parent.getOutputColumnNum();
        } else {
          if (parent instanceof DecimalColDivideDecimalScalar) {
            // This expression additionally takes its scalar divisor as a constructor argument.
            arguments = new Object[argumentCount + 1];
            arguments[children.length] = ((DecimalColDivideDecimalScalar) parent).getValue();
          } else {
            arguments = new Object[argumentCount];
          }
          for (int i = 0; i < children.length; i++) {
            VectorExpression vce = children[i];
            arguments[i] = vce.getOutputColumnNum();
          }
        }
        // retain output column number from parent
        if (parent.getOutputColumnNum() != -1) {
          arguments[arguments.length - 1] = parent.getOutputColumnNum();
        }
        // re-instantiate the parent expression with new arguments
        VectorExpression newParent =
            vContext.instantiateExpression(parent.getClass(), parent.getOutputTypeInfo(),
                parent.getOutputDataTypePhysicalVariation(), arguments);
        newParent.setOutputTypeInfo(parent.getOutputTypeInfo());
        newParent.setOutputDataTypePhysicalVariation(parent.getOutputDataTypePhysicalVariation());
        newParent.setInputTypeInfos(parent.getInputTypeInfos());
        newParent.setInputDataTypePhysicalVariations(dataTypePhysicalVariations);
        newParent.setChildExpressions(parent.getChildExpressions());
        return newParent;
      }
    }
  }
  return parent;
}

/**
 * Extracts, per window function: its name, distinctness, window frame, and argument
 * expression list, into the supplied parallel output arrays.
 */
private static void fillInPTFEvaluators(List<WindowFunctionDef> windowsFunctions,
    String[] evaluatorFunctionNames, boolean[] evaluatorsAreDistinct,
    WindowFrameDef[] evaluatorWindowFrameDefs,
    List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists) throws HiveException {
  final int functionCount = windowsFunctions.size();
  for (int i = 0; i < functionCount; i++) {
    WindowFunctionDef winFunc = windowsFunctions.get(i);
    evaluatorFunctionNames[i] = winFunc.getName();
    evaluatorsAreDistinct[i] = winFunc.isDistinct();
    evaluatorWindowFrameDefs[i] = winFunc.getWindowFrame();

    List<PTFExpressionDef> args = winFunc.getArgs();
    if (args != null) {
      // A null args list leaves the corresponding output slot null.
      List<ExprNodeDesc> exprNodeDescList = new ArrayList<ExprNodeDesc>();
      for (PTFExpressionDef arg : args) {
        exprNodeDescList.add(arg.getExprNode());
      }
      evaluatorInputExprNodeDescLists[i] = exprNodeDescList;
    }
  }
}

/** Collects the ExprNodeDesc of each PARTITION BY expression into an array. */
private static ExprNodeDesc[] getPartitionExprNodeDescs(List<PTFExpressionDef> partitionExpressions) {
  final int size = partitionExpressions.size();
  ExprNodeDesc[] exprNodeDescs = new ExprNodeDesc[size];
  for (int i = 0; i < size; i++) {
    exprNodeDescs[i] = partitionExpressions.get(i).getExprNode();
  }
  return exprNodeDescs;
}

/** Collects the ExprNodeDesc of each ORDER BY expression into an array. */
private static ExprNodeDesc[] getOrderExprNodeDescs(List<OrderExpressionDef> orderExpressions) {
  final int size = orderExpressions.size();
  ExprNodeDesc[] exprNodeDescs = new ExprNodeDesc[size];
  for (int i = 0; i < size; i++) {
    exprNodeDescs[i] = orderExpressions.get(i).getExprNode();
  }
  return exprNodeDescs;
}

/*
 * Update the VectorPTFDesc with data that is used during validation and that doesn't rely on
 * VectorizationContext to lookup column names, etc.
 */
private static void createVectorPTFDesc(Operator<?
extends OperatorDesc> ptfOp,
    PTFDesc ptfDesc, VectorizationContext vContext, VectorPTFDesc vectorPTFDesc,
    int vectorizedPTFMaxMemoryBufferingBatchCount)
        throws HiveException {

  PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();

  // NOTE(review): unconditional cast — this path appears to assume the function is a
  // windowing table function; confirm validation upstream guarantees that.
  WindowTableFunctionDef windowTableFunctionDef = (WindowTableFunctionDef) funcDef;
  List<WindowFunctionDef> windowsFunctions = windowTableFunctionDef.getWindowFunctions();
  final int functionCount = windowsFunctions.size();

  List<ColumnInfo> outputSignature = ptfOp.getSchema().getSignature();
  final int outputSize = outputSignature.size();

  /*
   * Output columns.
   */
  TypeInfo[] reducerBatchTypeInfos = vContext.getAllTypeInfos();
  DataTypePhysicalVariation[] reducerBatchDataTypePhysicalVariations =
      vContext.getAllDataTypePhysicalVariations();

  // Evaluator results are first.
  String[] outputColumnNames = new String[outputSize];
  TypeInfo[] outputTypeInfos = new TypeInfo[outputSize];
  DataTypePhysicalVariation[] outputDataTypePhysicalVariations =
      new DataTypePhysicalVariation[outputSize];
  for (int i = 0; i < functionCount; i++) {
    ColumnInfo colInfo = outputSignature.get(i);
    TypeInfo typeInfo = colInfo.getType();
    outputColumnNames[i] = colInfo.getInternalName();
    outputTypeInfos[i] = typeInfo;
    outputDataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE;
  }

  // Followed by key and non-key input columns (some may be missing).
  for (int i = functionCount; i < outputSize; i++) {
    ColumnInfo colInfo = outputSignature.get(i);
    outputColumnNames[i] = colInfo.getInternalName();
    outputTypeInfos[i] = colInfo.getType();
    // Input columns keep whatever physical variation the reducer batch carries.
    outputDataTypePhysicalVariations[i] =
        reducerBatchDataTypePhysicalVariations[i-functionCount];
  }

  List<PTFExpressionDef> partitionExpressions = funcDef.getPartition().getExpressions();
  final int partitionKeyCount = partitionExpressions.size();
  ExprNodeDesc[] partitionExprNodeDescs = getPartitionExprNodeDescs(partitionExpressions);

  List<OrderExpressionDef> orderExpressions = funcDef.getOrder().getExpressions();
  final int orderKeyCount = orderExpressions.size();
  ExprNodeDesc[] orderExprNodeDescs = getOrderExprNodeDescs(orderExpressions);

  // When there are PARTITION and ORDER BY clauses, will have different partition expressions.
  // Otherwise, only order by expressions.
  boolean isPartitionOrderBy = false;

  if (partitionKeyCount != orderKeyCount) {
    // Obviously different expressions.
    isPartitionOrderBy = true;
  } else {
    // Check each ExprNodeDesc.
    for (int i = 0; i < partitionKeyCount; i++) {
      final ExprNodeDescEqualityWrapper partitionExprEqualityWrapper =
          new ExprNodeDesc.ExprNodeDescEqualityWrapper(partitionExprNodeDescs[i]);
      final ExprNodeDescEqualityWrapper orderExprEqualityWrapper =
          new ExprNodeDesc.ExprNodeDescEqualityWrapper(orderExprNodeDescs[i]);
      if (!partitionExprEqualityWrapper.equals(orderExprEqualityWrapper)) {
        isPartitionOrderBy = true;
        break;
      }
    }
  }

  String[] evaluatorFunctionNames = new String[functionCount];
  boolean[] evaluatorsAreDistinct = new boolean[functionCount];
  WindowFrameDef[] evaluatorWindowFrameDefs = new WindowFrameDef[functionCount];
  List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists =
      (List<ExprNodeDesc>[]) new List<?>[functionCount];

  fillInPTFEvaluators(
      windowsFunctions,
      evaluatorFunctionNames,
      evaluatorsAreDistinct,
      evaluatorWindowFrameDefs,
      evaluatorInputExprNodeDescLists);

  vectorPTFDesc.setReducerBatchTypeInfos(reducerBatchTypeInfos,
      reducerBatchDataTypePhysicalVariations);

  vectorPTFDesc.setIsPartitionOrderBy(isPartitionOrderBy);

  vectorPTFDesc.setOrderExprNodeDescs(orderExprNodeDescs);
  vectorPTFDesc.setPartitionExprNodeDescs(partitionExprNodeDescs);

  vectorPTFDesc.setEvaluatorFunctionNames(evaluatorFunctionNames);
  vectorPTFDesc.setEvaluatorsAreDistinct(evaluatorsAreDistinct);
  vectorPTFDesc.setEvaluatorWindowFrameDefs(evaluatorWindowFrameDefs);

  vectorPTFDesc.setEvaluatorInputExprNodeDescLists(evaluatorInputExprNodeDescLists);

  vectorPTFDesc.setOutputColumnNames(outputColumnNames);
  vectorPTFDesc.setOutputTypeInfos(outputTypeInfos, outputDataTypePhysicalVariations);

  vectorPTFDesc.setVectorizedPTFMaxMemoryBufferingBatchCount(
      vectorizedPTFMaxMemoryBufferingBatchCount);
}

/**
 * Splits the non-evaluator output columns into key columns (those matching an order
 * key or, when isPartitionOrderBy, a partition key) and non-key columns, appending
 * each column number to the corresponding output list.
 */
private static void determineKeyAndNonKeyInputColumnMap(int[] outputColumnProjectionMap,
    boolean isPartitionOrderBy, int[] orderColumnMap, int[] partitionColumnMap,
    int evaluatorCount, ArrayList<Integer> keyInputColumns,
    ArrayList<Integer> nonKeyInputColumns) {
  final int outputSize = outputColumnProjectionMap.length;
  final int orderKeyCount = orderColumnMap.length;
  // Partition keys are only consulted when partitioning differs from ordering.
  final int partitionKeyCount = (isPartitionOrderBy ? partitionColumnMap.length : 0);
  for (int i = evaluatorCount; i < outputSize; i++) {
    final int nonEvalColumnNum = outputColumnProjectionMap[i];
    boolean isKey = false;
    for (int o = 0; o < orderKeyCount; o++) {
      if (nonEvalColumnNum == orderColumnMap[o]) {
        isKey = true;
        break;
      }
    }
    if (!isKey && isPartitionOrderBy) {
      for (int p = 0; p < partitionKeyCount; p++) {
        if (nonEvalColumnNum == partitionColumnMap[p]) {
          isKey = true;
          break;
        }
      }
    }
    if (isKey) {
      keyInputColumns.add(nonEvalColumnNum);
    } else {
      nonKeyInputColumns.add(nonEvalColumnNum);
    }
  }
}

/*
 * Create the additional vectorization PTF information needed by the VectorPTFOperator during
 * execution.
 */
private static VectorPTFInfo createVectorPTFInfo(Operator<? extends OperatorDesc> ptfOp,
    PTFDesc ptfDesc, VectorizationContext vContext, VectorPTFDesc vectorPTFDesc)
        throws HiveException {

  List<ColumnInfo> outputSignature = ptfOp.getSchema().getSignature();
  final int outputSize = outputSignature.size();

  boolean isPartitionOrderBy = vectorPTFDesc.getIsPartitionOrderBy();
  ExprNodeDesc[] orderExprNodeDescs = vectorPTFDesc.getOrderExprNodeDescs();
  ExprNodeDesc[] partitionExprNodeDescs = vectorPTFDesc.getPartitionExprNodeDescs();
  String[] evaluatorFunctionNames = vectorPTFDesc.getEvaluatorFunctionNames();
  final int evaluatorCount = evaluatorFunctionNames.length;
  List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists =
      vectorPTFDesc.getEvaluatorInputExprNodeDescLists();

  /*
   * Output columns.
   */
  int[] outputColumnProjectionMap = new int[outputSize];

  // Evaluator results are first. Each gets a freshly allocated scratch column.
  for (int i = 0; i < evaluatorCount; i++) {
    ColumnInfo colInfo = outputSignature.get(i);
    TypeInfo typeInfo = colInfo.getType();
    final int outputColumnNum;
    outputColumnNum = vContext.allocateScratchColumn(typeInfo);
    outputColumnProjectionMap[i] = outputColumnNum;
  }

  // Followed by key and non-key input columns (some may be missing).
  for (int i = evaluatorCount; i < outputSize; i++) {
    ColumnInfo colInfo = outputSignature.get(i);
    outputColumnProjectionMap[i] = vContext.getInputColumnIndex(colInfo.getInternalName());
  }

  /*
   * Partition and order by.
   */
  int[] partitionColumnMap;
  Type[] partitionColumnVectorTypes;
  VectorExpression[] partitionExpressions;

  final int partitionKeyCount = partitionExprNodeDescs.length;
  partitionColumnMap = new int[partitionKeyCount];
  partitionColumnVectorTypes = new Type[partitionKeyCount];
  partitionExpressions = new VectorExpression[partitionKeyCount];
  for (int i = 0; i < partitionKeyCount; i++) {
    VectorExpression partitionExpression =
        vContext.getVectorExpression(partitionExprNodeDescs[i]);
    TypeInfo typeInfo = partitionExpression.getOutputTypeInfo();
    Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
    partitionColumnVectorTypes[i] = columnVectorType;
    partitionColumnMap[i] = partitionExpression.getOutputColumnNum();
    partitionExpressions[i] = partitionExpression;
  }

  final int orderKeyCount = orderExprNodeDescs.length;
  int[] orderColumnMap = new int[orderKeyCount];
  Type[] orderColumnVectorTypes = new Type[orderKeyCount];
  VectorExpression[] orderExpressions = new VectorExpression[orderKeyCount];
  for (int i = 0; i < orderKeyCount; i++) {
    VectorExpression orderExpression = vContext.getVectorExpression(orderExprNodeDescs[i]);
    TypeInfo typeInfo = orderExpression.getOutputTypeInfo();
    Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
    orderColumnVectorTypes[i] = columnVectorType;
    orderColumnMap[i] = orderExpression.getOutputColumnNum();
    orderExpressions[i] = orderExpression;
  }

  ArrayList<Integer> keyInputColumns = new ArrayList<Integer>();
  ArrayList<Integer> nonKeyInputColumns = new ArrayList<Integer>();
  determineKeyAndNonKeyInputColumnMap(outputColumnProjectionMap, isPartitionOrderBy,
      orderColumnMap, partitionColumnMap, evaluatorCount, keyInputColumns,
      nonKeyInputColumns);
  int[] keyInputColumnMap = ArrayUtils.toPrimitive(keyInputColumns.toArray(new Integer[0]));
  int[] nonKeyInputColumnMap = ArrayUtils.toPrimitive(nonKeyInputColumns.toArray(new Integer[0]));

  VectorExpression[] evaluatorInputExpressions = new VectorExpression[evaluatorCount];
  Type[] evaluatorInputColumnVectorTypes = new Type[evaluatorCount];
  for (int i = 0; i < evaluatorCount; i++) {
    List<ExprNodeDesc> exprNodeDescList = evaluatorInputExprNodeDescLists[i];
    VectorExpression inputVectorExpression;
    final Type columnVectorType;
    if (exprNodeDescList != null) {

      // Validation has limited evaluatorInputExprNodeDescLists to size 1.
      ExprNodeDesc exprNodeDesc = exprNodeDescList.get(0);

      // Determine input vector expression using the VectorizationContext.
      inputVectorExpression = vContext.getVectorExpression(exprNodeDesc);
      if (inputVectorExpression.getOutputColumnVectorType() == ColumnVector.Type.DECIMAL_64) {
        // PTF evaluators operate on plain DECIMAL, so up-convert DECIMAL_64 inputs.
        inputVectorExpression =
            vContext.wrapWithDecimal64ToDecimalConversion(inputVectorExpression);
      }

      TypeInfo typeInfo = exprNodeDesc.getTypeInfo();
      columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
    } else {
      // No argument (e.g. the window function takes no input expression).
      inputVectorExpression = null;
      columnVectorType = ColumnVector.Type.NONE;
    }

    evaluatorInputExpressions[i] = inputVectorExpression;
    evaluatorInputColumnVectorTypes[i] = columnVectorType;
  }

  VectorPTFInfo vectorPTFInfo = new VectorPTFInfo();

  vectorPTFInfo.setOutputColumnMap(outputColumnProjectionMap);

  vectorPTFInfo.setPartitionColumnMap(partitionColumnMap);
  vectorPTFInfo.setPartitionColumnVectorTypes(partitionColumnVectorTypes);
  vectorPTFInfo.setPartitionExpressions(partitionExpressions);

  vectorPTFInfo.setOrderColumnMap(orderColumnMap);
  vectorPTFInfo.setOrderColumnVectorTypes(orderColumnVectorTypes);
  vectorPTFInfo.setOrderExpressions(orderExpressions);

  vectorPTFInfo.setEvaluatorInputExpressions(evaluatorInputExpressions);
  vectorPTFInfo.setEvaluatorInputColumnVectorTypes(evaluatorInputColumnVectorTypes);

  vectorPTFInfo.setKeyInputColumnMap(keyInputColumnMap);
  vectorPTFInfo.setNonKeyInputColumnMap(nonKeyInputColumnMap);

  return vectorPTFInfo;
}

/*
 * NOTE: The VectorPTFDesc has already been allocated and populated.
 */
public static Operator<? extends OperatorDesc> vectorizePTFOperator(
    Operator<? extends OperatorDesc> ptfOp, VectorizationContext vContext,
    VectorPTFDesc vectorPTFDesc)
        throws HiveException {

  PTFDesc ptfDesc = (PTFDesc) ptfOp.getConf();

  VectorPTFInfo vectorPTFInfo = createVectorPTFInfo(ptfOp, ptfDesc, vContext, vectorPTFDesc);

  vectorPTFDesc.setVectorPTFInfo(vectorPTFInfo);

  Class<?
extends Operator<?>> opClass = VectorPTFOperator.class;
  return OperatorFactory.getVectorOperator(
      opClass, ptfOp.getCompilationOpContext(), ptfOp.getConf(), vContext, vectorPTFDesc);
}

// UNDONE: Used by tests...
public Operator<? extends OperatorDesc> vectorizeOperator(Operator<? extends OperatorDesc> op,
    VectorizationContext vContext, boolean isReduce, boolean isTezOrSpark,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws HiveException, VectorizerCannotVectorizeException {

  Operator<? extends OperatorDesc> vectorOp =
      validateAndVectorizeOperator(op, vContext, isReduce, isTezOrSpark, vectorTaskColumnInfo);
  if (vectorOp != op) {
    // A new (vectorized) operator replaced the original; splice it into the operator tree.
    fixupParentChildOperators(op, vectorOp);
  }
  return vectorOp;
}

/**
 * Validates one operator and, when possible, replaces it with its vectorized
 * counterpart. Dispatches on operator type; each case validates, builds the
 * corresponding Vector*Desc, and creates the vector operator. Throws
 * VectorizerCannotVectorizeException (after recording the reason via
 * setOperatorIssue/setOperatorNotSupported) when vectorization is not possible.
 */
public Operator<? extends OperatorDesc> validateAndVectorizeOperator(Operator<? extends OperatorDesc> op,
    VectorizationContext vContext, boolean isReduce, boolean isTezOrSpark,
    VectorTaskColumnInfo vectorTaskColumnInfo)
        throws HiveException, VectorizerCannotVectorizeException {
  Operator<? extends OperatorDesc> vectorOp = null;

  // This "global" allows various validation methods to set the "not vectorized" reason.
  currentOperator = op;

  // Whether the resulting vector operator is a fully native vectorized implementation.
  boolean isNative;
  try {
    switch (op.getType()) {
      case MAPJOIN:
        {
          if (op instanceof MapJoinOperator) {
            if (!validateMapJoinOperator((MapJoinOperator) op)) {
              throw new VectorizerCannotVectorizeException();
            }
          } else if (op instanceof SMBMapJoinOperator) {
            if (!validateSMBMapJoinOperator((SMBMapJoinOperator) op)) {
              throw new VectorizerCannotVectorizeException();
            }
          } else {
            setOperatorNotSupported(op);
            throw new VectorizerCannotVectorizeException();
          }

          if (op instanceof MapJoinOperator) {

            MapJoinDesc desc = (MapJoinDesc) op.getConf();
            int joinType = desc.getConds()[0].getType();

            VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
            boolean specialize =
                canSpecializeMapJoin(op, desc, isTezOrSpark, vContext, vectorMapJoinDesc);

            if (!specialize) {

              Class<? extends Operator<?>> opClass = null;

              // *NON-NATIVE* vector map differences for LEFT OUTER JOIN and Filtered...

              List<ExprNodeDesc> bigTableFilters = desc.getFilters().get((byte) desc.getPosBigTable());
              boolean isOuterAndFiltered = (!desc.isNoOuterJoin() && bigTableFilters.size() > 0);
              if (!isOuterAndFiltered) {
                opClass = VectorMapJoinOperator.class;
              } else {
                if (joinType == JoinDesc.FULL_OUTER_JOIN) {
                  setOperatorIssue("Vectorized & filtered full-outer joins not supported");
                  throw new VectorizerCannotVectorizeException();
                }
                opClass = VectorMapJoinOuterFilteredOperator.class;
              }

              vectorOp = OperatorFactory.getVectorOperator(
                  opClass, op.getCompilationOpContext(), desc, vContext, vectorMapJoinDesc);
              isNative = false;
            } else {

              // TEMPORARY Until Native Vector Map Join with Hybrid passes tests...
              // HiveConf.setBoolVar(physicalContext.getConf(),
              //    HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN, false);

              vectorOp = specializeMapJoinOperator(op, vContext, desc, vectorMapJoinDesc);
              isNative = true;

              if (vectorTaskColumnInfo != null) {
                if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableKeyExpressions())) {
                  vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                }
                if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableValueExpressions())) {
                  vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                }
              }
            }
          } else {
            Preconditions.checkState(op instanceof SMBMapJoinOperator);

            SMBJoinDesc smbJoinSinkDesc = (SMBJoinDesc) op.getConf();

            // Check additional constraint.
            if (smbJoinSinkDesc.getFilterMap() != null) {
              setOperatorIssue("FilterMaps not supported for Vector Pass-Thru SMB MapJoin");
              throw new VectorizerCannotVectorizeException();
            }

            VectorSMBJoinDesc vectorSMBJoinDesc = new VectorSMBJoinDesc();
            vectorOp = OperatorFactory.getVectorOperator(
                op.getCompilationOpContext(), smbJoinSinkDesc, vContext, vectorSMBJoinDesc);
            isNative = false;
          }
        }
        break;
      case REDUCESINK:
        {
          if (!validateReduceSinkOperator((ReduceSinkOperator) op)) {
            throw new VectorizerCannotVectorizeException();
          }

          ReduceSinkDesc reduceDesc = (ReduceSinkDesc) op.getConf();

          VectorReduceSinkDesc vectorReduceSinkDesc = new VectorReduceSinkDesc();
          boolean specialize =
              canSpecializeReduceSink(reduceDesc, isTezOrSpark, vContext, vectorReduceSinkDesc);

          if (!specialize) {

            vectorOp = OperatorFactory.getVectorOperator(
                op.getCompilationOpContext(), reduceDesc, vContext, vectorReduceSinkDesc);
            isNative = false;
          } else {

            vectorOp = specializeReduceSinkOperator(op, vContext, reduceDesc, vectorReduceSinkDesc);
            isNative = true;

            if (vectorTaskColumnInfo != null) {
              VectorReduceSinkInfo vectorReduceSinkInfo =
                  vectorReduceSinkDesc.getVectorReduceSinkInfo();
              if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkKeyExpressions())) {
                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
              }
              if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkValueExpressions())) {
                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
              }
            }
          }
        }
        break;
      case FILTER:
        {
          if (!validateFilterOperator((FilterOperator) op)) {
            throw new VectorizerCannotVectorizeException();
          }

          VectorFilterDesc vectorFilterDesc = new VectorFilterDesc();
          vectorOp = vectorizeFilterOperator(op, vContext, vectorFilterDesc);
          isNative = true;

          if (vectorTaskColumnInfo != null) {
            VectorExpression vectorPredicateExpr = vectorFilterDesc.getPredicateExpression();
            if (usesVectorUDFAdaptor(vectorPredicateExpr)) {
              vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
            }
          }
        }
        break;
      case TOPNKEY:
        {
          if (!validateTopNKeyOperator((TopNKeyOperator) op)) {
            throw new VectorizerCannotVectorizeException();
          }

          VectorTopNKeyDesc vectorTopNKeyDesc = new VectorTopNKeyDesc();
          vectorOp = vectorizeTopNKeyOperator(op, vContext, vectorTopNKeyDesc);
          isNative = true;

          if (vectorTaskColumnInfo != null) {
            VectorExpression[] keyExpressions = vectorTopNKeyDesc.getKeyExpressions();
            if (usesVectorUDFAdaptor(keyExpressions)) {
              vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
            }
          }
        }
        break;
      case SELECT:
        {
          if (!validateSelectOperator((SelectOperator) op)) {
            throw new VectorizerCannotVectorizeException();
          }

          VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
          vectorOp = vectorizeSelectOperator(op, vContext, vectorSelectDesc);
          isNative = true;

          if (vectorTaskColumnInfo != null) {
            VectorExpression[] vectorSelectExprs = vectorSelectDesc.getSelectExpressions();
            if (usesVectorUDFAdaptor(vectorSelectExprs)) {
              vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
            }
          }
        }
        break;
      case GROUPBY:
        {
          // The validateGroupByOperator method will update vectorGroupByDesc.
          VectorGroupByDesc vectorGroupByDesc = new VectorGroupByDesc();
          if (!validateGroupByOperator((GroupByOperator) op, isReduce, isTezOrSpark,
              vectorGroupByDesc)) {
            throw new VectorizerCannotVectorizeException();
          }

          ImmutablePair<Operator<? extends OperatorDesc>,String> pair =
              doVectorizeGroupByOperator(op, vContext, vectorGroupByDesc);
          if (pair.left == null) {
            setOperatorIssue(pair.right);
            throw new VectorizerCannotVectorizeException();
          }
          vectorOp = pair.left;
          isNative = false;

          if (vectorTaskColumnInfo != null) {
            VectorExpression[] vecKeyExpressions = vectorGroupByDesc.getKeyExpressions();
            if (usesVectorUDFAdaptor(vecKeyExpressions)) {
              vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
            }
            VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs();
            for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) {
              if (usesVectorUDFAdaptor(vecAggrDesc.getInputExpression())) {
                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
              }
            }
          }
        }
        break;
      case FILESINK:
        {
          if (!validateFileSinkOperator((FileSinkOperator) op)) {
            throw new VectorizerCannotVectorizeException();
          }

          FileSinkDesc fileSinkDesc = (FileSinkDesc) op.getConf();

          VectorFileSinkDesc vectorFileSinkDesc = new VectorFileSinkDesc();
          boolean isArrowSpecialization =
              checkForArrowFileSink(fileSinkDesc, isTezOrSpark, vContext, vectorFileSinkDesc);

          if (isArrowSpecialization) {
            vectorOp = specializeArrowFileSinkOperator(
                op, vContext, fileSinkDesc, vectorFileSinkDesc);
            isNative = true;
          } else {
            vectorOp = OperatorFactory.getVectorOperator(
                op.getCompilationOpContext(), fileSinkDesc, vContext, vectorFileSinkDesc);
            isNative = false;
          }
        }
        break;
      case LIMIT:
        {
          // No validation.

          LimitDesc limitDesc = (LimitDesc) op.getConf();

          VectorLimitDesc vectorLimitDesc = new VectorLimitDesc();
          vectorOp = OperatorFactory.getVectorOperator(
              op.getCompilationOpContext(), limitDesc, vContext, vectorLimitDesc);
          isNative = true;
        }
        break;
      case EVENT:
        {
          // No validation.

          AppMasterEventDesc eventDesc = (AppMasterEventDesc) op.getConf();

          VectorAppMasterEventDesc vectorEventDesc = new VectorAppMasterEventDesc();
          vectorOp = OperatorFactory.getVectorOperator(
              op.getCompilationOpContext(), eventDesc, vContext, vectorEventDesc);
          isNative = true;
        }
        break;
      case PTF:
        {
          // The validatePTFOperator method will update vectorPTFDesc.
          VectorPTFDesc vectorPTFDesc = new VectorPTFDesc();
          if (!validatePTFOperator((PTFOperator) op, vContext, vectorPTFDesc)) {
            throw new VectorizerCannotVectorizeException();
          }

          vectorOp = vectorizePTFOperator(op, vContext, vectorPTFDesc);
          isNative = true;
        }
        break;
      case HASHTABLESINK:
        {
          // No validation.

          SparkHashTableSinkDesc sparkHashTableSinkDesc = (SparkHashTableSinkDesc) op.getConf();

          VectorSparkHashTableSinkDesc vectorSparkHashTableSinkDesc =
              new VectorSparkHashTableSinkDesc();
          vectorOp = OperatorFactory.getVectorOperator(
              op.getCompilationOpContext(), sparkHashTableSinkDesc, vContext,
              vectorSparkHashTableSinkDesc);
          isNative = true;
        }
        break;
      case SPARKPRUNINGSINK:
        {
          // No validation.

          SparkPartitionPruningSinkDesc sparkPartitionPruningSinkDesc =
              (SparkPartitionPruningSinkDesc) op.getConf();

          VectorSparkPartitionPruningSinkDesc vectorSparkPartitionPruningSinkDesc =
              new VectorSparkPartitionPruningSinkDesc();
          vectorOp = OperatorFactory.getVectorOperator(
              op.getCompilationOpContext(), sparkPartitionPruningSinkDesc, vContext,
              vectorSparkPartitionPruningSinkDesc);
          // need to maintain the unique ID so that target map works can
          // read the output
          ((SparkPartitionPruningSinkOperator) vectorOp).setUniqueId(
              ((SparkPartitionPruningSinkOperator) op).getUniqueId());
          isNative = true;
        }
        break;
      default:
        setOperatorNotSupported(op);
        throw new VectorizerCannotVectorizeException();
    }
  } catch (HiveException e) {
    // Record the reason and surface it as a "cannot vectorize" signal to the caller.
    setOperatorIssue(e.getMessage());
    throw new VectorizerCannotVectorizeException();
  }

  Preconditions.checkState(vectorOp != null);
  if (vectorTaskColumnInfo != null && !isNative) {
    vectorTaskColumnInfo.setAllNative(false);
  }

  LOG.debug("vectorizeOperator " + vectorOp.getClass().getName());
  LOG.debug("vectorizeOperator " + vectorOp.getConf().getClass().getName());

  // These operators need to be linked to enable runtime statistics to be gathered/used correctly
  planMapper.link(op, vectorOp);

  return vectorOp;
}
}
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.test.annotations.index.jpa;

/**
 * Runs the shared JPA index test suite against index definitions supplied via an
 * orm.xml mapping file rather than annotations.
 *
 * @author <a href="mailto:stliu@hibernate.org">Strong Liu</a>
 */
public class OrmXmlIndexTest extends AbstractJPAIndexTest {

	/** Mapping document that declares the index metadata exercised by the base test. */
	private static final String ORM_MAPPING =
			"org/hibernate/test/annotations/index/jpa/orm-index.xml";

	@Override
	protected String[] getXmlFiles() {
		// Base class reads its mapping documents from this single-element array.
		return new String[] { ORM_MAPPING };
	}
}
package org.contikios.cooja.serialsocket; /* * Copyright (c) 2014, TU Braunschweig. * Copyright (c) 2010, Swedish Institute of Computer Science. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the Institute nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
* */ import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.text.NumberFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Observable; import java.util.Observer; import java.util.logging.Level; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JFormattedTextField; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JSeparator; import javax.swing.SwingUtilities; import javax.swing.Timer; import javax.swing.border.EtchedBorder; import javax.swing.text.NumberFormatter; import org.apache.log4j.Logger; import org.jdom.Element; import org.contikios.cooja.ClassDescription; import org.contikios.cooja.Cooja; import org.contikios.cooja.Mote; import org.contikios.cooja.MotePlugin; import org.contikios.cooja.PluginType; import org.contikios.cooja.Simulation; import org.contikios.cooja.VisPlugin; import org.contikios.cooja.interfaces.SerialPort; /** * Socket to simulated serial port forwarder. Server version. 
 *
 * Listens on a TCP server socket and forwards raw bytes in both directions
 * between one connected client and the simulated mote's serial port. Only a
 * single client is served at a time; additional connection attempts are
 * refused while a client is connected.
 *
 * @author Fredrik Osterlind
 * @author Enrico Jorns
 */
@ClassDescription("Serial Socket (SERVER)")
@PluginType(PluginType.MOTE_PLUGIN)
public class SerialSocketServer extends VisPlugin implements MotePlugin {
  private static final long serialVersionUID = 1L;
  private static final Logger logger = Logger.getLogger(SerialSocketServer.class);

  /* Fixed pixel width of the status bar panel. */
  private final static int STATUSBAR_WIDTH = 350;

  /* Status label colors: neutral = idle/listening, positive = client
   * connected, negative = error. */
  private static final Color COLOR_NEUTRAL = Color.DARK_GRAY;
  private static final Color COLOR_POSITIVE = new Color(0, 161, 83);
  private static final Color COLOR_NEGATIVE = Color.RED;

  /* Default listen port, derived from the mote ID (60000 + id), see ctor. */
  private final int SERVER_DEFAULT_PORT;

  /* Serial port interface of the attached mote (resolved in the ctor). */
  private final SerialPort serialPort;
  /* Observer that forwards mote serial output to the connected client;
   * installed per connection, removed in cleanupClient(). */
  private Observer serialDataObserver;

  /* GUI widgets; only initialized when Cooja runs visualized. */
  private JLabel socketToMoteLabel;
  private JLabel moteToSocketLabel;
  private JLabel socketStatusLabel;
  private JFormattedTextField listenPortField;
  private JButton serverStartButton;

  /* Byte counters for the two forwarding directions
   * (inBytes: socket -> mote, outBytes: mote -> socket).
   * NOTE(review): updated from worker/observer threads and read by the Swing
   * timer without synchronization -- confirm whether stale reads are
   * acceptable here (display-only counters). */
  private int inBytes = 0, outBytes = 0;

  private ServerSocket serverSocket;
  /* Currently connected client, or null when no client is connected. */
  private Socket clientSocket;

  private Mote mote;
  private Simulation simulation;

  /**
   * Creates the plugin, builds the GUI (only when Cooja is visualized) and
   * resolves the mote's serial port interface.
   *
   * @param mote mote whose serial port is forwarded
   * @param simulation owning simulation; used to schedule serial writes on
   *        the simulation thread
   * @param gui Cooja instance
   * @throws RuntimeException if the mote exposes no serial port interface
   */
  public SerialSocketServer(Mote mote, Simulation simulation, final Cooja gui) {
    super("Serial Socket (SERVER) (" + mote + ")", gui, false);
    this.mote = mote;
    this.simulation = simulation;

    // updateTimer is a field initializer: it has already run by the time the
    // constructor body executes, so starting the timer here is safe.
    updateTimer.start();

    SERVER_DEFAULT_PORT = 60000 + mote.getID();

    /* GUI components */
    if (Cooja.isVisualized()) {
      setResizable(false);
      setLayout(new BorderLayout());

      // --- Server Port setup
      GridBagConstraints c = new GridBagConstraints();
      JPanel socketPanel = new JPanel(new GridBagLayout());
      socketPanel.setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2));

      JLabel label = new JLabel("Listen port: ");
      c.gridx = 0;
      c.gridy = 0;
      c.weightx = 0.1;
      c.anchor = GridBagConstraints.EAST;
      socketPanel.add(label, c);

      // Integer-only port field, no thousands grouping.
      NumberFormat nf = NumberFormat.getIntegerInstance();
      nf.setGroupingUsed(false);
      listenPortField = new JFormattedTextField(new NumberFormatter(nf));
      listenPortField.setColumns(5);
      listenPortField.setText(String.valueOf(SERVER_DEFAULT_PORT));
      c.gridx++;
      c.weightx = 0.0;
      socketPanel.add(listenPortField, c);

      // Button for label toggling: preferred size is computed for both the
      // "Start" and "Stop" captions so toggling does not resize the button.
      serverStartButton = new JButton("Start") {
        private final String altString = "Stop";
        @Override
        public Dimension getPreferredSize() {
          String origText = getText();
          Dimension origDim = super.getPreferredSize();
          setText(altString);
          Dimension altDim = super.getPreferredSize();
          setText(origText);
          return new Dimension(Math.max(origDim.width, altDim.width), origDim.height);
        }
      };
      c.gridx++;
      c.weightx = 0.1;
      c.anchor = GridBagConstraints.EAST;
      socketPanel.add(serverStartButton, c);

      c.gridx = 0;
      c.gridy++;
      c.gridwidth = GridBagConstraints.REMAINDER;
      c.fill = GridBagConstraints.HORIZONTAL;
      socketPanel.add(new JSeparator(JSeparator.HORIZONTAL), c);

      add(BorderLayout.NORTH, socketPanel);

      // --- Incoming / outgoing info
      JPanel connectionInfoPanel = new JPanel(new GridLayout(0, 2));
      connectionInfoPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
      c = new GridBagConstraints();

      label = new JLabel("socket -> mote: ");
      label.setHorizontalAlignment(JLabel.RIGHT);
      c.gridx = 0;
      c.gridy = 0;
      c.anchor = GridBagConstraints.EAST;
      connectionInfoPanel.add(label);

      socketToMoteLabel = new JLabel("0 bytes");
      c.gridx++;
      c.anchor = GridBagConstraints.WEST;
      connectionInfoPanel.add(socketToMoteLabel);

      label = new JLabel("mote -> socket: ");
      label.setHorizontalAlignment(JLabel.RIGHT);
      c.gridx = 0;
      c.gridy++;
      c.anchor = GridBagConstraints.EAST;
      connectionInfoPanel.add(label);

      moteToSocketLabel = new JLabel("0 bytes");
      c.gridx++;
      c.anchor = GridBagConstraints.WEST;
      connectionInfoPanel.add(moteToSocketLabel);

      add(BorderLayout.CENTER, connectionInfoPanel);

      // --- Status bar (fixed width, see STATUSBAR_WIDTH)
      JPanel statusBarPanel = new JPanel(new BorderLayout()) {
        @Override
        public Dimension getPreferredSize() {
          Dimension d = super.getPreferredSize();
          return new Dimension(STATUSBAR_WIDTH, d.height);
        }
      };
      statusBarPanel.setLayout(new BoxLayout(statusBarPanel, BoxLayout.LINE_AXIS));
      statusBarPanel.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.RAISED));
      label = new JLabel("Status: ");
      statusBarPanel.add(label);

      socketStatusLabel = new JLabel("Idle");
      socketStatusLabel.setForeground(Color.DARK_GRAY);
      statusBarPanel.add(socketStatusLabel);

      add(BorderLayout.SOUTH, statusBarPanel);

      // Toggle action: the action command follows the button caption, which
      // the server listeners below flip between "Start" and "Stop".
      serverStartButton.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          if (e.getActionCommand().equals("Start")) {
            try {
              listenPortField.commitEdit();
            } catch (ParseException ex) {
              // NOTE(review): logger is obtained for SerialSocketClient --
              // looks like a copy-paste from the client plugin; confirm and
              // change to SerialSocketServer.class.
              java.util.logging.Logger.getLogger(SerialSocketClient.class.getName()).log(Level.SEVERE, null, ex);
            }
            startServer(((Long) listenPortField.getValue()).intValue());
          } else {
            stopServer();
          }
        }
      });

      pack();
    }

    /* Mote serial port */
    serialPort = (SerialPort) mote.getInterfaces().getLog();
    if (serialPort == null) {
      throw new RuntimeException("No mote serial port");
    }

    if (Cooja.isVisualized()) {
      // gui updates for server status updates; all UI mutation is marshalled
      // onto the EDT via SwingUtilities.invokeLater.
      addServerListener(new ServerListener() {

        @Override
        public void onServerStarted(final int port) {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              // NOTE(review): debug leftover, candidate for removal.
              System.out.println("onServerStarted");
              socketStatusLabel.setForeground(COLOR_NEUTRAL);
              socketStatusLabel.setText("Listening on port " + String.valueOf(port));
              listenPortField.setEnabled(false);
              serverStartButton.setText("Stop");
            }
          });
        }

        @Override
        public void onClientConnected(final Socket client) {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              socketStatusLabel.setForeground(COLOR_POSITIVE);
              socketStatusLabel.setText("Client " + client.getInetAddress() + ":" + client.getPort() + " connected.");
            }
          });
        }

        @Override
        public void onClientDisconnected() {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              // XXX check why needed -- serverSocket can already be null here
              // when the disconnect is part of a full server shutdown.
              if (serverSocket != null) {
                socketStatusLabel.setForeground(COLOR_NEUTRAL);
                socketStatusLabel.setText("Listening on port " + String.valueOf(serverSocket.getLocalPort()));
              }
            }
          });
        }

        @Override
        public void onServerStopped() {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              // Restore the idle UI state so the server can be started again.
              listenPortField.setEnabled(true);
              serverStartButton.setText("Start");
              socketStatusLabel.setForeground(COLOR_NEUTRAL);
              socketStatusLabel.setText("Idle");
            }
          });
        }

        @Override
        public void onServerError(final String msg) {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              socketStatusLabel.setForeground(COLOR_NEGATIVE);
              socketStatusLabel.setText(msg);
            }
          });
        }
      });
    }
  }

  /* Registered server lifecycle listeners (GUI updater when visualized). */
  private List<ServerListener> listeners = new LinkedList<>();

  /**
   * Callback interface for server lifecycle events: start/stop, client
   * connect/disconnect, and errors.
   */
  public interface ServerListener {
    void onServerStarted(int port);
    void onClientConnected(Socket client);
    void onClientDisconnected();
    void onServerStopped();
    void onServerError(String msg);
  }

  /** Registers a listener for server lifecycle events. */
  private void addServerListener(ServerListener listener) {
    listeners.add(listener);
  }

  /** Notifies all listeners that the server is listening on {@code port}. */
  public void notifyServerStarted(int port) {
    for (ServerListener listener : listeners) {
      listener.onServerStarted(port);
    }
  }

  /** Notifies all listeners that {@code client} connected. */
  public void notifyClientConnected(Socket client) {
    for (ServerListener listener : listeners) {
      listener.onClientConnected(client);
    }
  }

  /** Notifies all listeners that the current client disconnected. */
  public void notifyClientDisconnected() {
    for (ServerListener listener : listeners) {
      listener.onClientDisconnected();
    }
  }

  /** Notifies all listeners that the server shut down. */
  public void notifyServerStopped() {
    for (ServerListener listener : listeners) {
      listener.onServerStopped();
    }
  }

  /** Notifies all listeners of a server error described by {@code msg}. */
  public void notifyServerError(String msg) {
    for (ServerListener listener : listeners) {
      listener.onServerError(msg);
    }
  }

  /**
   * Opens the listen socket on {@code port} and spawns the accept loop in a
   * background thread. On bind failure the error is reported via
   * {@link #notifyServerError(String)} and no thread is started. The accept
   * loop serves one client at a time and exits when the listen socket is
   * closed (see {@link #stopServer()}).
   *
   * @param port TCP port to listen on
   */
  public void startServer(int port) {
    try {
      serverSocket = new ServerSocket(port);
      logger.info("Listening on port: " + port);
      notifyServerStarted(port);
    } catch (IOException ex) {
      logger.error(ex.getMessage());
      notifyServerError(ex.getMessage());
      return;
    }

    new Thread() {
      private Thread incomingDataHandler;
      @Override
      public void run() {
        while (!serverSocket.isClosed()) {
          try {
            // wait for next client
            Socket candidateSocket = serverSocket.accept();

            // reject connection if already one client connected
            if (clientSocket != null && !clientSocket.isClosed()) {
              logger.info("Refused connection of client " + candidateSocket.getInetAddress());
              candidateSocket.close();
              continue;
            }

            clientSocket = candidateSocket;

            /* Start handler for data input from socket */
            incomingDataHandler = new Thread(new IncomingDataHandler());
            incomingDataHandler.start();

            /* Observe serial port for outgoing data */
            serialDataObserver = new SerialDataObserver();
            serialPort.addSerialDataObserver(serialDataObserver);

            // Reset per-connection counters.
            inBytes = outBytes = 0;

            logger.info("Client connected: " + clientSocket.getInetAddress());
            notifyClientConnected(clientSocket);
          } catch (IOException e) {
            // accept() failed -- typically because stopServer() closed the
            // socket; close defensively and let the loop condition exit.
            logger.info("Listening thread shut down: " + e.getMessage());
            try {
              serverSocket.close();
            } catch (IOException ex) {
              logger.error(ex);
            }
          }
        }
        cleanupClient();
        if (incomingDataHandler != null) {
          // Wait for reader thread to terminate
          try {
            incomingDataHandler.join(500);
          } catch (InterruptedException ex) {
            logger.warn(ex);
          }
        }
        notifyServerStopped();
      }
    }.start();
  }

  /**
   * Stops the server by closing the listen socket; the accept loop started in
   * {@link #startServer(int)} then terminates and performs client cleanup.
   */
  public void stopServer() {
    try {
      serverSocket.close();
    } catch (IOException ex) {
      logger.error(ex);
    }
  }

  /* Forward data: virtual port -> mote.
   * Reads from the client socket and replays each chunk onto the mote's
   * serial port on the simulation thread. */
  private class IncomingDataHandler implements Runnable {
    DataInputStream in;

    @Override
    public void run() {
      int numRead = 0;
      byte[] data = new byte[1024];
      try {
        in = new DataInputStream(clientSocket.getInputStream());
      } catch (IOException ex) {
        logger.error(ex);
        return;
      }

      logger.info("Forwarder: socket -> serial port");
      // First iteration runs with numRead == 0 (writes nothing), then blocks
      // on read(); loop ends on EOF or read error (numRead == -1).
      while (numRead >= 0) {
        final int finalNumRead = numRead;
        final byte[] finalData = data;
        /* We are not on the simulation thread */
        simulation.invokeSimulationThread(new Runnable() {
          @Override
          public void run() {
            for (int i = 0; i < finalNumRead; i++) {
              serialPort.writeByte(finalData[i]);
            }
            inBytes += finalNumRead;
          }
        });

        try {
          numRead = in.read(data);
        } catch (IOException e) {
          logger.info(e.getMessage());
          numRead = -1;
        }
      }
      logger.info("End of Stream");
      cleanupClient();
    }
  }

  /* Forward data: mote -> socket.
   * Notified by the serial port on new output; writes it to the client. */
  private class SerialDataObserver implements Observer {
    DataOutputStream out;

    public SerialDataObserver() {
      try {
        out = new DataOutputStream(clientSocket.getOutputStream());
      } catch (IOException ex) {
        logger.error(ex);
        out = null;
      }
    }

    @Override
    public void update(Observable obs, Object obj) {
      try {
        if (out == null) {
          /*logger.debug("out is null");*/
          return;
        }

        out.write(serialPort.getLastSerialData());
        out.flush();
        // outBytes counts update() invocations; presumably one serial byte
        // each -- TODO confirm against SerialPort.getLastSerialData().
        outBytes++;
      } catch (IOException ex) {
        logger.error(ex);
        cleanupClient();
      }
    }
  }

  /**
   * Serializes plugin state: the configured/actual listen port and whether
   * the server is currently bound.
   */
  @Override
  public Collection<Element> getConfigXML() {
    List<Element> config = new ArrayList<>();
    Element element;

    // XXX isVisualized guards? -- listenPortField is only created when
    // visualized, so the not-bound branch below assumes a GUI exists.
    element = new Element("port");
    if (serverSocket == null || !serverSocket.isBound()) {
      try {
        listenPortField.commitEdit();
        element.setText(String.valueOf((Long) listenPortField.getValue()));
      } catch (ParseException ex) {
        logger.error(ex.getMessage());
        listenPortField.setText("null");
      }
    } else {
      element.setText(String.valueOf(serverSocket.getLocalPort()));
    }
    config.add(element);

    element = new Element("bound");
    if (serverSocket == null) {
      element.setText(String.valueOf(false));
    } else {
      element.setText(String.valueOf(!serverSocket.isClosed()));
    }
    config.add(element);

    return config;
  }

  /**
   * Restores plugin state from {@code configXML}. When visualized, the GUI is
   * populated and the server is started by clicking the start button; in
   * headless mode the server is started directly when a port is configured.
   *
   * @return always {@code true}
   */
  @Override
  public boolean setConfigXML(Collection<Element> configXML, boolean visAvailable) {
    Integer port = null;
    boolean bound = false;

    for (Element element : configXML) {
      switch (element.getName()) {
        case "port":
          port = Integer.parseInt(element.getText());
          break;
        case "bound":
          bound = Boolean.parseBoolean(element.getText());
          break;
        default:
          // NOTE(review): "Unknwon" is a typo in the log message; left
          // untouched in this documentation-only pass.
          logger.warn("Unknwon config element: " + element.getName());
          break;
      }
    }

    if (Cooja.isVisualized()) {
      if (port != null) {
        listenPortField.setText(String.valueOf(port));
      }
      if (bound) {
        serverStartButton.doClick();
      }
    } else {
      // if bound and all set up, start client
      if (port != null) {
        startServer(port);
      } else {
        logger.error("Server not started due to incomplete configuration");
      }
    }

    return true;
  }

  /**
   * Tears down the current client connection: closes the socket, detaches the
   * serial data observer and notifies listeners. Safe to call when no client
   * is connected.
   */
  private void cleanupClient() {
    try {
      if (clientSocket != null) {
        clientSocket.close();
        clientSocket = null;
      }
    } catch (IOException e1) {
      logger.error(e1.getMessage());
    }

    serialPort.deleteSerialDataObserver(serialDataObserver);

    notifyClientDisconnected();
  }

  /* Set on plugin close; checked by the GUI update timer to stop itself. */
  private boolean closed = false;

  /**
   * Plugin shutdown hook: disconnects any client and closes the listen
   * socket.
   */
  @Override
  public void closePlugin() {
    closed = true;
    cleanupClient();
    try {
      if (serverSocket != null) {
        serverSocket.close();
      }
    } catch (IOException ex) {
      logger.error(ex);
    }
  }

  @Override
  public Mote getMote() {
    return mote;
  }

  /* Refresh period for the byte-counter labels, in milliseconds. */
  private static final int UPDATE_INTERVAL = 150;

  /* Swing timer that periodically refreshes the in/out byte counters in the
   * GUI; stops itself once the plugin is closed. */
  private Timer updateTimer = new Timer(UPDATE_INTERVAL, new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      if (Cooja.isVisualized()) {
        if (closed) {
          updateTimer.stop();
          return;
        }
        socketToMoteLabel.setText(inBytes + " bytes");
        moteToSocketLabel.setText(outBytes + " bytes");
      }
    }
  });
}
package com.catehuston.imagefilter.model; public class HSBColor { public final float h; public final float s; public final float b; public HSBColor(float h, float s, float b) { this.h = h; this.s = s; this.b = b; } }
package com.simplechat.perf.client.message.api;

/**
 * Composes the outgoing message payloads used by the performance-test chat
 * client. Implementations define the concrete wire format of each message
 * type; the exact protocol is not visible from this interface.
 *
 * Created by lampt on 6/24/2017.
 */
public interface MessageComposer {

    /**
     * @return the payload for a "send" type message
     */
    String composeSendTypeMessage();

    /**
     * @return the payload for a "list" type message
     */
    String composeListTypeMessage();

    /**
     * @return the payload for a "send file" type message
     */
    String composeSendFileTypeMessage();
}
/* * ApplicationInsights-Java * Copyright (c) Microsoft Corporation * All rights reserved. * * MIT License * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the ""Software""), to deal in the Software * without restriction, including without limitation the rights to use, copy, modify, merge, * publish, distribute, sublicense, and/or sell copies of the Software, and to permit * persons to whom the Software is furnished to do so, subject to the following conditions: * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
*/ package com.microsoft.applicationinsights.internal.config; import org.junit.*; import java.io.InputStream; public final class JaxbAppInsightsConfigurationBuilderTest { private final static String EXISTING_CONF_TEST_FILE = "ApplicationInsights2.xml"; @Before public void clearProp() { System.clearProperty(ConfigurationFileLocator.CONFIG_DIR_PROPERTY); } @Test public void testNullInputShouldReturnNull() { Assert.assertNull(new JaxbAppInsightsConfigurationBuilder().build(null)); } @Test public void testBuilderProducesCorrectConfig() { System.setProperty(ConfigurationFileLocator.CONFIG_DIR_PROPERTY, "src/test/resources"); InputStream resourceFile = new ConfigurationFileLocator(EXISTING_CONF_TEST_FILE).getConfigurationFile(); JaxbAppInsightsConfigurationBuilder builder = new JaxbAppInsightsConfigurationBuilder(); ApplicationInsightsXmlConfiguration config = builder.build(resourceFile); // asserting a few config items only since the point of the test is to validate deserialization occurs // with no errors. Assert.assertEquals("myikey", config.getInstrumentationKey()); Assert.assertFalse(config.getChannel().getDeveloperMode()); Assert.assertEquals("mypackage.MyCustomContextInitializer", config.getContextInitializers().getAdds().get(0).getType()); } }
package com.orion.generator.addres; import com.orion.lang.collect.WeightRandomMap; import java.util.Collections; import java.util.Map; import java.util.TreeMap; /** * 中国3级地址 * * @author Jiahang Li * @version 1.0.0 * @since 2021/8/11 14:59 */ public class AddressArea { /** * 省人口权重 */ protected static final WeightRandomMap<Integer> POPULATION; /** * 省 */ protected static final Map<Integer, String> PROVINCE; /** * 市 */ protected static final Map<Integer, String> CITY; /** * 县 */ protected static final Map<Integer, String> COUNTY; private AddressArea() { } static { POPULATION = new WeightRandomMap<>(); POPULATION.put(37, 8D); POPULATION.put(44, 7.1D); POPULATION.put(41, 6.8D); POPULATION.put(51, 5.9D); POPULATION.put(32, 5.7D); POPULATION.put(13, 5.3D); POPULATION.put(43, 4.9D); POPULATION.put(34, 4.5D); POPULATION.put(42, 4.2D); POPULATION.put(33, 4.0D); POPULATION.put(45, 3.5D); POPULATION.put(53, 3.42D); POPULATION.put(36, 3.28D); POPULATION.put(21, 3.07D); POPULATION.put(35, 2.81D); POPULATION.put(61, 2.76D); POPULATION.put(23, 2.69D); POPULATION.put(14, 2.65D); POPULATION.put(52, 2.57D); POPULATION.put(50, 2.21D); POPULATION.put(22, 1.92D); POPULATION.put(62, 1.85D); POPULATION.put(15, 1.78D); POPULATION.put(65, 1.77D); POPULATION.put(31, 1.73D); POPULATION.put(11, 1.53D); POPULATION.put(12, 1.11D); POPULATION.put(46, 0.066D); POPULATION.put(64, 0.049D); POPULATION.put(63, 0.0422D); POPULATION.put(54, 0.02D); } static { TreeMap<Integer, String> tempProvince = new TreeMap<>(); tempProvince.put(11, "北京市"); tempProvince.put(12, "天津市"); tempProvince.put(13, "河北省"); tempProvince.put(14, "山西省"); tempProvince.put(15, "内蒙古自治区"); tempProvince.put(21, "辽宁省"); tempProvince.put(22, "吉林省"); tempProvince.put(23, "黑龙江省"); tempProvince.put(31, "上海市"); tempProvince.put(32, "江苏省"); tempProvince.put(33, "浙江省"); tempProvince.put(34, "安徽省"); tempProvince.put(35, "福建省"); tempProvince.put(36, "江西省"); tempProvince.put(37, "山东省"); tempProvince.put(41, "河南省"); tempProvince.put(42, "湖北省"); 
tempProvince.put(43, "湖南省"); tempProvince.put(44, "广东省"); tempProvince.put(45, "广西壮族自治区"); tempProvince.put(46, "海南省"); tempProvince.put(50, "重庆市"); tempProvince.put(51, "四川省"); tempProvince.put(52, "贵州省"); tempProvince.put(53, "云南省"); tempProvince.put(54, "西藏自治区"); tempProvince.put(61, "陕西省"); tempProvince.put(62, "甘肃省"); tempProvince.put(63, "青海省"); tempProvince.put(64, "宁夏回族自治区"); tempProvince.put(65, "新疆维吾尔自治区"); PROVINCE = Collections.unmodifiableSortedMap(tempProvince); } static { TreeMap<Integer, String> tempCity = new TreeMap<>(); tempCity.put(1101, "北京市"); tempCity.put(1201, "天津市"); tempCity.put(1301, "石家庄市"); tempCity.put(1302, "唐山市"); tempCity.put(1303, "秦皇岛市"); tempCity.put(1304, "邯郸市"); tempCity.put(1305, "邢台市"); tempCity.put(1306, "保定市"); tempCity.put(1307, "张家口市"); tempCity.put(1308, "承德市"); tempCity.put(1309, "沧州市"); tempCity.put(1310, "廊坊市"); tempCity.put(1311, "衡水市"); tempCity.put(1401, "太原市"); tempCity.put(1402, "大同市"); tempCity.put(1403, "阳泉市"); tempCity.put(1404, "长治市"); tempCity.put(1405, "晋城市"); tempCity.put(1406, "朔州市"); tempCity.put(1407, "晋中市"); tempCity.put(1408, "运城市"); tempCity.put(1409, "忻州市"); tempCity.put(1410, "临汾市"); tempCity.put(1411, "吕梁市"); tempCity.put(1501, "呼和浩特市"); tempCity.put(1502, "包头市"); tempCity.put(1503, "乌海市"); tempCity.put(1504, "赤峰市"); tempCity.put(1505, "通辽市"); tempCity.put(1506, "鄂尔多斯市"); tempCity.put(1507, "呼伦贝尔市"); tempCity.put(1508, "巴彦淖尔市"); tempCity.put(1509, "乌兰察布市"); tempCity.put(1522, "兴安盟"); tempCity.put(1525, "锡林郭勒盟"); tempCity.put(1529, "阿拉善盟"); tempCity.put(2101, "沈阳市"); tempCity.put(2102, "大连市"); tempCity.put(2103, "鞍山市"); tempCity.put(2104, "抚顺市"); tempCity.put(2105, "本溪市"); tempCity.put(2106, "丹东市"); tempCity.put(2107, "锦州市"); tempCity.put(2108, "营口市"); tempCity.put(2109, "阜新市"); tempCity.put(2110, "辽阳市"); tempCity.put(2111, "盘锦市"); tempCity.put(2112, "铁岭市"); tempCity.put(2113, "朝阳市"); tempCity.put(2114, "葫芦岛市"); tempCity.put(2201, "长春市"); tempCity.put(2202, "吉林市"); tempCity.put(2203, "四平市"); 
tempCity.put(2204, "辽源市"); tempCity.put(2205, "通化市"); tempCity.put(2206, "白山市"); tempCity.put(2207, "松原市"); tempCity.put(2208, "白城市"); tempCity.put(2224, "延边朝鲜族自治州"); tempCity.put(2301, "哈尔滨市"); tempCity.put(2302, "齐齐哈尔市"); tempCity.put(2303, "鸡西市"); tempCity.put(2304, "鹤岗市"); tempCity.put(2305, "双鸭山市"); tempCity.put(2306, "大庆市"); tempCity.put(2307, "伊春市"); tempCity.put(2308, "佳木斯市"); tempCity.put(2309, "七台河市"); tempCity.put(2310, "牡丹江市"); tempCity.put(2311, "黑河市"); tempCity.put(2312, "绥化市"); tempCity.put(2327, "大兴安岭地区"); tempCity.put(3101, "上海市"); tempCity.put(3201, "南京市"); tempCity.put(3202, "无锡市"); tempCity.put(3203, "徐州市"); tempCity.put(3204, "常州市"); tempCity.put(3205, "苏州市"); tempCity.put(3206, "南通市"); tempCity.put(3207, "连云港市"); tempCity.put(3208, "淮安市"); tempCity.put(3209, "盐城市"); tempCity.put(3210, "扬州市"); tempCity.put(3211, "镇江市"); tempCity.put(3212, "泰州市"); tempCity.put(3213, "宿迁市"); tempCity.put(3301, "杭州市"); tempCity.put(3302, "宁波市"); tempCity.put(3303, "温州市"); tempCity.put(3304, "嘉兴市"); tempCity.put(3305, "湖州市"); tempCity.put(3306, "绍兴市"); tempCity.put(3307, "金华市"); tempCity.put(3308, "衢州市"); tempCity.put(3309, "舟山市"); tempCity.put(3310, "台州市"); tempCity.put(3311, "丽水市"); tempCity.put(3401, "合肥市"); tempCity.put(3402, "芜湖市"); tempCity.put(3403, "蚌埠市"); tempCity.put(3404, "淮南市"); tempCity.put(3405, "马鞍山市"); tempCity.put(3406, "淮北市"); tempCity.put(3407, "铜陵市"); tempCity.put(3408, "安庆市"); tempCity.put(3410, "黄山市"); tempCity.put(3411, "滁州市"); tempCity.put(3412, "阜阳市"); tempCity.put(3413, "宿州市"); tempCity.put(3415, "六安市"); tempCity.put(3416, "亳州市"); tempCity.put(3417, "池州市"); tempCity.put(3418, "宣城市"); tempCity.put(3501, "福州市"); tempCity.put(3502, "厦门市"); tempCity.put(3503, "莆田市"); tempCity.put(3504, "三明市"); tempCity.put(3505, "泉州市"); tempCity.put(3506, "漳州市"); tempCity.put(3507, "南平市"); tempCity.put(3508, "龙岩市"); tempCity.put(3509, "宁德市"); tempCity.put(3601, "南昌市"); tempCity.put(3602, "景德镇市"); tempCity.put(3603, "萍乡市"); tempCity.put(3604, "九江市"); 
tempCity.put(3605, "新余市"); tempCity.put(3606, "鹰潭市"); tempCity.put(3607, "赣州市"); tempCity.put(3608, "吉安市"); tempCity.put(3609, "宜春市"); tempCity.put(3610, "抚州市"); tempCity.put(3611, "上饶市"); tempCity.put(3701, "济南市"); tempCity.put(3702, "青岛市"); tempCity.put(3703, "淄博市"); tempCity.put(3704, "枣庄市"); tempCity.put(3705, "东营市"); tempCity.put(3706, "烟台市"); tempCity.put(3707, "潍坊市"); tempCity.put(3708, "济宁市"); tempCity.put(3709, "泰安市"); tempCity.put(3710, "威海市"); tempCity.put(3711, "日照市"); tempCity.put(3713, "临沂市"); tempCity.put(3714, "德州市"); tempCity.put(3715, "聊城市"); tempCity.put(3716, "滨州市"); tempCity.put(3717, "菏泽市"); tempCity.put(4101, "郑州市"); tempCity.put(4102, "开封市"); tempCity.put(4103, "洛阳市"); tempCity.put(4104, "平顶山市"); tempCity.put(4105, "安阳市"); tempCity.put(4106, "鹤壁市"); tempCity.put(4107, "新乡市"); tempCity.put(4108, "焦作市"); tempCity.put(4109, "濮阳市"); tempCity.put(4110, "许昌市"); tempCity.put(4111, "漯河市"); tempCity.put(4112, "三门峡市"); tempCity.put(4113, "南阳市"); tempCity.put(4114, "商丘市"); tempCity.put(4115, "信阳市"); tempCity.put(4116, "周口市"); tempCity.put(4117, "驻马店市"); tempCity.put(4190, "省直辖县级行政区划"); tempCity.put(4201, "武汉市"); tempCity.put(4202, "黄石市"); tempCity.put(4203, "十堰市"); tempCity.put(4205, "宜昌市"); tempCity.put(4206, "襄阳市"); tempCity.put(4207, "鄂州市"); tempCity.put(4208, "荆门市"); tempCity.put(4209, "孝感市"); tempCity.put(4210, "荆州市"); tempCity.put(4211, "黄冈市"); tempCity.put(4212, "咸宁市"); tempCity.put(4213, "随州市"); tempCity.put(4228, "恩施土家族苗族自治州"); tempCity.put(4290, "省直辖县级行政区划"); tempCity.put(4301, "长沙市"); tempCity.put(4302, "株洲市"); tempCity.put(4303, "湘潭市"); tempCity.put(4304, "衡阳市"); tempCity.put(4305, "邵阳市"); tempCity.put(4306, "岳阳市"); tempCity.put(4307, "常德市"); tempCity.put(4308, "张家界市"); tempCity.put(4309, "益阳市"); tempCity.put(4310, "郴州市"); tempCity.put(4311, "永州市"); tempCity.put(4312, "怀化市"); tempCity.put(4313, "娄底市"); tempCity.put(4331, "湘西土家族苗族自治州"); tempCity.put(4401, "广州市"); tempCity.put(4402, "韶关市"); tempCity.put(4403, "深圳市"); tempCity.put(4404, 
"珠海市"); tempCity.put(4405, "汕头市"); tempCity.put(4406, "佛山市"); tempCity.put(4407, "江门市"); tempCity.put(4408, "湛江市"); tempCity.put(4409, "茂名市"); tempCity.put(4412, "肇庆市"); tempCity.put(4413, "惠州市"); tempCity.put(4414, "梅州市"); tempCity.put(4415, "汕尾市"); tempCity.put(4416, "河源市"); tempCity.put(4417, "阳江市"); tempCity.put(4418, "清远市"); tempCity.put(4419, "东莞市"); tempCity.put(4420, "中山市"); tempCity.put(4451, "潮州市"); tempCity.put(4452, "揭阳市"); tempCity.put(4453, "云浮市"); tempCity.put(4501, "南宁市"); tempCity.put(4502, "柳州市"); tempCity.put(4503, "桂林市"); tempCity.put(4504, "梧州市"); tempCity.put(4505, "北海市"); tempCity.put(4506, "防城港市"); tempCity.put(4507, "钦州市"); tempCity.put(4508, "贵港市"); tempCity.put(4509, "玉林市"); tempCity.put(4510, "百色市"); tempCity.put(4511, "贺州市"); tempCity.put(4512, "河池市"); tempCity.put(4513, "来宾市"); tempCity.put(4514, "崇左市"); tempCity.put(4601, "海口市"); tempCity.put(4602, "三亚市"); tempCity.put(4603, "三沙市"); tempCity.put(4604, "儋州市"); tempCity.put(4690, "省直辖县级行政区划"); tempCity.put(5001, "重庆市"); tempCity.put(5002, "县"); tempCity.put(5101, "成都市"); tempCity.put(5103, "自贡市"); tempCity.put(5104, "攀枝花市"); tempCity.put(5105, "泸州市"); tempCity.put(5106, "德阳市"); tempCity.put(5107, "绵阳市"); tempCity.put(5108, "广元市"); tempCity.put(5109, "遂宁市"); tempCity.put(5110, "内江市"); tempCity.put(5111, "乐山市"); tempCity.put(5113, "南充市"); tempCity.put(5114, "眉山市"); tempCity.put(5115, "宜宾市"); tempCity.put(5116, "广安市"); tempCity.put(5117, "达州市"); tempCity.put(5118, "雅安市"); tempCity.put(5119, "巴中市"); tempCity.put(5120, "资阳市"); tempCity.put(5132, "阿坝藏族羌族自治州"); tempCity.put(5133, "甘孜藏族自治州"); tempCity.put(5134, "凉山彝族自治州"); tempCity.put(5201, "贵阳市"); tempCity.put(5202, "六盘水市"); tempCity.put(5203, "遵义市"); tempCity.put(5204, "安顺市"); tempCity.put(5205, "毕节市"); tempCity.put(5206, "铜仁市"); tempCity.put(5223, "黔西南布依族苗族自治州"); tempCity.put(5226, "黔东南苗族侗族自治州"); tempCity.put(5227, "黔南布依族苗族自治州"); tempCity.put(5301, "昆明市"); tempCity.put(5303, "曲靖市"); tempCity.put(5304, "玉溪市"); tempCity.put(5305, "保山市"); 
tempCity.put(5306, "昭通市"); tempCity.put(5307, "丽江市"); tempCity.put(5308, "普洱市"); tempCity.put(5309, "临沧市"); tempCity.put(5323, "楚雄彝族自治州"); tempCity.put(5325, "红河哈尼族彝族自治州"); tempCity.put(5326, "文山壮族苗族自治州"); tempCity.put(5328, "西双版纳傣族自治州"); tempCity.put(5329, "大理白族自治州"); tempCity.put(5331, "德宏傣族景颇族自治州"); tempCity.put(5333, "怒江傈僳族自治州"); tempCity.put(5334, "迪庆藏族自治州"); tempCity.put(5401, "拉萨市"); tempCity.put(5402, "日喀则市"); tempCity.put(5403, "昌都市"); tempCity.put(5404, "林芝市"); tempCity.put(5405, "山南市"); tempCity.put(5406, "那曲市"); tempCity.put(5425, "阿里地区"); tempCity.put(6101, "西安市"); tempCity.put(6102, "铜川市"); tempCity.put(6103, "宝鸡市"); tempCity.put(6104, "咸阳市"); tempCity.put(6105, "渭南市"); tempCity.put(6106, "延安市"); tempCity.put(6107, "汉中市"); tempCity.put(6108, "榆林市"); tempCity.put(6109, "安康市"); tempCity.put(6110, "商洛市"); tempCity.put(6201, "兰州市"); tempCity.put(6202, "嘉峪关市"); tempCity.put(6203, "金昌市"); tempCity.put(6204, "白银市"); tempCity.put(6205, "天水市"); tempCity.put(6206, "武威市"); tempCity.put(6207, "张掖市"); tempCity.put(6208, "平凉市"); tempCity.put(6209, "酒泉市"); tempCity.put(6210, "庆阳市"); tempCity.put(6211, "定西市"); tempCity.put(6212, "陇南市"); tempCity.put(6229, "临夏回族自治州"); tempCity.put(6230, "甘南藏族自治州"); tempCity.put(6301, "西宁市"); tempCity.put(6302, "海东市"); tempCity.put(6322, "海北藏族自治州"); tempCity.put(6323, "黄南藏族自治州"); tempCity.put(6325, "海南藏族自治州"); tempCity.put(6326, "果洛藏族自治州"); tempCity.put(6327, "玉树藏族自治州"); tempCity.put(6328, "海西蒙古族藏族自治州"); tempCity.put(6401, "银川市"); tempCity.put(6402, "石嘴山市"); tempCity.put(6403, "吴忠市"); tempCity.put(6404, "固原市"); tempCity.put(6405, "中卫市"); tempCity.put(6501, "乌鲁木齐市"); tempCity.put(6502, "克拉玛依市"); tempCity.put(6504, "吐鲁番市"); tempCity.put(6505, "哈密市"); tempCity.put(6523, "昌吉回族自治州"); tempCity.put(6527, "博尔塔拉蒙古自治州"); tempCity.put(6528, "巴音郭楞蒙古自治州"); tempCity.put(6529, "阿克苏地区"); tempCity.put(6530, "克孜勒苏柯尔克孜自治州"); tempCity.put(6531, "喀什地区"); tempCity.put(6532, "和田地区"); tempCity.put(6540, "伊犁哈萨克自治州"); tempCity.put(6542, "塔城地区"); 
tempCity.put(6543, "阿勒泰地区"); tempCity.put(6590, "自治区直辖县级行政区划"); CITY = Collections.unmodifiableSortedMap(tempCity); } static { TreeMap<Integer, String> tempCounty = new TreeMap<>(); tempCounty.put(110101, "东城区"); tempCounty.put(110102, "西城区"); tempCounty.put(110105, "朝阳区"); tempCounty.put(110106, "丰台区"); tempCounty.put(110107, "石景山区"); tempCounty.put(110108, "海淀区"); tempCounty.put(110109, "门头沟区"); tempCounty.put(110111, "房山区"); tempCounty.put(110112, "通州区"); tempCounty.put(110113, "顺义区"); tempCounty.put(110114, "昌平区"); tempCounty.put(110115, "大兴区"); tempCounty.put(110116, "怀柔区"); tempCounty.put(110117, "平谷区"); tempCounty.put(110118, "密云区"); tempCounty.put(110119, "延庆区"); tempCounty.put(120101, "和平区"); tempCounty.put(120102, "河东区"); tempCounty.put(120103, "河西区"); tempCounty.put(120104, "南开区"); tempCounty.put(120105, "河北区"); tempCounty.put(120106, "红桥区"); tempCounty.put(120110, "东丽区"); tempCounty.put(120111, "西青区"); tempCounty.put(120112, "津南区"); tempCounty.put(120113, "北辰区"); tempCounty.put(120114, "武清区"); tempCounty.put(120115, "宝坻区"); tempCounty.put(120116, "滨海新区"); tempCounty.put(120117, "宁河区"); tempCounty.put(120118, "静海区"); tempCounty.put(120119, "蓟州区"); tempCounty.put(130102, "长安区"); tempCounty.put(130104, "桥西区"); tempCounty.put(130105, "新华区"); tempCounty.put(130107, "井陉矿区"); tempCounty.put(130108, "裕华区"); tempCounty.put(130109, "藁城区"); tempCounty.put(130110, "鹿泉区"); tempCounty.put(130111, "栾城区"); tempCounty.put(130121, "井陉县"); tempCounty.put(130123, "正定县"); tempCounty.put(130125, "行唐县"); tempCounty.put(130126, "灵寿县"); tempCounty.put(130127, "高邑县"); tempCounty.put(130128, "深泽县"); tempCounty.put(130129, "赞皇县"); tempCounty.put(130130, "无极县"); tempCounty.put(130131, "平山县"); tempCounty.put(130132, "元氏县"); tempCounty.put(130133, "赵县"); tempCounty.put(130171, "石家庄高新技术产业开发区"); tempCounty.put(130172, "石家庄循环化工园区"); tempCounty.put(130181, "辛集市"); tempCounty.put(130183, "晋州市"); tempCounty.put(130184, "新乐市"); tempCounty.put(130202, "路南区"); tempCounty.put(130203, "路北区"); 
tempCounty.put(130204, "古冶区"); tempCounty.put(130205, "开平区"); tempCounty.put(130207, "丰南区"); tempCounty.put(130208, "丰润区"); tempCounty.put(130209, "曹妃甸区"); tempCounty.put(130224, "滦南县"); tempCounty.put(130225, "乐亭县"); tempCounty.put(130227, "迁西县"); tempCounty.put(130229, "玉田县"); tempCounty.put(130271, "河北唐山芦台经济开发区"); tempCounty.put(130272, "唐山市汉沽管理区"); tempCounty.put(130273, "唐山高新技术产业开发区"); tempCounty.put(130274, "河北唐山海港经济开发区"); tempCounty.put(130281, "遵化市"); tempCounty.put(130283, "迁安市"); tempCounty.put(130284, "滦州市"); tempCounty.put(130302, "海港区"); tempCounty.put(130303, "山海关区"); tempCounty.put(130304, "北戴河区"); tempCounty.put(130306, "抚宁区"); tempCounty.put(130321, "青龙满族自治县"); tempCounty.put(130322, "昌黎县"); tempCounty.put(130324, "卢龙县"); tempCounty.put(130371, "秦皇岛市经济技术开发区"); tempCounty.put(130372, "北戴河新区"); tempCounty.put(130402, "邯山区"); tempCounty.put(130403, "丛台区"); tempCounty.put(130404, "复兴区"); tempCounty.put(130406, "峰峰矿区"); tempCounty.put(130407, "肥乡区"); tempCounty.put(130408, "永年区"); tempCounty.put(130423, "临漳县"); tempCounty.put(130424, "成安县"); tempCounty.put(130425, "大名县"); tempCounty.put(130426, "涉县"); tempCounty.put(130427, "磁县"); tempCounty.put(130430, "邱县"); tempCounty.put(130431, "鸡泽县"); tempCounty.put(130432, "广平县"); tempCounty.put(130433, "馆陶县"); tempCounty.put(130434, "魏县"); tempCounty.put(130435, "曲周县"); tempCounty.put(130471, "邯郸经济技术开发区"); tempCounty.put(130473, "邯郸冀南新区"); tempCounty.put(130481, "武安市"); tempCounty.put(130502, "襄都区"); tempCounty.put(130503, "信都区"); tempCounty.put(130505, "任泽区"); tempCounty.put(130506, "南和区"); tempCounty.put(130522, "临城县"); tempCounty.put(130523, "内丘县"); tempCounty.put(130524, "柏乡县"); tempCounty.put(130525, "隆尧县"); tempCounty.put(130528, "宁晋县"); tempCounty.put(130529, "巨鹿县"); tempCounty.put(130530, "新河县"); tempCounty.put(130531, "广宗县"); tempCounty.put(130532, "平乡县"); tempCounty.put(130533, "威县"); tempCounty.put(130534, "清河县"); tempCounty.put(130535, "临西县"); tempCounty.put(130571, "河北邢台经济开发区"); 
tempCounty.put(130581, "南宫市"); tempCounty.put(130582, "沙河市"); tempCounty.put(130602, "竞秀区"); tempCounty.put(130606, "莲池区"); tempCounty.put(130607, "满城区"); tempCounty.put(130608, "清苑区"); tempCounty.put(130609, "徐水区"); tempCounty.put(130623, "涞水县"); tempCounty.put(130624, "阜平县"); tempCounty.put(130626, "定兴县"); tempCounty.put(130627, "唐县"); tempCounty.put(130628, "高阳县"); tempCounty.put(130629, "容城县"); tempCounty.put(130630, "涞源县"); tempCounty.put(130631, "望都县"); tempCounty.put(130632, "安新县"); tempCounty.put(130633, "易县"); tempCounty.put(130634, "曲阳县"); tempCounty.put(130635, "蠡县"); tempCounty.put(130636, "顺平县"); tempCounty.put(130637, "博野县"); tempCounty.put(130638, "雄县"); tempCounty.put(130671, "保定高新技术产业开发区"); tempCounty.put(130672, "保定白沟新城"); tempCounty.put(130681, "涿州市"); tempCounty.put(130682, "定州市"); tempCounty.put(130683, "安国市"); tempCounty.put(130684, "高碑店市"); tempCounty.put(130702, "桥东区"); tempCounty.put(130703, "桥西区"); tempCounty.put(130705, "宣化区"); tempCounty.put(130706, "下花园区"); tempCounty.put(130708, "万全区"); tempCounty.put(130709, "崇礼区"); tempCounty.put(130722, "张北县"); tempCounty.put(130723, "康保县"); tempCounty.put(130724, "沽源县"); tempCounty.put(130725, "尚义县"); tempCounty.put(130726, "蔚县"); tempCounty.put(130727, "阳原县"); tempCounty.put(130728, "怀安县"); tempCounty.put(130730, "怀来县"); tempCounty.put(130731, "涿鹿县"); tempCounty.put(130732, "赤城县"); tempCounty.put(130771, "张家口经济开发区"); tempCounty.put(130772, "张家口市察北管理区"); tempCounty.put(130773, "张家口市塞北管理区"); tempCounty.put(130802, "双桥区"); tempCounty.put(130803, "双滦区"); tempCounty.put(130804, "鹰手营子矿区"); tempCounty.put(130821, "承德县"); tempCounty.put(130822, "兴隆县"); tempCounty.put(130824, "滦平县"); tempCounty.put(130825, "隆化县"); tempCounty.put(130826, "丰宁满族自治县"); tempCounty.put(130827, "宽城满族自治县"); tempCounty.put(130828, "围场满族蒙古族自治县"); tempCounty.put(130871, "承德高新技术产业开发区"); tempCounty.put(130881, "平泉市"); tempCounty.put(130902, "新华区"); tempCounty.put(130903, "运河区"); tempCounty.put(130921, "沧县"); tempCounty.put(130922, 
"青县"); tempCounty.put(130923, "东光县"); tempCounty.put(130924, "海兴县"); tempCounty.put(130925, "盐山县"); tempCounty.put(130926, "肃宁县"); tempCounty.put(130927, "南皮县"); tempCounty.put(130928, "吴桥县"); tempCounty.put(130929, "献县"); tempCounty.put(130930, "孟村回族自治县"); tempCounty.put(130971, "河北沧州经济开发区"); tempCounty.put(130972, "沧州高新技术产业开发区"); tempCounty.put(130973, "沧州渤海新区"); tempCounty.put(130981, "泊头市"); tempCounty.put(130982, "任丘市"); tempCounty.put(130983, "黄骅市"); tempCounty.put(130984, "河间市"); tempCounty.put(131002, "安次区"); tempCounty.put(131003, "广阳区"); tempCounty.put(131022, "固安县"); tempCounty.put(131023, "永清县"); tempCounty.put(131024, "香河县"); tempCounty.put(131025, "大城县"); tempCounty.put(131026, "文安县"); tempCounty.put(131028, "大厂回族自治县"); tempCounty.put(131071, "廊坊经济技术开发区"); tempCounty.put(131081, "霸州市"); tempCounty.put(131082, "三河市"); tempCounty.put(131102, "桃城区"); tempCounty.put(131103, "冀州区"); tempCounty.put(131121, "枣强县"); tempCounty.put(131122, "武邑县"); tempCounty.put(131123, "武强县"); tempCounty.put(131124, "饶阳县"); tempCounty.put(131125, "安平县"); tempCounty.put(131126, "故城县"); tempCounty.put(131127, "景县"); tempCounty.put(131128, "阜城县"); tempCounty.put(131171, "河北衡水高新技术产业开发区"); tempCounty.put(131172, "衡水滨湖新区"); tempCounty.put(131182, "深州市"); tempCounty.put(140105, "小店区"); tempCounty.put(140106, "迎泽区"); tempCounty.put(140107, "杏花岭区"); tempCounty.put(140108, "尖草坪区"); tempCounty.put(140109, "万柏林区"); tempCounty.put(140110, "晋源区"); tempCounty.put(140121, "清徐县"); tempCounty.put(140122, "阳曲县"); tempCounty.put(140123, "娄烦县"); tempCounty.put(140171, "山西转型综合改革示范区"); tempCounty.put(140181, "古交市"); tempCounty.put(140212, "新荣区"); tempCounty.put(140213, "平城区"); tempCounty.put(140214, "云冈区"); tempCounty.put(140215, "云州区"); tempCounty.put(140221, "阳高县"); tempCounty.put(140222, "天镇县"); tempCounty.put(140223, "广灵县"); tempCounty.put(140224, "灵丘县"); tempCounty.put(140225, "浑源县"); tempCounty.put(140226, "左云县"); tempCounty.put(140271, "山西大同经济开发区"); tempCounty.put(140302, "城区"); 
tempCounty.put(140303, "矿区"); tempCounty.put(140311, "郊区"); tempCounty.put(140321, "平定县"); tempCounty.put(140322, "盂县"); tempCounty.put(140403, "潞州区"); tempCounty.put(140404, "上党区"); tempCounty.put(140405, "屯留区"); tempCounty.put(140406, "潞城区"); tempCounty.put(140423, "襄垣县"); tempCounty.put(140425, "平顺县"); tempCounty.put(140426, "黎城县"); tempCounty.put(140427, "壶关县"); tempCounty.put(140428, "长子县"); tempCounty.put(140429, "武乡县"); tempCounty.put(140430, "沁县"); tempCounty.put(140431, "沁源县"); tempCounty.put(140471, "山西长治高新技术产业园区"); tempCounty.put(140502, "城区"); tempCounty.put(140521, "沁水县"); tempCounty.put(140522, "阳城县"); tempCounty.put(140524, "陵川县"); tempCounty.put(140525, "泽州县"); tempCounty.put(140581, "高平市"); tempCounty.put(140602, "朔城区"); tempCounty.put(140603, "平鲁区"); tempCounty.put(140621, "山阴县"); tempCounty.put(140622, "应县"); tempCounty.put(140623, "右玉县"); tempCounty.put(140671, "山西朔州经济开发区"); tempCounty.put(140681, "怀仁市"); tempCounty.put(140702, "榆次区"); tempCounty.put(140703, "太谷区"); tempCounty.put(140721, "榆社县"); tempCounty.put(140722, "左权县"); tempCounty.put(140723, "和顺县"); tempCounty.put(140724, "昔阳县"); tempCounty.put(140725, "寿阳县"); tempCounty.put(140727, "祁县"); tempCounty.put(140728, "平遥县"); tempCounty.put(140729, "灵石县"); tempCounty.put(140781, "介休市"); tempCounty.put(140802, "盐湖区"); tempCounty.put(140821, "临猗县"); tempCounty.put(140822, "万荣县"); tempCounty.put(140823, "闻喜县"); tempCounty.put(140824, "稷山县"); tempCounty.put(140825, "新绛县"); tempCounty.put(140826, "绛县"); tempCounty.put(140827, "垣曲县"); tempCounty.put(140828, "夏县"); tempCounty.put(140829, "平陆县"); tempCounty.put(140830, "芮城县"); tempCounty.put(140881, "永济市"); tempCounty.put(140882, "河津市"); tempCounty.put(140902, "忻府区"); tempCounty.put(140921, "定襄县"); tempCounty.put(140922, "五台县"); tempCounty.put(140923, "代县"); tempCounty.put(140924, "繁峙县"); tempCounty.put(140925, "宁武县"); tempCounty.put(140926, "静乐县"); tempCounty.put(140927, "神池县"); tempCounty.put(140928, "五寨县"); tempCounty.put(140929, "岢岚县"); 
tempCounty.put(140930, "河曲县"); tempCounty.put(140931, "保德县"); tempCounty.put(140932, "偏关县"); tempCounty.put(140971, "五台山风景名胜区"); tempCounty.put(140981, "原平市"); tempCounty.put(141002, "尧都区"); tempCounty.put(141021, "曲沃县"); tempCounty.put(141022, "翼城县"); tempCounty.put(141023, "襄汾县"); tempCounty.put(141024, "洪洞县"); tempCounty.put(141025, "古县"); tempCounty.put(141026, "安泽县"); tempCounty.put(141027, "浮山县"); tempCounty.put(141028, "吉县"); tempCounty.put(141029, "乡宁县"); tempCounty.put(141030, "大宁县"); tempCounty.put(141031, "隰县"); tempCounty.put(141032, "永和县"); tempCounty.put(141033, "蒲县"); tempCounty.put(141034, "汾西县"); tempCounty.put(141081, "侯马市"); tempCounty.put(141082, "霍州市"); tempCounty.put(141102, "离石区"); tempCounty.put(141121, "文水县"); tempCounty.put(141122, "交城县"); tempCounty.put(141123, "兴县"); tempCounty.put(141124, "临县"); tempCounty.put(141125, "柳林县"); tempCounty.put(141126, "石楼县"); tempCounty.put(141127, "岚县"); tempCounty.put(141128, "方山县"); tempCounty.put(141129, "中阳县"); tempCounty.put(141130, "交口县"); tempCounty.put(141181, "孝义市"); tempCounty.put(141182, "汾阳市"); tempCounty.put(150102, "新城区"); tempCounty.put(150103, "回民区"); tempCounty.put(150104, "玉泉区"); tempCounty.put(150105, "赛罕区"); tempCounty.put(150121, "土默特左旗"); tempCounty.put(150122, "托克托县"); tempCounty.put(150123, "和林格尔县"); tempCounty.put(150124, "清水河县"); tempCounty.put(150125, "武川县"); tempCounty.put(150172, "呼和浩特经济技术开发区"); tempCounty.put(150202, "东河区"); tempCounty.put(150203, "昆都仑区"); tempCounty.put(150204, "青山区"); tempCounty.put(150205, "石拐区"); tempCounty.put(150206, "白云鄂博矿区"); tempCounty.put(150207, "九原区"); tempCounty.put(150221, "土默特右旗"); tempCounty.put(150222, "固阳县"); tempCounty.put(150223, "达尔罕茂明安联合旗"); tempCounty.put(150271, "包头稀土高新技术产业开发区"); tempCounty.put(150302, "海勃湾区"); tempCounty.put(150303, "海南区"); tempCounty.put(150304, "乌达区"); tempCounty.put(150402, "红山区"); tempCounty.put(150403, "元宝山区"); tempCounty.put(150404, "松山区"); tempCounty.put(150421, "阿鲁科尔沁旗"); tempCounty.put(150422, "巴林左旗"); 
tempCounty.put(150423, "巴林右旗"); tempCounty.put(150424, "林西县"); tempCounty.put(150425, "克什克腾旗"); tempCounty.put(150426, "翁牛特旗"); tempCounty.put(150428, "喀喇沁旗"); tempCounty.put(150429, "宁城县"); tempCounty.put(150430, "敖汉旗"); tempCounty.put(150502, "科尔沁区"); tempCounty.put(150521, "科尔沁左翼中旗"); tempCounty.put(150522, "科尔沁左翼后旗"); tempCounty.put(150523, "开鲁县"); tempCounty.put(150524, "库伦旗"); tempCounty.put(150525, "奈曼旗"); tempCounty.put(150526, "扎鲁特旗"); tempCounty.put(150571, "通辽经济技术开发区"); tempCounty.put(150581, "霍林郭勒市"); tempCounty.put(150602, "东胜区"); tempCounty.put(150603, "康巴什区"); tempCounty.put(150621, "达拉特旗"); tempCounty.put(150622, "准格尔旗"); tempCounty.put(150623, "鄂托克前旗"); tempCounty.put(150624, "鄂托克旗"); tempCounty.put(150625, "杭锦旗"); tempCounty.put(150626, "乌审旗"); tempCounty.put(150627, "伊金霍洛旗"); tempCounty.put(150702, "海拉尔区"); tempCounty.put(150703, "扎赉诺尔区"); tempCounty.put(150721, "阿荣旗"); tempCounty.put(150722, "莫力达瓦达斡尔族自治旗"); tempCounty.put(150723, "鄂伦春自治旗"); tempCounty.put(150724, "鄂温克族自治旗"); tempCounty.put(150725, "陈巴尔虎旗"); tempCounty.put(150726, "新巴尔虎左旗"); tempCounty.put(150727, "新巴尔虎右旗"); tempCounty.put(150781, "满洲里市"); tempCounty.put(150782, "牙克石市"); tempCounty.put(150783, "扎兰屯市"); tempCounty.put(150784, "额尔古纳市"); tempCounty.put(150785, "根河市"); tempCounty.put(150802, "临河区"); tempCounty.put(150821, "五原县"); tempCounty.put(150822, "磴口县"); tempCounty.put(150823, "乌拉特前旗"); tempCounty.put(150824, "乌拉特中旗"); tempCounty.put(150825, "乌拉特后旗"); tempCounty.put(150826, "杭锦后旗"); tempCounty.put(150902, "集宁区"); tempCounty.put(150921, "卓资县"); tempCounty.put(150922, "化德县"); tempCounty.put(150923, "商都县"); tempCounty.put(150924, "兴和县"); tempCounty.put(150925, "凉城县"); tempCounty.put(150926, "察哈尔右翼前旗"); tempCounty.put(150927, "察哈尔右翼中旗"); tempCounty.put(150928, "察哈尔右翼后旗"); tempCounty.put(150929, "四子王旗"); tempCounty.put(150981, "丰镇市"); tempCounty.put(152201, "乌兰浩特市"); tempCounty.put(152202, "阿尔山市"); tempCounty.put(152221, "科尔沁右翼前旗"); tempCounty.put(152222, "科尔沁右翼中旗"); 
tempCounty.put(152223, "扎赉特旗"); tempCounty.put(152224, "突泉县"); tempCounty.put(152501, "二连浩特市"); tempCounty.put(152502, "锡林浩特市"); tempCounty.put(152522, "阿巴嘎旗"); tempCounty.put(152523, "苏尼特左旗"); tempCounty.put(152524, "苏尼特右旗"); tempCounty.put(152525, "东乌珠穆沁旗"); tempCounty.put(152526, "西乌珠穆沁旗"); tempCounty.put(152527, "太仆寺旗"); tempCounty.put(152528, "镶黄旗"); tempCounty.put(152529, "正镶白旗"); tempCounty.put(152530, "正蓝旗"); tempCounty.put(152531, "多伦县"); tempCounty.put(152571, "乌拉盖管委会"); tempCounty.put(152921, "阿拉善左旗"); tempCounty.put(152922, "阿拉善右旗"); tempCounty.put(152923, "额济纳旗"); tempCounty.put(152971, "内蒙古阿拉善经济开发区"); tempCounty.put(210102, "和平区"); tempCounty.put(210103, "沈河区"); tempCounty.put(210104, "大东区"); tempCounty.put(210105, "皇姑区"); tempCounty.put(210106, "铁西区"); tempCounty.put(210111, "苏家屯区"); tempCounty.put(210112, "浑南区"); tempCounty.put(210113, "沈北新区"); tempCounty.put(210114, "于洪区"); tempCounty.put(210115, "辽中区"); tempCounty.put(210123, "康平县"); tempCounty.put(210124, "法库县"); tempCounty.put(210181, "新民市"); tempCounty.put(210202, "中山区"); tempCounty.put(210203, "西岗区"); tempCounty.put(210204, "沙河口区"); tempCounty.put(210211, "甘井子区"); tempCounty.put(210212, "旅顺口区"); tempCounty.put(210213, "金州区"); tempCounty.put(210214, "普兰店区"); tempCounty.put(210224, "长海县"); tempCounty.put(210281, "瓦房店市"); tempCounty.put(210283, "庄河市"); tempCounty.put(210302, "铁东区"); tempCounty.put(210303, "铁西区"); tempCounty.put(210304, "立山区"); tempCounty.put(210311, "千山区"); tempCounty.put(210321, "台安县"); tempCounty.put(210323, "岫岩满族自治县"); tempCounty.put(210381, "海城市"); tempCounty.put(210402, "新抚区"); tempCounty.put(210403, "东洲区"); tempCounty.put(210404, "望花区"); tempCounty.put(210411, "顺城区"); tempCounty.put(210421, "抚顺县"); tempCounty.put(210422, "新宾满族自治县"); tempCounty.put(210423, "清原满族自治县"); tempCounty.put(210502, "平山区"); tempCounty.put(210503, "溪湖区"); tempCounty.put(210504, "明山区"); tempCounty.put(210505, "南芬区"); tempCounty.put(210521, "本溪满族自治县"); tempCounty.put(210522, "桓仁满族自治县"); 
tempCounty.put(210602, "元宝区"); tempCounty.put(210603, "振兴区"); tempCounty.put(210604, "振安区"); tempCounty.put(210624, "宽甸满族自治县"); tempCounty.put(210681, "东港市"); tempCounty.put(210682, "凤城市"); tempCounty.put(210702, "古塔区"); tempCounty.put(210703, "凌河区"); tempCounty.put(210711, "太和区"); tempCounty.put(210726, "黑山县"); tempCounty.put(210727, "义县"); tempCounty.put(210781, "凌海市"); tempCounty.put(210782, "北镇市"); tempCounty.put(210802, "站前区"); tempCounty.put(210803, "西市区"); tempCounty.put(210804, "鲅鱼圈区"); tempCounty.put(210811, "老边区"); tempCounty.put(210881, "盖州市"); tempCounty.put(210882, "大石桥市"); tempCounty.put(210902, "海州区"); tempCounty.put(210903, "新邱区"); tempCounty.put(210904, "太平区"); tempCounty.put(210905, "清河门区"); tempCounty.put(210911, "细河区"); tempCounty.put(210921, "阜新蒙古族自治县"); tempCounty.put(210922, "彰武县"); tempCounty.put(211002, "白塔区"); tempCounty.put(211003, "文圣区"); tempCounty.put(211004, "宏伟区"); tempCounty.put(211005, "弓长岭区"); tempCounty.put(211011, "太子河区"); tempCounty.put(211021, "辽阳县"); tempCounty.put(211081, "灯塔市"); tempCounty.put(211102, "双台子区"); tempCounty.put(211103, "兴隆台区"); tempCounty.put(211104, "大洼区"); tempCounty.put(211122, "盘山县"); tempCounty.put(211202, "银州区"); tempCounty.put(211204, "清河区"); tempCounty.put(211221, "铁岭县"); tempCounty.put(211223, "西丰县"); tempCounty.put(211224, "昌图县"); tempCounty.put(211281, "调兵山市"); tempCounty.put(211282, "开原市"); tempCounty.put(211302, "双塔区"); tempCounty.put(211303, "龙城区"); tempCounty.put(211321, "朝阳县"); tempCounty.put(211322, "建平县"); tempCounty.put(211324, "喀喇沁左翼蒙古族自治县"); tempCounty.put(211381, "北票市"); tempCounty.put(211382, "凌源市"); tempCounty.put(211402, "连山区"); tempCounty.put(211403, "龙港区"); tempCounty.put(211404, "南票区"); tempCounty.put(211421, "绥中县"); tempCounty.put(211422, "建昌县"); tempCounty.put(211481, "兴城市"); tempCounty.put(220102, "南关区"); tempCounty.put(220103, "宽城区"); tempCounty.put(220104, "朝阳区"); tempCounty.put(220105, "二道区"); tempCounty.put(220106, "绿园区"); tempCounty.put(220112, "双阳区"); tempCounty.put(220113, 
"九台区"); tempCounty.put(220122, "农安县"); tempCounty.put(220171, "长春经济技术开发区"); tempCounty.put(220172, "长春净月高新技术产业开发区"); tempCounty.put(220173, "长春高新技术产业开发区"); tempCounty.put(220174, "长春汽车经济技术开发区"); tempCounty.put(220182, "榆树市"); tempCounty.put(220183, "德惠市"); tempCounty.put(220184, "公主岭市"); tempCounty.put(220202, "昌邑区"); tempCounty.put(220203, "龙潭区"); tempCounty.put(220204, "船营区"); tempCounty.put(220211, "丰满区"); tempCounty.put(220221, "永吉县"); tempCounty.put(220271, "吉林经济开发区"); tempCounty.put(220272, "吉林高新技术产业开发区"); tempCounty.put(220273, "吉林中国新加坡食品区"); tempCounty.put(220281, "蛟河市"); tempCounty.put(220282, "桦甸市"); tempCounty.put(220283, "舒兰市"); tempCounty.put(220284, "磐石市"); tempCounty.put(220302, "铁西区"); tempCounty.put(220303, "铁东区"); tempCounty.put(220322, "梨树县"); tempCounty.put(220323, "伊通满族自治县"); tempCounty.put(220382, "双辽市"); tempCounty.put(220402, "龙山区"); tempCounty.put(220403, "西安区"); tempCounty.put(220421, "东丰县"); tempCounty.put(220422, "东辽县"); tempCounty.put(220502, "东昌区"); tempCounty.put(220503, "二道江区"); tempCounty.put(220521, "通化县"); tempCounty.put(220523, "辉南县"); tempCounty.put(220524, "柳河县"); tempCounty.put(220581, "梅河口市"); tempCounty.put(220582, "集安市"); tempCounty.put(220602, "浑江区"); tempCounty.put(220605, "江源区"); tempCounty.put(220621, "抚松县"); tempCounty.put(220622, "靖宇县"); tempCounty.put(220623, "长白朝鲜族自治县"); tempCounty.put(220681, "临江市"); tempCounty.put(220702, "宁江区"); tempCounty.put(220721, "前郭尔罗斯蒙古族自治县"); tempCounty.put(220722, "长岭县"); tempCounty.put(220723, "乾安县"); tempCounty.put(220771, "吉林松原经济开发区"); tempCounty.put(220781, "扶余市"); tempCounty.put(220802, "洮北区"); tempCounty.put(220821, "镇赉县"); tempCounty.put(220822, "通榆县"); tempCounty.put(220871, "吉林白城经济开发区"); tempCounty.put(220881, "洮南市"); tempCounty.put(220882, "大安市"); tempCounty.put(222401, "延吉市"); tempCounty.put(222402, "图们市"); tempCounty.put(222403, "敦化市"); tempCounty.put(222404, "珲春市"); tempCounty.put(222405, "龙井市"); tempCounty.put(222406, "和龙市"); tempCounty.put(222424, "汪清县"); 
tempCounty.put(222426, "安图县"); tempCounty.put(230102, "道里区"); tempCounty.put(230103, "南岗区"); tempCounty.put(230104, "道外区"); tempCounty.put(230108, "平房区"); tempCounty.put(230109, "松北区"); tempCounty.put(230110, "香坊区"); tempCounty.put(230111, "呼兰区"); tempCounty.put(230112, "阿城区"); tempCounty.put(230113, "双城区"); tempCounty.put(230123, "依兰县"); tempCounty.put(230124, "方正县"); tempCounty.put(230125, "宾县"); tempCounty.put(230126, "巴彦县"); tempCounty.put(230127, "木兰县"); tempCounty.put(230128, "通河县"); tempCounty.put(230129, "延寿县"); tempCounty.put(230183, "尚志市"); tempCounty.put(230184, "五常市"); tempCounty.put(230202, "龙沙区"); tempCounty.put(230203, "建华区"); tempCounty.put(230204, "铁锋区"); tempCounty.put(230205, "昂昂溪区"); tempCounty.put(230206, "富拉尔基区"); tempCounty.put(230207, "碾子山区"); tempCounty.put(230208, "梅里斯达斡尔族区"); tempCounty.put(230221, "龙江县"); tempCounty.put(230223, "依安县"); tempCounty.put(230224, "泰来县"); tempCounty.put(230225, "甘南县"); tempCounty.put(230227, "富裕县"); tempCounty.put(230229, "克山县"); tempCounty.put(230230, "克东县"); tempCounty.put(230231, "拜泉县"); tempCounty.put(230281, "讷河市"); tempCounty.put(230302, "鸡冠区"); tempCounty.put(230303, "恒山区"); tempCounty.put(230304, "滴道区"); tempCounty.put(230305, "梨树区"); tempCounty.put(230306, "城子河区"); tempCounty.put(230307, "麻山区"); tempCounty.put(230321, "鸡东县"); tempCounty.put(230381, "虎林市"); tempCounty.put(230382, "密山市"); tempCounty.put(230402, "向阳区"); tempCounty.put(230403, "工农区"); tempCounty.put(230404, "南山区"); tempCounty.put(230405, "兴安区"); tempCounty.put(230406, "东山区"); tempCounty.put(230407, "兴山区"); tempCounty.put(230421, "萝北县"); tempCounty.put(230422, "绥滨县"); tempCounty.put(230502, "尖山区"); tempCounty.put(230503, "岭东区"); tempCounty.put(230505, "四方台区"); tempCounty.put(230506, "宝山区"); tempCounty.put(230521, "集贤县"); tempCounty.put(230522, "友谊县"); tempCounty.put(230523, "宝清县"); tempCounty.put(230524, "饶河县"); tempCounty.put(230602, "萨尔图区"); tempCounty.put(230603, "龙凤区"); tempCounty.put(230604, "让胡路区"); tempCounty.put(230605, "红岗区"); 
tempCounty.put(230606, "大同区"); tempCounty.put(230621, "肇州县"); tempCounty.put(230622, "肇源县"); tempCounty.put(230623, "林甸县"); tempCounty.put(230624, "杜尔伯特蒙古族自治县"); tempCounty.put(230671, "大庆高新技术产业开发区"); tempCounty.put(230717, "伊美区"); tempCounty.put(230718, "乌翠区"); tempCounty.put(230719, "友好区"); tempCounty.put(230722, "嘉荫县"); tempCounty.put(230723, "汤旺县"); tempCounty.put(230724, "丰林县"); tempCounty.put(230725, "大箐山县"); tempCounty.put(230726, "南岔县"); tempCounty.put(230751, "金林区"); tempCounty.put(230781, "铁力市"); tempCounty.put(230803, "向阳区"); tempCounty.put(230804, "前进区"); tempCounty.put(230805, "东风区"); tempCounty.put(230811, "郊区"); tempCounty.put(230822, "桦南县"); tempCounty.put(230826, "桦川县"); tempCounty.put(230828, "汤原县"); tempCounty.put(230881, "同江市"); tempCounty.put(230882, "富锦市"); tempCounty.put(230883, "抚远市"); tempCounty.put(230902, "新兴区"); tempCounty.put(230903, "桃山区"); tempCounty.put(230904, "茄子河区"); tempCounty.put(230921, "勃利县"); tempCounty.put(231002, "东安区"); tempCounty.put(231003, "阳明区"); tempCounty.put(231004, "爱民区"); tempCounty.put(231005, "西安区"); tempCounty.put(231025, "林口县"); tempCounty.put(231071, "牡丹江经济技术开发区"); tempCounty.put(231081, "绥芬河市"); tempCounty.put(231083, "海林市"); tempCounty.put(231084, "宁安市"); tempCounty.put(231085, "穆棱市"); tempCounty.put(231086, "东宁市"); tempCounty.put(231102, "爱辉区"); tempCounty.put(231123, "逊克县"); tempCounty.put(231124, "孙吴县"); tempCounty.put(231181, "北安市"); tempCounty.put(231182, "五大连池市"); tempCounty.put(231183, "嫩江市"); tempCounty.put(231202, "北林区"); tempCounty.put(231221, "望奎县"); tempCounty.put(231222, "兰西县"); tempCounty.put(231223, "青冈县"); tempCounty.put(231224, "庆安县"); tempCounty.put(231225, "明水县"); tempCounty.put(231226, "绥棱县"); tempCounty.put(231281, "安达市"); tempCounty.put(231282, "肇东市"); tempCounty.put(231283, "海伦市"); tempCounty.put(232701, "漠河市"); tempCounty.put(232721, "呼玛县"); tempCounty.put(232722, "塔河县"); tempCounty.put(232761, "加格达奇区"); tempCounty.put(232762, "松岭区"); tempCounty.put(232763, "新林区"); 
tempCounty.put(232764, "呼中区"); tempCounty.put(310101, "黄浦区"); tempCounty.put(310104, "徐汇区"); tempCounty.put(310105, "长宁区"); tempCounty.put(310106, "静安区"); tempCounty.put(310107, "普陀区"); tempCounty.put(310109, "虹口区"); tempCounty.put(310110, "杨浦区"); tempCounty.put(310112, "闵行区"); tempCounty.put(310113, "宝山区"); tempCounty.put(310114, "嘉定区"); tempCounty.put(310115, "浦东新区"); tempCounty.put(310116, "金山区"); tempCounty.put(310117, "松江区"); tempCounty.put(310118, "青浦区"); tempCounty.put(310120, "奉贤区"); tempCounty.put(310151, "崇明区"); tempCounty.put(320102, "玄武区"); tempCounty.put(320104, "秦淮区"); tempCounty.put(320105, "建邺区"); tempCounty.put(320106, "鼓楼区"); tempCounty.put(320111, "浦口区"); tempCounty.put(320113, "栖霞区"); tempCounty.put(320114, "雨花台区"); tempCounty.put(320115, "江宁区"); tempCounty.put(320116, "六合区"); tempCounty.put(320117, "溧水区"); tempCounty.put(320118, "高淳区"); tempCounty.put(320205, "锡山区"); tempCounty.put(320206, "惠山区"); tempCounty.put(320211, "滨湖区"); tempCounty.put(320213, "梁溪区"); tempCounty.put(320214, "新吴区"); tempCounty.put(320281, "江阴市"); tempCounty.put(320282, "宜兴市"); tempCounty.put(320302, "鼓楼区"); tempCounty.put(320303, "云龙区"); tempCounty.put(320305, "贾汪区"); tempCounty.put(320311, "泉山区"); tempCounty.put(320312, "铜山区"); tempCounty.put(320321, "丰县"); tempCounty.put(320322, "沛县"); tempCounty.put(320324, "睢宁县"); tempCounty.put(320371, "徐州经济技术开发区"); tempCounty.put(320381, "新沂市"); tempCounty.put(320382, "邳州市"); tempCounty.put(320402, "天宁区"); tempCounty.put(320404, "钟楼区"); tempCounty.put(320411, "新北区"); tempCounty.put(320412, "武进区"); tempCounty.put(320413, "金坛区"); tempCounty.put(320481, "溧阳市"); tempCounty.put(320505, "虎丘区"); tempCounty.put(320506, "吴中区"); tempCounty.put(320507, "相城区"); tempCounty.put(320508, "姑苏区"); tempCounty.put(320509, "吴江区"); tempCounty.put(320571, "苏州工业园区"); tempCounty.put(320581, "常熟市"); tempCounty.put(320582, "张家港市"); tempCounty.put(320583, "昆山市"); tempCounty.put(320585, "太仓市"); tempCounty.put(320602, "崇川区"); tempCounty.put(320611, "港闸区"); 
tempCounty.put(320612, "通州区"); tempCounty.put(320623, "如东县"); tempCounty.put(320671, "南通经济技术开发区"); tempCounty.put(320681, "启东市"); tempCounty.put(320682, "如皋市"); tempCounty.put(320684, "海门市"); tempCounty.put(320685, "海安市"); tempCounty.put(320703, "连云区"); tempCounty.put(320706, "海州区"); tempCounty.put(320707, "赣榆区"); tempCounty.put(320722, "东海县"); tempCounty.put(320723, "灌云县"); tempCounty.put(320724, "灌南县"); tempCounty.put(320771, "连云港经济技术开发区"); tempCounty.put(320772, "连云港高新技术产业开发区"); tempCounty.put(320803, "淮安区"); tempCounty.put(320804, "淮阴区"); tempCounty.put(320812, "清江浦区"); tempCounty.put(320813, "洪泽区"); tempCounty.put(320826, "涟水县"); tempCounty.put(320830, "盱眙县"); tempCounty.put(320831, "金湖县"); tempCounty.put(320871, "淮安经济技术开发区"); tempCounty.put(320902, "亭湖区"); tempCounty.put(320903, "盐都区"); tempCounty.put(320904, "大丰区"); tempCounty.put(320921, "响水县"); tempCounty.put(320922, "滨海县"); tempCounty.put(320923, "阜宁县"); tempCounty.put(320924, "射阳县"); tempCounty.put(320925, "建湖县"); tempCounty.put(320971, "盐城经济技术开发区"); tempCounty.put(320981, "东台市"); tempCounty.put(321002, "广陵区"); tempCounty.put(321003, "邗江区"); tempCounty.put(321012, "江都区"); tempCounty.put(321023, "宝应县"); tempCounty.put(321071, "扬州经济技术开发区"); tempCounty.put(321081, "仪征市"); tempCounty.put(321084, "高邮市"); tempCounty.put(321102, "京口区"); tempCounty.put(321111, "润州区"); tempCounty.put(321112, "丹徒区"); tempCounty.put(321171, "镇江新区"); tempCounty.put(321181, "丹阳市"); tempCounty.put(321182, "扬中市"); tempCounty.put(321183, "句容市"); tempCounty.put(321202, "海陵区"); tempCounty.put(321203, "高港区"); tempCounty.put(321204, "姜堰区"); tempCounty.put(321271, "泰州医药高新技术产业开发区"); tempCounty.put(321281, "兴化市"); tempCounty.put(321282, "靖江市"); tempCounty.put(321283, "泰兴市"); tempCounty.put(321302, "宿城区"); tempCounty.put(321311, "宿豫区"); tempCounty.put(321322, "沭阳县"); tempCounty.put(321323, "泗阳县"); tempCounty.put(321324, "泗洪县"); tempCounty.put(321371, "宿迁经济技术开发区"); tempCounty.put(330102, "上城区"); tempCounty.put(330103, "下城区"); 
tempCounty.put(330104, "江干区"); tempCounty.put(330105, "拱墅区"); tempCounty.put(330106, "西湖区"); tempCounty.put(330108, "滨江区"); tempCounty.put(330109, "萧山区"); tempCounty.put(330110, "余杭区"); tempCounty.put(330111, "富阳区"); tempCounty.put(330112, "临安区"); tempCounty.put(330122, "桐庐县"); tempCounty.put(330127, "淳安县"); tempCounty.put(330182, "建德市"); tempCounty.put(330203, "海曙区"); tempCounty.put(330205, "江北区"); tempCounty.put(330206, "北仑区"); tempCounty.put(330211, "镇海区"); tempCounty.put(330212, "鄞州区"); tempCounty.put(330213, "奉化区"); tempCounty.put(330225, "象山县"); tempCounty.put(330226, "宁海县"); tempCounty.put(330281, "余姚市"); tempCounty.put(330282, "慈溪市"); tempCounty.put(330302, "鹿城区"); tempCounty.put(330303, "龙湾区"); tempCounty.put(330304, "瓯海区"); tempCounty.put(330305, "洞头区"); tempCounty.put(330324, "永嘉县"); tempCounty.put(330326, "平阳县"); tempCounty.put(330327, "苍南县"); tempCounty.put(330328, "文成县"); tempCounty.put(330329, "泰顺县"); tempCounty.put(330371, "温州经济技术开发区"); tempCounty.put(330381, "瑞安市"); tempCounty.put(330382, "乐清市"); tempCounty.put(330383, "龙港市"); tempCounty.put(330402, "南湖区"); tempCounty.put(330411, "秀洲区"); tempCounty.put(330421, "嘉善县"); tempCounty.put(330424, "海盐县"); tempCounty.put(330481, "海宁市"); tempCounty.put(330482, "平湖市"); tempCounty.put(330483, "桐乡市"); tempCounty.put(330502, "吴兴区"); tempCounty.put(330503, "南浔区"); tempCounty.put(330521, "德清县"); tempCounty.put(330522, "长兴县"); tempCounty.put(330523, "安吉县"); tempCounty.put(330602, "越城区"); tempCounty.put(330603, "柯桥区"); tempCounty.put(330604, "上虞区"); tempCounty.put(330624, "新昌县"); tempCounty.put(330681, "诸暨市"); tempCounty.put(330683, "嵊州市"); tempCounty.put(330702, "婺城区"); tempCounty.put(330703, "金东区"); tempCounty.put(330723, "武义县"); tempCounty.put(330726, "浦江县"); tempCounty.put(330727, "磐安县"); tempCounty.put(330781, "兰溪市"); tempCounty.put(330782, "义乌市"); tempCounty.put(330783, "东阳市"); tempCounty.put(330784, "永康市"); tempCounty.put(330802, "柯城区"); tempCounty.put(330803, "衢江区"); tempCounty.put(330822, "常山县"); 
tempCounty.put(330824, "开化县"); tempCounty.put(330825, "龙游县"); tempCounty.put(330881, "江山市"); tempCounty.put(330902, "定海区"); tempCounty.put(330903, "普陀区"); tempCounty.put(330921, "岱山县"); tempCounty.put(330922, "嵊泗县"); tempCounty.put(331002, "椒江区"); tempCounty.put(331003, "黄岩区"); tempCounty.put(331004, "路桥区"); tempCounty.put(331022, "三门县"); tempCounty.put(331023, "天台县"); tempCounty.put(331024, "仙居县"); tempCounty.put(331081, "温岭市"); tempCounty.put(331082, "临海市"); tempCounty.put(331083, "玉环市"); tempCounty.put(331102, "莲都区"); tempCounty.put(331121, "青田县"); tempCounty.put(331122, "缙云县"); tempCounty.put(331123, "遂昌县"); tempCounty.put(331124, "松阳县"); tempCounty.put(331125, "云和县"); tempCounty.put(331126, "庆元县"); tempCounty.put(331127, "景宁畲族自治县"); tempCounty.put(331181, "龙泉市"); tempCounty.put(340102, "瑶海区"); tempCounty.put(340103, "庐阳区"); tempCounty.put(340104, "蜀山区"); tempCounty.put(340111, "包河区"); tempCounty.put(340121, "长丰县"); tempCounty.put(340122, "肥东县"); tempCounty.put(340123, "肥西县"); tempCounty.put(340124, "庐江县"); tempCounty.put(340171, "合肥高新技术产业开发区"); tempCounty.put(340172, "合肥经济技术开发区"); tempCounty.put(340173, "合肥新站高新技术产业开发区"); tempCounty.put(340181, "巢湖市"); tempCounty.put(340202, "镜湖区"); tempCounty.put(340203, "弋江区"); tempCounty.put(340207, "鸠江区"); tempCounty.put(340208, "三山区"); tempCounty.put(340221, "芜湖县"); tempCounty.put(340222, "繁昌县"); tempCounty.put(340223, "南陵县"); tempCounty.put(340271, "芜湖经济技术开发区"); tempCounty.put(340272, "安徽芜湖长江大桥经济开发区"); tempCounty.put(340281, "无为市"); tempCounty.put(340302, "龙子湖区"); tempCounty.put(340303, "蚌山区"); tempCounty.put(340304, "禹会区"); tempCounty.put(340311, "淮上区"); tempCounty.put(340321, "怀远县"); tempCounty.put(340322, "五河县"); tempCounty.put(340323, "固镇县"); tempCounty.put(340371, "蚌埠市高新技术开发区"); tempCounty.put(340372, "蚌埠市经济开发区"); tempCounty.put(340402, "大通区"); tempCounty.put(340403, "田家庵区"); tempCounty.put(340404, "谢家集区"); tempCounty.put(340405, "八公山区"); tempCounty.put(340406, "潘集区"); tempCounty.put(340421, "凤台县"); 
tempCounty.put(340422, "寿县"); tempCounty.put(340503, "花山区"); tempCounty.put(340504, "雨山区"); tempCounty.put(340506, "博望区"); tempCounty.put(340521, "当涂县"); tempCounty.put(340522, "含山县"); tempCounty.put(340523, "和县"); tempCounty.put(340602, "杜集区"); tempCounty.put(340603, "相山区"); tempCounty.put(340604, "烈山区"); tempCounty.put(340621, "濉溪县"); tempCounty.put(340705, "铜官区"); tempCounty.put(340706, "义安区"); tempCounty.put(340711, "郊区"); tempCounty.put(340722, "枞阳县"); tempCounty.put(340802, "迎江区"); tempCounty.put(340803, "大观区"); tempCounty.put(340811, "宜秀区"); tempCounty.put(340822, "怀宁县"); tempCounty.put(340825, "太湖县"); tempCounty.put(340826, "宿松县"); tempCounty.put(340827, "望江县"); tempCounty.put(340828, "岳西县"); tempCounty.put(340871, "安徽安庆经济开发区"); tempCounty.put(340881, "桐城市"); tempCounty.put(340882, "潜山市"); tempCounty.put(341002, "屯溪区"); tempCounty.put(341003, "黄山区"); tempCounty.put(341004, "徽州区"); tempCounty.put(341021, "歙县"); tempCounty.put(341022, "休宁县"); tempCounty.put(341023, "黟县"); tempCounty.put(341024, "祁门县"); tempCounty.put(341102, "琅琊区"); tempCounty.put(341103, "南谯区"); tempCounty.put(341122, "来安县"); tempCounty.put(341124, "全椒县"); tempCounty.put(341125, "定远县"); tempCounty.put(341126, "凤阳县"); tempCounty.put(341171, "苏滁现代产业园"); tempCounty.put(341172, "滁州经济技术开发区"); tempCounty.put(341181, "天长市"); tempCounty.put(341182, "明光市"); tempCounty.put(341202, "颍州区"); tempCounty.put(341203, "颍东区"); tempCounty.put(341204, "颍泉区"); tempCounty.put(341221, "临泉县"); tempCounty.put(341222, "太和县"); tempCounty.put(341225, "阜南县"); tempCounty.put(341226, "颍上县"); tempCounty.put(341271, "阜阳合肥现代产业园区"); tempCounty.put(341272, "阜阳经济技术开发区"); tempCounty.put(341282, "界首市"); tempCounty.put(341302, "埇桥区"); tempCounty.put(341321, "砀山县"); tempCounty.put(341322, "萧县"); tempCounty.put(341323, "灵璧县"); tempCounty.put(341324, "泗县"); tempCounty.put(341371, "宿州马鞍山现代产业园区"); tempCounty.put(341372, "宿州经济技术开发区"); tempCounty.put(341502, "金安区"); tempCounty.put(341503, "裕安区"); tempCounty.put(341504, "叶集区"); 
tempCounty.put(341522, "霍邱县"); tempCounty.put(341523, "舒城县"); tempCounty.put(341524, "金寨县"); tempCounty.put(341525, "霍山县"); tempCounty.put(341602, "谯城区"); tempCounty.put(341621, "涡阳县"); tempCounty.put(341622, "蒙城县"); tempCounty.put(341623, "利辛县"); tempCounty.put(341702, "贵池区"); tempCounty.put(341721, "东至县"); tempCounty.put(341722, "石台县"); tempCounty.put(341723, "青阳县"); tempCounty.put(341802, "宣州区"); tempCounty.put(341821, "郎溪县"); tempCounty.put(341823, "泾县"); tempCounty.put(341824, "绩溪县"); tempCounty.put(341825, "旌德县"); tempCounty.put(341871, "宣城市经济开发区"); tempCounty.put(341881, "宁国市"); tempCounty.put(341882, "广德市"); tempCounty.put(350102, "鼓楼区"); tempCounty.put(350103, "台江区"); tempCounty.put(350104, "仓山区"); tempCounty.put(350105, "马尾区"); tempCounty.put(350111, "晋安区"); tempCounty.put(350112, "长乐区"); tempCounty.put(350121, "闽侯县"); tempCounty.put(350122, "连江县"); tempCounty.put(350123, "罗源县"); tempCounty.put(350124, "闽清县"); tempCounty.put(350125, "永泰县"); tempCounty.put(350128, "平潭县"); tempCounty.put(350181, "福清市"); tempCounty.put(350203, "思明区"); tempCounty.put(350205, "海沧区"); tempCounty.put(350206, "湖里区"); tempCounty.put(350211, "集美区"); tempCounty.put(350212, "同安区"); tempCounty.put(350213, "翔安区"); tempCounty.put(350302, "城厢区"); tempCounty.put(350303, "涵江区"); tempCounty.put(350304, "荔城区"); tempCounty.put(350305, "秀屿区"); tempCounty.put(350322, "仙游县"); tempCounty.put(350402, "梅列区"); tempCounty.put(350403, "三元区"); tempCounty.put(350421, "明溪县"); tempCounty.put(350423, "清流县"); tempCounty.put(350424, "宁化县"); tempCounty.put(350425, "大田县"); tempCounty.put(350426, "尤溪县"); tempCounty.put(350427, "沙县"); tempCounty.put(350428, "将乐县"); tempCounty.put(350429, "泰宁县"); tempCounty.put(350430, "建宁县"); tempCounty.put(350481, "永安市"); tempCounty.put(350502, "鲤城区"); tempCounty.put(350503, "丰泽区"); tempCounty.put(350504, "洛江区"); tempCounty.put(350505, "泉港区"); tempCounty.put(350521, "惠安县"); tempCounty.put(350524, "安溪县"); tempCounty.put(350525, "永春县"); tempCounty.put(350526, "德化县"); 
tempCounty.put(350527, "金门县"); tempCounty.put(350581, "石狮市"); tempCounty.put(350582, "晋江市"); tempCounty.put(350583, "南安市"); tempCounty.put(350602, "芗城区"); tempCounty.put(350603, "龙文区"); tempCounty.put(350622, "云霄县"); tempCounty.put(350623, "漳浦县"); tempCounty.put(350624, "诏安县"); tempCounty.put(350625, "长泰县"); tempCounty.put(350626, "东山县"); tempCounty.put(350627, "南靖县"); tempCounty.put(350628, "平和县"); tempCounty.put(350629, "华安县"); tempCounty.put(350681, "龙海市"); tempCounty.put(350702, "延平区"); tempCounty.put(350703, "建阳区"); tempCounty.put(350721, "顺昌县"); tempCounty.put(350722, "浦城县"); tempCounty.put(350723, "光泽县"); tempCounty.put(350724, "松溪县"); tempCounty.put(350725, "政和县"); tempCounty.put(350781, "邵武市"); tempCounty.put(350782, "武夷山市"); tempCounty.put(350783, "建瓯市"); tempCounty.put(350802, "新罗区"); tempCounty.put(350803, "永定区"); tempCounty.put(350821, "长汀县"); tempCounty.put(350823, "上杭县"); tempCounty.put(350824, "武平县"); tempCounty.put(350825, "连城县"); tempCounty.put(350881, "漳平市"); tempCounty.put(350902, "蕉城区"); tempCounty.put(350921, "霞浦县"); tempCounty.put(350922, "古田县"); tempCounty.put(350923, "屏南县"); tempCounty.put(350924, "寿宁县"); tempCounty.put(350925, "周宁县"); tempCounty.put(350926, "柘荣县"); tempCounty.put(350981, "福安市"); tempCounty.put(350982, "福鼎市"); tempCounty.put(360102, "东湖区"); tempCounty.put(360103, "西湖区"); tempCounty.put(360104, "青云谱区"); tempCounty.put(360111, "青山湖区"); tempCounty.put(360112, "新建区"); tempCounty.put(360113, "红谷滩区"); tempCounty.put(360121, "南昌县"); tempCounty.put(360123, "安义县"); tempCounty.put(360124, "进贤县"); tempCounty.put(360202, "昌江区"); tempCounty.put(360203, "珠山区"); tempCounty.put(360222, "浮梁县"); tempCounty.put(360281, "乐平市"); tempCounty.put(360302, "安源区"); tempCounty.put(360313, "湘东区"); tempCounty.put(360321, "莲花县"); tempCounty.put(360322, "上栗县"); tempCounty.put(360323, "芦溪县"); tempCounty.put(360402, "濂溪区"); tempCounty.put(360403, "浔阳区"); tempCounty.put(360404, "柴桑区"); tempCounty.put(360423, "武宁县"); tempCounty.put(360424, "修水县"); 
tempCounty.put(360425, "永修县"); tempCounty.put(360426, "德安县"); tempCounty.put(360428, "都昌县"); tempCounty.put(360429, "湖口县"); tempCounty.put(360430, "彭泽县"); tempCounty.put(360481, "瑞昌市"); tempCounty.put(360482, "共青城市"); tempCounty.put(360483, "庐山市"); tempCounty.put(360502, "渝水区"); tempCounty.put(360521, "分宜县"); tempCounty.put(360602, "月湖区"); tempCounty.put(360603, "余江区"); tempCounty.put(360681, "贵溪市"); tempCounty.put(360702, "章贡区"); tempCounty.put(360703, "南康区"); tempCounty.put(360704, "赣县区"); tempCounty.put(360722, "信丰县"); tempCounty.put(360723, "大余县"); tempCounty.put(360724, "上犹县"); tempCounty.put(360725, "崇义县"); tempCounty.put(360726, "安远县"); tempCounty.put(360728, "定南县"); tempCounty.put(360729, "全南县"); tempCounty.put(360730, "宁都县"); tempCounty.put(360731, "于都县"); tempCounty.put(360732, "兴国县"); tempCounty.put(360733, "会昌县"); tempCounty.put(360734, "寻乌县"); tempCounty.put(360735, "石城县"); tempCounty.put(360781, "瑞金市"); tempCounty.put(360783, "龙南市"); tempCounty.put(360802, "吉州区"); tempCounty.put(360803, "青原区"); tempCounty.put(360821, "吉安县"); tempCounty.put(360822, "吉水县"); tempCounty.put(360823, "峡江县"); tempCounty.put(360824, "新干县"); tempCounty.put(360825, "永丰县"); tempCounty.put(360826, "泰和县"); tempCounty.put(360827, "遂川县"); tempCounty.put(360828, "万安县"); tempCounty.put(360829, "安福县"); tempCounty.put(360830, "永新县"); tempCounty.put(360881, "井冈山市"); tempCounty.put(360902, "袁州区"); tempCounty.put(360921, "奉新县"); tempCounty.put(360922, "万载县"); tempCounty.put(360923, "上高县"); tempCounty.put(360924, "宜丰县"); tempCounty.put(360925, "靖安县"); tempCounty.put(360926, "铜鼓县"); tempCounty.put(360981, "丰城市"); tempCounty.put(360982, "樟树市"); tempCounty.put(360983, "高安市"); tempCounty.put(361002, "临川区"); tempCounty.put(361003, "东乡区"); tempCounty.put(361021, "南城县"); tempCounty.put(361022, "黎川县"); tempCounty.put(361023, "南丰县"); tempCounty.put(361024, "崇仁县"); tempCounty.put(361025, "乐安县"); tempCounty.put(361026, "宜黄县"); tempCounty.put(361027, "金溪县"); tempCounty.put(361028, "资溪县"); 
tempCounty.put(361030, "广昌县"); tempCounty.put(361102, "信州区"); tempCounty.put(361103, "广丰区"); tempCounty.put(361104, "广信区"); tempCounty.put(361123, "玉山县"); tempCounty.put(361124, "铅山县"); tempCounty.put(361125, "横峰县"); tempCounty.put(361126, "弋阳县"); tempCounty.put(361127, "余干县"); tempCounty.put(361128, "鄱阳县"); tempCounty.put(361129, "万年县"); tempCounty.put(361130, "婺源县"); tempCounty.put(361181, "德兴市"); tempCounty.put(370102, "历下区"); tempCounty.put(370103, "市中区"); tempCounty.put(370104, "槐荫区"); tempCounty.put(370105, "天桥区"); tempCounty.put(370112, "历城区"); tempCounty.put(370113, "长清区"); tempCounty.put(370114, "章丘区"); tempCounty.put(370115, "济阳区"); tempCounty.put(370116, "莱芜区"); tempCounty.put(370117, "钢城区"); tempCounty.put(370124, "平阴县"); tempCounty.put(370126, "商河县"); tempCounty.put(370171, "济南高新技术产业开发区"); tempCounty.put(370202, "市南区"); tempCounty.put(370203, "市北区"); tempCounty.put(370211, "黄岛区"); tempCounty.put(370212, "崂山区"); tempCounty.put(370213, "李沧区"); tempCounty.put(370214, "城阳区"); tempCounty.put(370215, "即墨区"); tempCounty.put(370271, "青岛高新技术产业开发区"); tempCounty.put(370281, "胶州市"); tempCounty.put(370283, "平度市"); tempCounty.put(370285, "莱西市"); tempCounty.put(370302, "淄川区"); tempCounty.put(370303, "张店区"); tempCounty.put(370304, "博山区"); tempCounty.put(370305, "临淄区"); tempCounty.put(370306, "周村区"); tempCounty.put(370321, "桓台县"); tempCounty.put(370322, "高青县"); tempCounty.put(370323, "沂源县"); tempCounty.put(370402, "市中区"); tempCounty.put(370403, "薛城区"); tempCounty.put(370404, "峄城区"); tempCounty.put(370405, "台儿庄区"); tempCounty.put(370406, "山亭区"); tempCounty.put(370481, "滕州市"); tempCounty.put(370502, "东营区"); tempCounty.put(370503, "河口区"); tempCounty.put(370505, "垦利区"); tempCounty.put(370522, "利津县"); tempCounty.put(370523, "广饶县"); tempCounty.put(370571, "东营经济技术开发区"); tempCounty.put(370572, "东营港经济开发区"); tempCounty.put(370602, "芝罘区"); tempCounty.put(370611, "福山区"); tempCounty.put(370612, "牟平区"); tempCounty.put(370613, "莱山区"); tempCounty.put(370614, "蓬莱区"); 
tempCounty.put(370671, "烟台高新技术产业开发区"); tempCounty.put(370672, "烟台经济技术开发区"); tempCounty.put(370681, "龙口市"); tempCounty.put(370682, "莱阳市"); tempCounty.put(370683, "莱州市"); tempCounty.put(370685, "招远市"); tempCounty.put(370686, "栖霞市"); tempCounty.put(370687, "海阳市"); tempCounty.put(370702, "潍城区"); tempCounty.put(370703, "寒亭区"); tempCounty.put(370704, "坊子区"); tempCounty.put(370705, "奎文区"); tempCounty.put(370724, "临朐县"); tempCounty.put(370725, "昌乐县"); tempCounty.put(370772, "潍坊滨海经济技术开发区"); tempCounty.put(370781, "青州市"); tempCounty.put(370782, "诸城市"); tempCounty.put(370783, "寿光市"); tempCounty.put(370784, "安丘市"); tempCounty.put(370785, "高密市"); tempCounty.put(370786, "昌邑市"); tempCounty.put(370811, "任城区"); tempCounty.put(370812, "兖州区"); tempCounty.put(370826, "微山县"); tempCounty.put(370827, "鱼台县"); tempCounty.put(370828, "金乡县"); tempCounty.put(370829, "嘉祥县"); tempCounty.put(370830, "汶上县"); tempCounty.put(370831, "泗水县"); tempCounty.put(370832, "梁山县"); tempCounty.put(370871, "济宁高新技术产业开发区"); tempCounty.put(370881, "曲阜市"); tempCounty.put(370883, "邹城市"); tempCounty.put(370902, "泰山区"); tempCounty.put(370911, "岱岳区"); tempCounty.put(370921, "宁阳县"); tempCounty.put(370923, "东平县"); tempCounty.put(370982, "新泰市"); tempCounty.put(370983, "肥城市"); tempCounty.put(371002, "环翠区"); tempCounty.put(371003, "文登区"); tempCounty.put(371071, "威海火炬高技术产业开发区"); tempCounty.put(371072, "威海经济技术开发区"); tempCounty.put(371073, "威海临港经济技术开发区"); tempCounty.put(371082, "荣成市"); tempCounty.put(371083, "乳山市"); tempCounty.put(371102, "东港区"); tempCounty.put(371103, "岚山区"); tempCounty.put(371121, "五莲县"); tempCounty.put(371122, "莒县"); tempCounty.put(371171, "日照经济技术开发区"); tempCounty.put(371302, "兰山区"); tempCounty.put(371311, "罗庄区"); tempCounty.put(371312, "河东区"); tempCounty.put(371321, "沂南县"); tempCounty.put(371322, "郯城县"); tempCounty.put(371323, "沂水县"); tempCounty.put(371324, "兰陵县"); tempCounty.put(371325, "费县"); tempCounty.put(371326, "平邑县"); tempCounty.put(371327, "莒南县"); tempCounty.put(371328, "蒙阴县"); 
tempCounty.put(371329, "临沭县"); tempCounty.put(371371, "临沂高新技术产业开发区"); tempCounty.put(371402, "德城区"); tempCounty.put(371403, "陵城区"); tempCounty.put(371422, "宁津县"); tempCounty.put(371423, "庆云县"); tempCounty.put(371424, "临邑县"); tempCounty.put(371425, "齐河县"); tempCounty.put(371426, "平原县"); tempCounty.put(371427, "夏津县"); tempCounty.put(371428, "武城县"); tempCounty.put(371471, "德州经济技术开发区"); tempCounty.put(371472, "德州运河经济开发区"); tempCounty.put(371481, "乐陵市"); tempCounty.put(371482, "禹城市"); tempCounty.put(371502, "东昌府区"); tempCounty.put(371503, "茌平区"); tempCounty.put(371521, "阳谷县"); tempCounty.put(371522, "莘县"); tempCounty.put(371524, "东阿县"); tempCounty.put(371525, "冠县"); tempCounty.put(371526, "高唐县"); tempCounty.put(371581, "临清市"); tempCounty.put(371602, "滨城区"); tempCounty.put(371603, "沾化区"); tempCounty.put(371621, "惠民县"); tempCounty.put(371622, "阳信县"); tempCounty.put(371623, "无棣县"); tempCounty.put(371625, "博兴县"); tempCounty.put(371681, "邹平市"); tempCounty.put(371702, "牡丹区"); tempCounty.put(371703, "定陶区"); tempCounty.put(371721, "曹县"); tempCounty.put(371722, "单县"); tempCounty.put(371723, "成武县"); tempCounty.put(371724, "巨野县"); tempCounty.put(371725, "郓城县"); tempCounty.put(371726, "鄄城县"); tempCounty.put(371728, "东明县"); tempCounty.put(371771, "菏泽经济技术开发区"); tempCounty.put(371772, "菏泽高新技术开发区"); tempCounty.put(410102, "中原区"); tempCounty.put(410103, "二七区"); tempCounty.put(410104, "管城回族区"); tempCounty.put(410105, "金水区"); tempCounty.put(410106, "上街区"); tempCounty.put(410108, "惠济区"); tempCounty.put(410122, "中牟县"); tempCounty.put(410171, "郑州经济技术开发区"); tempCounty.put(410172, "郑州高新技术产业开发区"); tempCounty.put(410173, "郑州航空港经济综合实验区"); tempCounty.put(410181, "巩义市"); tempCounty.put(410182, "荥阳市"); tempCounty.put(410183, "新密市"); tempCounty.put(410184, "新郑市"); tempCounty.put(410185, "登封市"); tempCounty.put(410202, "龙亭区"); tempCounty.put(410203, "顺河回族区"); tempCounty.put(410204, "鼓楼区"); tempCounty.put(410205, "禹王台区"); tempCounty.put(410212, "祥符区"); tempCounty.put(410221, "杞县"); 
tempCounty.put(410222, "通许县"); tempCounty.put(410223, "尉氏县"); tempCounty.put(410225, "兰考县"); tempCounty.put(410302, "老城区"); tempCounty.put(410303, "西工区"); tempCounty.put(410304, "瀍河回族区"); tempCounty.put(410305, "涧西区"); tempCounty.put(410306, "吉利区"); tempCounty.put(410311, "洛龙区"); tempCounty.put(410322, "孟津县"); tempCounty.put(410323, "新安县"); tempCounty.put(410324, "栾川县"); tempCounty.put(410325, "嵩县"); tempCounty.put(410326, "汝阳县"); tempCounty.put(410327, "宜阳县"); tempCounty.put(410328, "洛宁县"); tempCounty.put(410329, "伊川县"); tempCounty.put(410371, "洛阳高新技术产业开发区"); tempCounty.put(410381, "偃师市"); tempCounty.put(410402, "新华区"); tempCounty.put(410403, "卫东区"); tempCounty.put(410404, "石龙区"); tempCounty.put(410411, "湛河区"); tempCounty.put(410421, "宝丰县"); tempCounty.put(410422, "叶县"); tempCounty.put(410423, "鲁山县"); tempCounty.put(410425, "郏县"); tempCounty.put(410471, "平顶山高新技术产业开发区"); tempCounty.put(410472, "平顶山市城乡一体化示范区"); tempCounty.put(410481, "舞钢市"); tempCounty.put(410482, "汝州市"); tempCounty.put(410502, "文峰区"); tempCounty.put(410503, "北关区"); tempCounty.put(410505, "殷都区"); tempCounty.put(410506, "龙安区"); tempCounty.put(410522, "安阳县"); tempCounty.put(410523, "汤阴县"); tempCounty.put(410526, "滑县"); tempCounty.put(410527, "内黄县"); tempCounty.put(410571, "安阳高新技术产业开发区"); tempCounty.put(410581, "林州市"); tempCounty.put(410602, "鹤山区"); tempCounty.put(410603, "山城区"); tempCounty.put(410611, "淇滨区"); tempCounty.put(410621, "浚县"); tempCounty.put(410622, "淇县"); tempCounty.put(410671, "鹤壁经济技术开发区"); tempCounty.put(410702, "红旗区"); tempCounty.put(410703, "卫滨区"); tempCounty.put(410704, "凤泉区"); tempCounty.put(410711, "牧野区"); tempCounty.put(410721, "新乡县"); tempCounty.put(410724, "获嘉县"); tempCounty.put(410725, "原阳县"); tempCounty.put(410726, "延津县"); tempCounty.put(410727, "封丘县"); tempCounty.put(410771, "新乡高新技术产业开发区"); tempCounty.put(410772, "新乡经济技术开发区"); tempCounty.put(410773, "新乡市平原城乡一体化示范区"); tempCounty.put(410781, "卫辉市"); tempCounty.put(410782, "辉县市"); tempCounty.put(410783, "长垣市"); 
tempCounty.put(410802, "解放区"); tempCounty.put(410803, "中站区"); tempCounty.put(410804, "马村区"); tempCounty.put(410811, "山阳区"); tempCounty.put(410821, "修武县"); tempCounty.put(410822, "博爱县"); tempCounty.put(410823, "武陟县"); tempCounty.put(410825, "温县"); tempCounty.put(410871, "焦作城乡一体化示范区"); tempCounty.put(410882, "沁阳市"); tempCounty.put(410883, "孟州市"); tempCounty.put(410902, "华龙区"); tempCounty.put(410922, "清丰县"); tempCounty.put(410923, "南乐县"); tempCounty.put(410926, "范县"); tempCounty.put(410927, "台前县"); tempCounty.put(410928, "濮阳县"); tempCounty.put(410971, "河南濮阳工业园区"); tempCounty.put(410972, "濮阳经济技术开发区"); tempCounty.put(411002, "魏都区"); tempCounty.put(411003, "建安区"); tempCounty.put(411024, "鄢陵县"); tempCounty.put(411025, "襄城县"); tempCounty.put(411071, "许昌经济技术开发区"); tempCounty.put(411081, "禹州市"); tempCounty.put(411082, "长葛市"); tempCounty.put(411102, "源汇区"); tempCounty.put(411103, "郾城区"); tempCounty.put(411104, "召陵区"); tempCounty.put(411121, "舞阳县"); tempCounty.put(411122, "临颍县"); tempCounty.put(411171, "漯河经济技术开发区"); tempCounty.put(411202, "湖滨区"); tempCounty.put(411203, "陕州区"); tempCounty.put(411221, "渑池县"); tempCounty.put(411224, "卢氏县"); tempCounty.put(411271, "河南三门峡经济开发区"); tempCounty.put(411281, "义马市"); tempCounty.put(411282, "灵宝市"); tempCounty.put(411302, "宛城区"); tempCounty.put(411303, "卧龙区"); tempCounty.put(411321, "南召县"); tempCounty.put(411322, "方城县"); tempCounty.put(411323, "西峡县"); tempCounty.put(411324, "镇平县"); tempCounty.put(411325, "内乡县"); tempCounty.put(411326, "淅川县"); tempCounty.put(411327, "社旗县"); tempCounty.put(411328, "唐河县"); tempCounty.put(411329, "新野县"); tempCounty.put(411330, "桐柏县"); tempCounty.put(411371, "南阳高新技术产业开发区"); tempCounty.put(411372, "南阳市城乡一体化示范区"); tempCounty.put(411381, "邓州市"); tempCounty.put(411402, "梁园区"); tempCounty.put(411403, "睢阳区"); tempCounty.put(411421, "民权县"); tempCounty.put(411422, "睢县"); tempCounty.put(411423, "宁陵县"); tempCounty.put(411424, "柘城县"); tempCounty.put(411425, "虞城县"); tempCounty.put(411426, "夏邑县"); tempCounty.put(411471, 
"豫东综合物流产业聚集区"); tempCounty.put(411472, "河南商丘经济开发区"); tempCounty.put(411481, "永城市"); tempCounty.put(411502, "浉河区"); tempCounty.put(411503, "平桥区"); tempCounty.put(411521, "罗山县"); tempCounty.put(411522, "光山县"); tempCounty.put(411523, "新县"); tempCounty.put(411524, "商城县"); tempCounty.put(411525, "固始县"); tempCounty.put(411526, "潢川县"); tempCounty.put(411527, "淮滨县"); tempCounty.put(411528, "息县"); tempCounty.put(411571, "信阳高新技术产业开发区"); tempCounty.put(411602, "川汇区"); tempCounty.put(411603, "淮阳区"); tempCounty.put(411621, "扶沟县"); tempCounty.put(411622, "西华县"); tempCounty.put(411623, "商水县"); tempCounty.put(411624, "沈丘县"); tempCounty.put(411625, "郸城县"); tempCounty.put(411627, "太康县"); tempCounty.put(411628, "鹿邑县"); tempCounty.put(411671, "河南周口经济开发区"); tempCounty.put(411681, "项城市"); tempCounty.put(411702, "驿城区"); tempCounty.put(411721, "西平县"); tempCounty.put(411722, "上蔡县"); tempCounty.put(411723, "平舆县"); tempCounty.put(411724, "正阳县"); tempCounty.put(411725, "确山县"); tempCounty.put(411726, "泌阳县"); tempCounty.put(411727, "汝南县"); tempCounty.put(411728, "遂平县"); tempCounty.put(411729, "新蔡县"); tempCounty.put(411771, "河南驻马店经济开发区"); tempCounty.put(419001, "济源市"); tempCounty.put(420102, "江岸区"); tempCounty.put(420103, "江汉区"); tempCounty.put(420104, "硚口区"); tempCounty.put(420105, "汉阳区"); tempCounty.put(420106, "武昌区"); tempCounty.put(420107, "青山区"); tempCounty.put(420111, "洪山区"); tempCounty.put(420112, "东西湖区"); tempCounty.put(420113, "汉南区"); tempCounty.put(420114, "蔡甸区"); tempCounty.put(420115, "江夏区"); tempCounty.put(420116, "黄陂区"); tempCounty.put(420117, "新洲区"); tempCounty.put(420202, "黄石港区"); tempCounty.put(420203, "西塞山区"); tempCounty.put(420204, "下陆区"); tempCounty.put(420205, "铁山区"); tempCounty.put(420222, "阳新县"); tempCounty.put(420281, "大冶市"); tempCounty.put(420302, "茅箭区"); tempCounty.put(420303, "张湾区"); tempCounty.put(420304, "郧阳区"); tempCounty.put(420322, "郧西县"); tempCounty.put(420323, "竹山县"); tempCounty.put(420324, "竹溪县"); tempCounty.put(420325, "房县"); tempCounty.put(420381, "丹江口市"); 
tempCounty.put(420502, "西陵区"); tempCounty.put(420503, "伍家岗区"); tempCounty.put(420504, "点军区"); tempCounty.put(420505, "猇亭区"); tempCounty.put(420506, "夷陵区"); tempCounty.put(420525, "远安县"); tempCounty.put(420526, "兴山县"); tempCounty.put(420527, "秭归县"); tempCounty.put(420528, "长阳土家族自治县"); tempCounty.put(420529, "五峰土家族自治县"); tempCounty.put(420581, "宜都市"); tempCounty.put(420582, "当阳市"); tempCounty.put(420583, "枝江市"); tempCounty.put(420602, "襄城区"); tempCounty.put(420606, "樊城区"); tempCounty.put(420607, "襄州区"); tempCounty.put(420624, "南漳县"); tempCounty.put(420625, "谷城县"); tempCounty.put(420626, "保康县"); tempCounty.put(420682, "老河口市"); tempCounty.put(420683, "枣阳市"); tempCounty.put(420684, "宜城市"); tempCounty.put(420702, "梁子湖区"); tempCounty.put(420703, "华容区"); tempCounty.put(420704, "鄂城区"); tempCounty.put(420802, "东宝区"); tempCounty.put(420804, "掇刀区"); tempCounty.put(420822, "沙洋县"); tempCounty.put(420881, "钟祥市"); tempCounty.put(420882, "京山市"); tempCounty.put(420902, "孝南区"); tempCounty.put(420921, "孝昌县"); tempCounty.put(420922, "大悟县"); tempCounty.put(420923, "云梦县"); tempCounty.put(420981, "应城市"); tempCounty.put(420982, "安陆市"); tempCounty.put(420984, "汉川市"); tempCounty.put(421002, "沙市区"); tempCounty.put(421003, "荆州区"); tempCounty.put(421022, "公安县"); tempCounty.put(421023, "监利县"); tempCounty.put(421024, "江陵县"); tempCounty.put(421071, "荆州经济技术开发区"); tempCounty.put(421081, "石首市"); tempCounty.put(421083, "洪湖市"); tempCounty.put(421087, "松滋市"); tempCounty.put(421102, "黄州区"); tempCounty.put(421121, "团风县"); tempCounty.put(421122, "红安县"); tempCounty.put(421123, "罗田县"); tempCounty.put(421124, "英山县"); tempCounty.put(421125, "浠水县"); tempCounty.put(421126, "蕲春县"); tempCounty.put(421127, "黄梅县"); tempCounty.put(421171, "龙感湖管理区"); tempCounty.put(421181, "麻城市"); tempCounty.put(421182, "武穴市"); tempCounty.put(421202, "咸安区"); tempCounty.put(421221, "嘉鱼县"); tempCounty.put(421222, "通城县"); tempCounty.put(421223, "崇阳县"); tempCounty.put(421224, "通山县"); tempCounty.put(421281, "赤壁市"); tempCounty.put(421303, 
"曾都区"); tempCounty.put(421321, "随县"); tempCounty.put(421381, "广水市"); tempCounty.put(422801, "恩施市"); tempCounty.put(422802, "利川市"); tempCounty.put(422822, "建始县"); tempCounty.put(422823, "巴东县"); tempCounty.put(422825, "宣恩县"); tempCounty.put(422826, "咸丰县"); tempCounty.put(422827, "来凤县"); tempCounty.put(422828, "鹤峰县"); tempCounty.put(429004, "仙桃市"); tempCounty.put(429005, "潜江市"); tempCounty.put(429006, "天门市"); tempCounty.put(429021, "神农架林区"); tempCounty.put(430102, "芙蓉区"); tempCounty.put(430103, "天心区"); tempCounty.put(430104, "岳麓区"); tempCounty.put(430105, "开福区"); tempCounty.put(430111, "雨花区"); tempCounty.put(430112, "望城区"); tempCounty.put(430121, "长沙县"); tempCounty.put(430181, "浏阳市"); tempCounty.put(430182, "宁乡市"); tempCounty.put(430202, "荷塘区"); tempCounty.put(430203, "芦淞区"); tempCounty.put(430204, "石峰区"); tempCounty.put(430211, "天元区"); tempCounty.put(430212, "渌口区"); tempCounty.put(430223, "攸县"); tempCounty.put(430224, "茶陵县"); tempCounty.put(430225, "炎陵县"); tempCounty.put(430271, "云龙示范区"); tempCounty.put(430281, "醴陵市"); tempCounty.put(430302, "雨湖区"); tempCounty.put(430304, "岳塘区"); tempCounty.put(430321, "湘潭县"); tempCounty.put(430371, "湖南湘潭高新技术产业园区"); tempCounty.put(430372, "湘潭昭山示范区"); tempCounty.put(430373, "湘潭九华示范区"); tempCounty.put(430381, "湘乡市"); tempCounty.put(430382, "韶山市"); tempCounty.put(430405, "珠晖区"); tempCounty.put(430406, "雁峰区"); tempCounty.put(430407, "石鼓区"); tempCounty.put(430408, "蒸湘区"); tempCounty.put(430412, "南岳区"); tempCounty.put(430421, "衡阳县"); tempCounty.put(430422, "衡南县"); tempCounty.put(430423, "衡山县"); tempCounty.put(430424, "衡东县"); tempCounty.put(430426, "祁东县"); tempCounty.put(430471, "衡阳综合保税区"); tempCounty.put(430472, "湖南衡阳高新技术产业园区"); tempCounty.put(430473, "湖南衡阳松木经济开发区"); tempCounty.put(430481, "耒阳市"); tempCounty.put(430482, "常宁市"); tempCounty.put(430502, "双清区"); tempCounty.put(430503, "大祥区"); tempCounty.put(430511, "北塔区"); tempCounty.put(430522, "新邵县"); tempCounty.put(430523, "邵阳县"); tempCounty.put(430524, "隆回县"); tempCounty.put(430525, 
"洞口县"); tempCounty.put(430527, "绥宁县"); tempCounty.put(430528, "新宁县"); tempCounty.put(430529, "城步苗族自治县"); tempCounty.put(430581, "武冈市"); tempCounty.put(430582, "邵东市"); tempCounty.put(430602, "岳阳楼区"); tempCounty.put(430603, "云溪区"); tempCounty.put(430611, "君山区"); tempCounty.put(430621, "岳阳县"); tempCounty.put(430623, "华容县"); tempCounty.put(430624, "湘阴县"); tempCounty.put(430626, "平江县"); tempCounty.put(430671, "岳阳市屈原管理区"); tempCounty.put(430681, "汨罗市"); tempCounty.put(430682, "临湘市"); tempCounty.put(430702, "武陵区"); tempCounty.put(430703, "鼎城区"); tempCounty.put(430721, "安乡县"); tempCounty.put(430722, "汉寿县"); tempCounty.put(430723, "澧县"); tempCounty.put(430724, "临澧县"); tempCounty.put(430725, "桃源县"); tempCounty.put(430726, "石门县"); tempCounty.put(430771, "常德市西洞庭管理区"); tempCounty.put(430781, "津市市"); tempCounty.put(430802, "永定区"); tempCounty.put(430811, "武陵源区"); tempCounty.put(430821, "慈利县"); tempCounty.put(430822, "桑植县"); tempCounty.put(430902, "资阳区"); tempCounty.put(430903, "赫山区"); tempCounty.put(430921, "南县"); tempCounty.put(430922, "桃江县"); tempCounty.put(430923, "安化县"); tempCounty.put(430971, "益阳市大通湖管理区"); tempCounty.put(430972, "湖南益阳高新技术产业园区"); tempCounty.put(430981, "沅江市"); tempCounty.put(431002, "北湖区"); tempCounty.put(431003, "苏仙区"); tempCounty.put(431021, "桂阳县"); tempCounty.put(431022, "宜章县"); tempCounty.put(431023, "永兴县"); tempCounty.put(431024, "嘉禾县"); tempCounty.put(431025, "临武县"); tempCounty.put(431026, "汝城县"); tempCounty.put(431027, "桂东县"); tempCounty.put(431028, "安仁县"); tempCounty.put(431081, "资兴市"); tempCounty.put(431102, "零陵区"); tempCounty.put(431103, "冷水滩区"); tempCounty.put(431121, "祁阳县"); tempCounty.put(431122, "东安县"); tempCounty.put(431123, "双牌县"); tempCounty.put(431124, "道县"); tempCounty.put(431125, "江永县"); tempCounty.put(431126, "宁远县"); tempCounty.put(431127, "蓝山县"); tempCounty.put(431128, "新田县"); tempCounty.put(431129, "江华瑶族自治县"); tempCounty.put(431171, "永州经济技术开发区"); tempCounty.put(431172, "永州市金洞管理区"); tempCounty.put(431173, "永州市回龙圩管理区"); 
tempCounty.put(431202, "鹤城区"); tempCounty.put(431221, "中方县"); tempCounty.put(431222, "沅陵县"); tempCounty.put(431223, "辰溪县"); tempCounty.put(431224, "溆浦县"); tempCounty.put(431225, "会同县"); tempCounty.put(431226, "麻阳苗族自治县"); tempCounty.put(431227, "新晃侗族自治县"); tempCounty.put(431228, "芷江侗族自治县"); tempCounty.put(431229, "靖州苗族侗族自治县"); tempCounty.put(431230, "通道侗族自治县"); tempCounty.put(431271, "怀化市洪江管理区"); tempCounty.put(431281, "洪江市"); tempCounty.put(431302, "娄星区"); tempCounty.put(431321, "双峰县"); tempCounty.put(431322, "新化县"); tempCounty.put(431381, "冷水江市"); tempCounty.put(431382, "涟源市"); tempCounty.put(433101, "吉首市"); tempCounty.put(433122, "泸溪县"); tempCounty.put(433123, "凤凰县"); tempCounty.put(433124, "花垣县"); tempCounty.put(433125, "保靖县"); tempCounty.put(433126, "古丈县"); tempCounty.put(433127, "永顺县"); tempCounty.put(433130, "龙山县"); tempCounty.put(440103, "荔湾区"); tempCounty.put(440104, "越秀区"); tempCounty.put(440105, "海珠区"); tempCounty.put(440106, "天河区"); tempCounty.put(440111, "白云区"); tempCounty.put(440112, "黄埔区"); tempCounty.put(440113, "番禺区"); tempCounty.put(440114, "花都区"); tempCounty.put(440115, "南沙区"); tempCounty.put(440117, "从化区"); tempCounty.put(440118, "增城区"); tempCounty.put(440203, "武江区"); tempCounty.put(440204, "浈江区"); tempCounty.put(440205, "曲江区"); tempCounty.put(440222, "始兴县"); tempCounty.put(440224, "仁化县"); tempCounty.put(440229, "翁源县"); tempCounty.put(440232, "乳源瑶族自治县"); tempCounty.put(440233, "新丰县"); tempCounty.put(440281, "乐昌市"); tempCounty.put(440282, "南雄市"); tempCounty.put(440303, "罗湖区"); tempCounty.put(440304, "福田区"); tempCounty.put(440305, "南山区"); tempCounty.put(440306, "宝安区"); tempCounty.put(440307, "龙岗区"); tempCounty.put(440308, "盐田区"); tempCounty.put(440309, "龙华区"); tempCounty.put(440310, "坪山区"); tempCounty.put(440311, "光明区"); tempCounty.put(440402, "香洲区"); tempCounty.put(440403, "斗门区"); tempCounty.put(440404, "金湾区"); tempCounty.put(440507, "龙湖区"); tempCounty.put(440511, "金平区"); tempCounty.put(440512, "濠江区"); tempCounty.put(440513, "潮阳区"); 
tempCounty.put(440514, "潮南区"); tempCounty.put(440515, "澄海区"); tempCounty.put(440523, "南澳县"); tempCounty.put(440604, "禅城区"); tempCounty.put(440605, "南海区"); tempCounty.put(440606, "顺德区"); tempCounty.put(440607, "三水区"); tempCounty.put(440608, "高明区"); tempCounty.put(440703, "蓬江区"); tempCounty.put(440704, "江海区"); tempCounty.put(440705, "新会区"); tempCounty.put(440781, "台山市"); tempCounty.put(440783, "开平市"); tempCounty.put(440784, "鹤山市"); tempCounty.put(440785, "恩平市"); tempCounty.put(440802, "赤坎区"); tempCounty.put(440803, "霞山区"); tempCounty.put(440804, "坡头区"); tempCounty.put(440811, "麻章区"); tempCounty.put(440823, "遂溪县"); tempCounty.put(440825, "徐闻县"); tempCounty.put(440881, "廉江市"); tempCounty.put(440882, "雷州市"); tempCounty.put(440883, "吴川市"); tempCounty.put(440902, "茂南区"); tempCounty.put(440904, "电白区"); tempCounty.put(440981, "高州市"); tempCounty.put(440982, "化州市"); tempCounty.put(440983, "信宜市"); tempCounty.put(441202, "端州区"); tempCounty.put(441203, "鼎湖区"); tempCounty.put(441204, "高要区"); tempCounty.put(441223, "广宁县"); tempCounty.put(441224, "怀集县"); tempCounty.put(441225, "封开县"); tempCounty.put(441226, "德庆县"); tempCounty.put(441284, "四会市"); tempCounty.put(441302, "惠城区"); tempCounty.put(441303, "惠阳区"); tempCounty.put(441322, "博罗县"); tempCounty.put(441323, "惠东县"); tempCounty.put(441324, "龙门县"); tempCounty.put(441402, "梅江区"); tempCounty.put(441403, "梅县区"); tempCounty.put(441422, "大埔县"); tempCounty.put(441423, "丰顺县"); tempCounty.put(441424, "五华县"); tempCounty.put(441426, "平远县"); tempCounty.put(441427, "蕉岭县"); tempCounty.put(441481, "兴宁市"); tempCounty.put(441502, "城区"); tempCounty.put(441521, "海丰县"); tempCounty.put(441523, "陆河县"); tempCounty.put(441581, "陆丰市"); tempCounty.put(441602, "源城区"); tempCounty.put(441621, "紫金县"); tempCounty.put(441622, "龙川县"); tempCounty.put(441623, "连平县"); tempCounty.put(441624, "和平县"); tempCounty.put(441625, "东源县"); tempCounty.put(441702, "江城区"); tempCounty.put(441704, "阳东区"); tempCounty.put(441721, "阳西县"); tempCounty.put(441781, "阳春市"); 
tempCounty.put(441802, "清城区"); tempCounty.put(441803, "清新区"); tempCounty.put(441821, "佛冈县"); tempCounty.put(441823, "阳山县"); tempCounty.put(441825, "连山壮族瑶族自治县"); tempCounty.put(441826, "连南瑶族自治县"); tempCounty.put(441881, "英德市"); tempCounty.put(441882, "连州市"); tempCounty.put(441900, "常平镇"); tempCounty.put(442000, "横栏镇"); tempCounty.put(445102, "湘桥区"); tempCounty.put(445103, "潮安区"); tempCounty.put(445122, "饶平县"); tempCounty.put(445202, "榕城区"); tempCounty.put(445203, "揭东区"); tempCounty.put(445222, "揭西县"); tempCounty.put(445224, "惠来县"); tempCounty.put(445281, "普宁市"); tempCounty.put(445302, "云城区"); tempCounty.put(445303, "云安区"); tempCounty.put(445321, "新兴县"); tempCounty.put(445322, "郁南县"); tempCounty.put(445381, "罗定市"); tempCounty.put(450102, "兴宁区"); tempCounty.put(450103, "青秀区"); tempCounty.put(450105, "江南区"); tempCounty.put(450107, "西乡塘区"); tempCounty.put(450108, "良庆区"); tempCounty.put(450109, "邕宁区"); tempCounty.put(450110, "武鸣区"); tempCounty.put(450123, "隆安县"); tempCounty.put(450124, "马山县"); tempCounty.put(450125, "上林县"); tempCounty.put(450126, "宾阳县"); tempCounty.put(450127, "横县"); tempCounty.put(450202, "城中区"); tempCounty.put(450203, "鱼峰区"); tempCounty.put(450204, "柳南区"); tempCounty.put(450205, "柳北区"); tempCounty.put(450206, "柳江区"); tempCounty.put(450222, "柳城县"); tempCounty.put(450223, "鹿寨县"); tempCounty.put(450224, "融安县"); tempCounty.put(450225, "融水苗族自治县"); tempCounty.put(450226, "三江侗族自治县"); tempCounty.put(450302, "秀峰区"); tempCounty.put(450303, "叠彩区"); tempCounty.put(450304, "象山区"); tempCounty.put(450305, "七星区"); tempCounty.put(450311, "雁山区"); tempCounty.put(450312, "临桂区"); tempCounty.put(450321, "阳朔县"); tempCounty.put(450323, "灵川县"); tempCounty.put(450324, "全州县"); tempCounty.put(450325, "兴安县"); tempCounty.put(450326, "永福县"); tempCounty.put(450327, "灌阳县"); tempCounty.put(450328, "龙胜各族自治县"); tempCounty.put(450329, "资源县"); tempCounty.put(450330, "平乐县"); tempCounty.put(450332, "恭城瑶族自治县"); tempCounty.put(450381, "荔浦市"); tempCounty.put(450403, "万秀区"); 
tempCounty.put(450405, "长洲区"); tempCounty.put(450406, "龙圩区"); tempCounty.put(450421, "苍梧县"); tempCounty.put(450422, "藤县"); tempCounty.put(450423, "蒙山县"); tempCounty.put(450481, "岑溪市"); tempCounty.put(450502, "海城区"); tempCounty.put(450503, "银海区"); tempCounty.put(450512, "铁山港区"); tempCounty.put(450521, "合浦县"); tempCounty.put(450602, "港口区"); tempCounty.put(450603, "防城区"); tempCounty.put(450621, "上思县"); tempCounty.put(450681, "东兴市"); tempCounty.put(450702, "钦南区"); tempCounty.put(450703, "钦北区"); tempCounty.put(450721, "灵山县"); tempCounty.put(450722, "浦北县"); tempCounty.put(450802, "港北区"); tempCounty.put(450803, "港南区"); tempCounty.put(450804, "覃塘区"); tempCounty.put(450821, "平南县"); tempCounty.put(450881, "桂平市"); tempCounty.put(450902, "玉州区"); tempCounty.put(450903, "福绵区"); tempCounty.put(450921, "容县"); tempCounty.put(450922, "陆川县"); tempCounty.put(450923, "博白县"); tempCounty.put(450924, "兴业县"); tempCounty.put(450981, "北流市"); tempCounty.put(451002, "右江区"); tempCounty.put(451003, "田阳区"); tempCounty.put(451022, "田东县"); tempCounty.put(451024, "德保县"); tempCounty.put(451026, "那坡县"); tempCounty.put(451027, "凌云县"); tempCounty.put(451028, "乐业县"); tempCounty.put(451029, "田林县"); tempCounty.put(451030, "西林县"); tempCounty.put(451031, "隆林各族自治县"); tempCounty.put(451081, "靖西市"); tempCounty.put(451082, "平果市"); tempCounty.put(451102, "八步区"); tempCounty.put(451103, "平桂区"); tempCounty.put(451121, "昭平县"); tempCounty.put(451122, "钟山县"); tempCounty.put(451123, "富川瑶族自治县"); tempCounty.put(451202, "金城江区"); tempCounty.put(451203, "宜州区"); tempCounty.put(451221, "南丹县"); tempCounty.put(451222, "天峨县"); tempCounty.put(451223, "凤山县"); tempCounty.put(451224, "东兰县"); tempCounty.put(451225, "罗城仫佬族自治县"); tempCounty.put(451226, "环江毛南族自治县"); tempCounty.put(451227, "巴马瑶族自治县"); tempCounty.put(451228, "都安瑶族自治县"); tempCounty.put(451229, "大化瑶族自治县"); tempCounty.put(451302, "兴宾区"); tempCounty.put(451321, "忻城县"); tempCounty.put(451322, "象州县"); tempCounty.put(451323, "武宣县"); tempCounty.put(451324, "金秀瑶族自治县"); 
tempCounty.put(451381, "合山市"); tempCounty.put(451402, "江州区"); tempCounty.put(451421, "扶绥县"); tempCounty.put(451422, "宁明县"); tempCounty.put(451423, "龙州县"); tempCounty.put(451424, "大新县"); tempCounty.put(451425, "天等县"); tempCounty.put(451481, "凭祥市"); tempCounty.put(460105, "秀英区"); tempCounty.put(460106, "龙华区"); tempCounty.put(460107, "琼山区"); tempCounty.put(460108, "美兰区"); tempCounty.put(460202, "海棠区"); tempCounty.put(460203, "吉阳区"); tempCounty.put(460204, "天涯区"); tempCounty.put(460205, "崖州区"); tempCounty.put(460321, "西沙群岛"); tempCounty.put(460322, "南沙群岛"); tempCounty.put(460323, "中沙群岛的岛礁及其海域"); tempCounty.put(460400, "那大镇"); tempCounty.put(469001, "五指山市"); tempCounty.put(469002, "琼海市"); tempCounty.put(469005, "文昌市"); tempCounty.put(469006, "万宁市"); tempCounty.put(469007, "东方市"); tempCounty.put(469021, "定安县"); tempCounty.put(469022, "屯昌县"); tempCounty.put(469023, "澄迈县"); tempCounty.put(469024, "临高县"); tempCounty.put(469025, "白沙黎族自治县"); tempCounty.put(469026, "昌江黎族自治县"); tempCounty.put(469027, "乐东黎族自治县"); tempCounty.put(469028, "陵水黎族自治县"); tempCounty.put(469029, "保亭黎族苗族自治县"); tempCounty.put(469030, "琼中黎族苗族自治县"); tempCounty.put(500101, "万州区"); tempCounty.put(500102, "涪陵区"); tempCounty.put(500103, "渝中区"); tempCounty.put(500104, "大渡口区"); tempCounty.put(500105, "江北区"); tempCounty.put(500106, "沙坪坝区"); tempCounty.put(500107, "九龙坡区"); tempCounty.put(500108, "南岸区"); tempCounty.put(500109, "北碚区"); tempCounty.put(500110, "綦江区"); tempCounty.put(500111, "大足区"); tempCounty.put(500112, "渝北区"); tempCounty.put(500113, "巴南区"); tempCounty.put(500114, "黔江区"); tempCounty.put(500115, "长寿区"); tempCounty.put(500116, "江津区"); tempCounty.put(500117, "合川区"); tempCounty.put(500118, "永川区"); tempCounty.put(500119, "南川区"); tempCounty.put(500120, "璧山区"); tempCounty.put(500151, "铜梁区"); tempCounty.put(500152, "潼南区"); tempCounty.put(500153, "荣昌区"); tempCounty.put(500154, "开州区"); tempCounty.put(500155, "梁平区"); tempCounty.put(500156, "武隆区"); tempCounty.put(500229, "城口县"); tempCounty.put(500230, "丰都县"); 
tempCounty.put(500231, "垫江县"); tempCounty.put(500233, "忠县"); tempCounty.put(500235, "云阳县"); tempCounty.put(500236, "奉节县"); tempCounty.put(500237, "巫山县"); tempCounty.put(500238, "巫溪县"); tempCounty.put(500240, "石柱土家族自治县"); tempCounty.put(500241, "秀山土家族苗族自治县"); tempCounty.put(500242, "酉阳土家族苗族自治县"); tempCounty.put(500243, "彭水苗族土家族自治县"); tempCounty.put(510104, "锦江区"); tempCounty.put(510105, "青羊区"); tempCounty.put(510106, "金牛区"); tempCounty.put(510107, "武侯区"); tempCounty.put(510108, "成华区"); tempCounty.put(510112, "龙泉驿区"); tempCounty.put(510113, "青白江区"); tempCounty.put(510114, "新都区"); tempCounty.put(510115, "温江区"); tempCounty.put(510116, "双流区"); tempCounty.put(510117, "郫都区"); tempCounty.put(510118, "新津区"); tempCounty.put(510121, "金堂县"); tempCounty.put(510129, "大邑县"); tempCounty.put(510131, "蒲江县"); tempCounty.put(510181, "都江堰市"); tempCounty.put(510182, "彭州市"); tempCounty.put(510183, "邛崃市"); tempCounty.put(510184, "崇州市"); tempCounty.put(510185, "简阳市"); tempCounty.put(510302, "自流井区"); tempCounty.put(510303, "贡井区"); tempCounty.put(510304, "大安区"); tempCounty.put(510311, "沿滩区"); tempCounty.put(510321, "荣县"); tempCounty.put(510322, "富顺县"); tempCounty.put(510402, "东区"); tempCounty.put(510403, "西区"); tempCounty.put(510411, "仁和区"); tempCounty.put(510421, "米易县"); tempCounty.put(510422, "盐边县"); tempCounty.put(510502, "江阳区"); tempCounty.put(510503, "纳溪区"); tempCounty.put(510504, "龙马潭区"); tempCounty.put(510521, "泸县"); tempCounty.put(510522, "合江县"); tempCounty.put(510524, "叙永县"); tempCounty.put(510525, "古蔺县"); tempCounty.put(510603, "旌阳区"); tempCounty.put(510604, "罗江区"); tempCounty.put(510623, "中江县"); tempCounty.put(510681, "广汉市"); tempCounty.put(510682, "什邡市"); tempCounty.put(510683, "绵竹市"); tempCounty.put(510703, "涪城区"); tempCounty.put(510704, "游仙区"); tempCounty.put(510705, "安州区"); tempCounty.put(510722, "三台县"); tempCounty.put(510723, "盐亭县"); tempCounty.put(510725, "梓潼县"); tempCounty.put(510726, "北川羌族自治县"); tempCounty.put(510727, "平武县"); tempCounty.put(510781, "江油市"); 
tempCounty.put(510802, "利州区"); tempCounty.put(510811, "昭化区"); tempCounty.put(510812, "朝天区"); tempCounty.put(510821, "旺苍县"); tempCounty.put(510822, "青川县"); tempCounty.put(510823, "剑阁县"); tempCounty.put(510824, "苍溪县"); tempCounty.put(510903, "船山区"); tempCounty.put(510904, "安居区"); tempCounty.put(510921, "蓬溪县"); tempCounty.put(510923, "大英县"); tempCounty.put(510981, "射洪市"); tempCounty.put(511002, "市中区"); tempCounty.put(511011, "东兴区"); tempCounty.put(511024, "威远县"); tempCounty.put(511025, "资中县"); tempCounty.put(511071, "内江经济开发区"); tempCounty.put(511083, "隆昌市"); tempCounty.put(511102, "市中区"); tempCounty.put(511111, "沙湾区"); tempCounty.put(511112, "五通桥区"); tempCounty.put(511113, "金口河区"); tempCounty.put(511123, "犍为县"); tempCounty.put(511124, "井研县"); tempCounty.put(511126, "夹江县"); tempCounty.put(511129, "沐川县"); tempCounty.put(511132, "峨边彝族自治县"); tempCounty.put(511133, "马边彝族自治县"); tempCounty.put(511181, "峨眉山市"); tempCounty.put(511302, "顺庆区"); tempCounty.put(511303, "高坪区"); tempCounty.put(511304, "嘉陵区"); tempCounty.put(511321, "南部县"); tempCounty.put(511322, "营山县"); tempCounty.put(511323, "蓬安县"); tempCounty.put(511324, "仪陇县"); tempCounty.put(511325, "西充县"); tempCounty.put(511381, "阆中市"); tempCounty.put(511402, "东坡区"); tempCounty.put(511403, "彭山区"); tempCounty.put(511421, "仁寿县"); tempCounty.put(511423, "洪雅县"); tempCounty.put(511424, "丹棱县"); tempCounty.put(511425, "青神县"); tempCounty.put(511502, "翠屏区"); tempCounty.put(511503, "南溪区"); tempCounty.put(511504, "叙州区"); tempCounty.put(511523, "江安县"); tempCounty.put(511524, "长宁县"); tempCounty.put(511525, "高县"); tempCounty.put(511526, "珙县"); tempCounty.put(511527, "筠连县"); tempCounty.put(511528, "兴文县"); tempCounty.put(511529, "屏山县"); tempCounty.put(511602, "广安区"); tempCounty.put(511603, "前锋区"); tempCounty.put(511621, "岳池县"); tempCounty.put(511622, "武胜县"); tempCounty.put(511623, "邻水县"); tempCounty.put(511681, "华蓥市"); tempCounty.put(511702, "通川区"); tempCounty.put(511703, "达川区"); tempCounty.put(511722, "宣汉县"); tempCounty.put(511723, "开江县"); 
tempCounty.put(511724, "大竹县"); tempCounty.put(511725, "渠县"); tempCounty.put(511771, "达州经济开发区"); tempCounty.put(511781, "万源市"); tempCounty.put(511802, "雨城区"); tempCounty.put(511803, "名山区"); tempCounty.put(511822, "荥经县"); tempCounty.put(511823, "汉源县"); tempCounty.put(511824, "石棉县"); tempCounty.put(511825, "天全县"); tempCounty.put(511826, "芦山县"); tempCounty.put(511827, "宝兴县"); tempCounty.put(511902, "巴州区"); tempCounty.put(511903, "恩阳区"); tempCounty.put(511921, "通江县"); tempCounty.put(511922, "南江县"); tempCounty.put(511923, "平昌县"); tempCounty.put(511971, "巴中经济开发区"); tempCounty.put(512002, "雁江区"); tempCounty.put(512021, "安岳县"); tempCounty.put(512022, "乐至县"); tempCounty.put(513201, "马尔康市"); tempCounty.put(513221, "汶川县"); tempCounty.put(513222, "理县"); tempCounty.put(513223, "茂县"); tempCounty.put(513224, "松潘县"); tempCounty.put(513225, "九寨沟县"); tempCounty.put(513226, "金川县"); tempCounty.put(513227, "小金县"); tempCounty.put(513228, "黑水县"); tempCounty.put(513230, "壤塘县"); tempCounty.put(513231, "阿坝县"); tempCounty.put(513232, "若尔盖县"); tempCounty.put(513233, "红原县"); tempCounty.put(513301, "康定市"); tempCounty.put(513322, "泸定县"); tempCounty.put(513323, "丹巴县"); tempCounty.put(513324, "九龙县"); tempCounty.put(513325, "雅江县"); tempCounty.put(513326, "道孚县"); tempCounty.put(513327, "炉霍县"); tempCounty.put(513328, "甘孜县"); tempCounty.put(513329, "新龙县"); tempCounty.put(513330, "德格县"); tempCounty.put(513331, "白玉县"); tempCounty.put(513332, "石渠县"); tempCounty.put(513333, "色达县"); tempCounty.put(513334, "理塘县"); tempCounty.put(513335, "巴塘县"); tempCounty.put(513336, "乡城县"); tempCounty.put(513337, "稻城县"); tempCounty.put(513338, "得荣县"); tempCounty.put(513401, "西昌市"); tempCounty.put(513422, "木里藏族自治县"); tempCounty.put(513423, "盐源县"); tempCounty.put(513424, "德昌县"); tempCounty.put(513425, "会理县"); tempCounty.put(513426, "会东县"); tempCounty.put(513427, "宁南县"); tempCounty.put(513428, "普格县"); tempCounty.put(513429, "布拖县"); tempCounty.put(513430, "金阳县"); tempCounty.put(513431, "昭觉县"); tempCounty.put(513432, "喜德县"); 
tempCounty.put(513433, "冕宁县"); tempCounty.put(513434, "越西县"); tempCounty.put(513435, "甘洛县"); tempCounty.put(513436, "美姑县"); tempCounty.put(513437, "雷波县"); tempCounty.put(520102, "南明区"); tempCounty.put(520103, "云岩区"); tempCounty.put(520111, "花溪区"); tempCounty.put(520112, "乌当区"); tempCounty.put(520113, "白云区"); tempCounty.put(520115, "观山湖区"); tempCounty.put(520121, "开阳县"); tempCounty.put(520122, "息烽县"); tempCounty.put(520123, "修文县"); tempCounty.put(520181, "清镇市"); tempCounty.put(520201, "钟山区"); tempCounty.put(520203, "六枝特区"); tempCounty.put(520221, "水城县"); tempCounty.put(520281, "盘州市"); tempCounty.put(520302, "红花岗区"); tempCounty.put(520303, "汇川区"); tempCounty.put(520304, "播州区"); tempCounty.put(520322, "桐梓县"); tempCounty.put(520323, "绥阳县"); tempCounty.put(520324, "正安县"); tempCounty.put(520325, "道真仡佬族苗族自治县"); tempCounty.put(520326, "务川仡佬族苗族自治县"); tempCounty.put(520327, "凤冈县"); tempCounty.put(520328, "湄潭县"); tempCounty.put(520329, "余庆县"); tempCounty.put(520330, "习水县"); tempCounty.put(520381, "赤水市"); tempCounty.put(520382, "仁怀市"); tempCounty.put(520402, "西秀区"); tempCounty.put(520403, "平坝区"); tempCounty.put(520422, "普定县"); tempCounty.put(520423, "镇宁布依族苗族自治县"); tempCounty.put(520424, "关岭布依族苗族自治县"); tempCounty.put(520425, "紫云苗族布依族自治县"); tempCounty.put(520502, "七星关区"); tempCounty.put(520521, "大方县"); tempCounty.put(520522, "黔西县"); tempCounty.put(520523, "金沙县"); tempCounty.put(520524, "织金县"); tempCounty.put(520525, "纳雍县"); tempCounty.put(520526, "威宁彝族回族苗族自治县"); tempCounty.put(520527, "赫章县"); tempCounty.put(520602, "碧江区"); tempCounty.put(520603, "万山区"); tempCounty.put(520621, "江口县"); tempCounty.put(520622, "玉屏侗族自治县"); tempCounty.put(520623, "石阡县"); tempCounty.put(520624, "思南县"); tempCounty.put(520625, "印江土家族苗族自治县"); tempCounty.put(520626, "德江县"); tempCounty.put(520627, "沿河土家族自治县"); tempCounty.put(520628, "松桃苗族自治县"); tempCounty.put(522301, "兴义市"); tempCounty.put(522302, "兴仁市"); tempCounty.put(522323, "普安县"); tempCounty.put(522324, "晴隆县"); tempCounty.put(522325, "贞丰县"); 
tempCounty.put(522326, "望谟县"); tempCounty.put(522327, "册亨县"); tempCounty.put(522328, "安龙县"); tempCounty.put(522601, "凯里市"); tempCounty.put(522622, "黄平县"); tempCounty.put(522623, "施秉县"); tempCounty.put(522624, "三穗县"); tempCounty.put(522625, "镇远县"); tempCounty.put(522626, "岑巩县"); tempCounty.put(522627, "天柱县"); tempCounty.put(522628, "锦屏县"); tempCounty.put(522629, "剑河县"); tempCounty.put(522630, "台江县"); tempCounty.put(522631, "黎平县"); tempCounty.put(522632, "榕江县"); tempCounty.put(522633, "从江县"); tempCounty.put(522634, "雷山县"); tempCounty.put(522635, "麻江县"); tempCounty.put(522636, "丹寨县"); tempCounty.put(522701, "都匀市"); tempCounty.put(522702, "福泉市"); tempCounty.put(522722, "荔波县"); tempCounty.put(522723, "贵定县"); tempCounty.put(522725, "瓮安县"); tempCounty.put(522726, "独山县"); tempCounty.put(522727, "平塘县"); tempCounty.put(522728, "罗甸县"); tempCounty.put(522729, "长顺县"); tempCounty.put(522730, "龙里县"); tempCounty.put(522731, "惠水县"); tempCounty.put(522732, "三都水族自治县"); tempCounty.put(530102, "五华区"); tempCounty.put(530103, "盘龙区"); tempCounty.put(530111, "官渡区"); tempCounty.put(530112, "西山区"); tempCounty.put(530113, "东川区"); tempCounty.put(530114, "呈贡区"); tempCounty.put(530115, "晋宁区"); tempCounty.put(530124, "富民县"); tempCounty.put(530125, "宜良县"); tempCounty.put(530126, "石林彝族自治县"); tempCounty.put(530127, "嵩明县"); tempCounty.put(530128, "禄劝彝族苗族自治县"); tempCounty.put(530129, "寻甸回族彝族自治县"); tempCounty.put(530181, "安宁市"); tempCounty.put(530302, "麒麟区"); tempCounty.put(530303, "沾益区"); tempCounty.put(530304, "马龙区"); tempCounty.put(530322, "陆良县"); tempCounty.put(530323, "师宗县"); tempCounty.put(530324, "罗平县"); tempCounty.put(530325, "富源县"); tempCounty.put(530326, "会泽县"); tempCounty.put(530381, "宣威市"); tempCounty.put(530402, "红塔区"); tempCounty.put(530403, "江川区"); tempCounty.put(530423, "通海县"); tempCounty.put(530424, "华宁县"); tempCounty.put(530425, "易门县"); tempCounty.put(530426, "峨山彝族自治县"); tempCounty.put(530427, "新平彝族傣族自治县"); tempCounty.put(530428, "元江哈尼族彝族傣族自治县"); tempCounty.put(530481, "澄江市"); 
tempCounty.put(530502, "隆阳区"); tempCounty.put(530521, "施甸县"); tempCounty.put(530523, "龙陵县"); tempCounty.put(530524, "昌宁县"); tempCounty.put(530581, "腾冲市"); tempCounty.put(530602, "昭阳区"); tempCounty.put(530621, "鲁甸县"); tempCounty.put(530622, "巧家县"); tempCounty.put(530623, "盐津县"); tempCounty.put(530624, "大关县"); tempCounty.put(530625, "永善县"); tempCounty.put(530626, "绥江县"); tempCounty.put(530627, "镇雄县"); tempCounty.put(530628, "彝良县"); tempCounty.put(530629, "威信县"); tempCounty.put(530681, "水富市"); tempCounty.put(530702, "古城区"); tempCounty.put(530721, "玉龙纳西族自治县"); tempCounty.put(530722, "永胜县"); tempCounty.put(530723, "华坪县"); tempCounty.put(530724, "宁蒗彝族自治县"); tempCounty.put(530802, "思茅区"); tempCounty.put(530821, "宁洱哈尼族彝族自治县"); tempCounty.put(530822, "墨江哈尼族自治县"); tempCounty.put(530823, "景东彝族自治县"); tempCounty.put(530824, "景谷傣族彝族自治县"); tempCounty.put(530825, "镇沅彝族哈尼族拉祜族自治县"); tempCounty.put(530826, "江城哈尼族彝族自治县"); tempCounty.put(530827, "孟连傣族拉祜族佤族自治县"); tempCounty.put(530828, "澜沧拉祜族自治县"); tempCounty.put(530829, "西盟佤族自治县"); tempCounty.put(530902, "临翔区"); tempCounty.put(530921, "凤庆县"); tempCounty.put(530922, "云县"); tempCounty.put(530923, "永德县"); tempCounty.put(530924, "镇康县"); tempCounty.put(530925, "双江拉祜族佤族布朗族傣族自治县"); tempCounty.put(530926, "耿马傣族佤族自治县"); tempCounty.put(530927, "沧源佤族自治县"); tempCounty.put(532301, "楚雄市"); tempCounty.put(532322, "双柏县"); tempCounty.put(532323, "牟定县"); tempCounty.put(532324, "南华县"); tempCounty.put(532325, "姚安县"); tempCounty.put(532326, "大姚县"); tempCounty.put(532327, "永仁县"); tempCounty.put(532328, "元谋县"); tempCounty.put(532329, "武定县"); tempCounty.put(532331, "禄丰县"); tempCounty.put(532501, "个旧市"); tempCounty.put(532502, "开远市"); tempCounty.put(532503, "蒙自市"); tempCounty.put(532504, "弥勒市"); tempCounty.put(532523, "屏边苗族自治县"); tempCounty.put(532524, "建水县"); tempCounty.put(532525, "石屏县"); tempCounty.put(532527, "泸西县"); tempCounty.put(532528, "元阳县"); tempCounty.put(532529, "红河县"); tempCounty.put(532530, "金平苗族瑶族傣族自治县"); tempCounty.put(532531, "绿春县"); 
tempCounty.put(532532, "河口瑶族自治县"); tempCounty.put(532601, "文山市"); tempCounty.put(532622, "砚山县"); tempCounty.put(532623, "西畴县"); tempCounty.put(532624, "麻栗坡县"); tempCounty.put(532625, "马关县"); tempCounty.put(532626, "丘北县"); tempCounty.put(532627, "广南县"); tempCounty.put(532628, "富宁县"); tempCounty.put(532801, "景洪市"); tempCounty.put(532822, "勐海县"); tempCounty.put(532823, "勐腊县"); tempCounty.put(532901, "大理市"); tempCounty.put(532922, "漾濞彝族自治县"); tempCounty.put(532923, "祥云县"); tempCounty.put(532924, "宾川县"); tempCounty.put(532925, "弥渡县"); tempCounty.put(532926, "南涧彝族自治县"); tempCounty.put(532927, "巍山彝族回族自治县"); tempCounty.put(532928, "永平县"); tempCounty.put(532929, "云龙县"); tempCounty.put(532930, "洱源县"); tempCounty.put(532931, "剑川县"); tempCounty.put(532932, "鹤庆县"); tempCounty.put(533102, "瑞丽市"); tempCounty.put(533103, "芒市"); tempCounty.put(533122, "梁河县"); tempCounty.put(533123, "盈江县"); tempCounty.put(533124, "陇川县"); tempCounty.put(533301, "泸水市"); tempCounty.put(533323, "福贡县"); tempCounty.put(533324, "贡山独龙族怒族自治县"); tempCounty.put(533325, "兰坪白族普米族自治县"); tempCounty.put(533401, "香格里拉市"); tempCounty.put(533422, "德钦县"); tempCounty.put(533423, "维西傈僳族自治县"); tempCounty.put(540102, "城关区"); tempCounty.put(540103, "堆龙德庆区"); tempCounty.put(540104, "达孜区"); tempCounty.put(540121, "林周县"); tempCounty.put(540122, "当雄县"); tempCounty.put(540123, "尼木县"); tempCounty.put(540124, "曲水县"); tempCounty.put(540127, "墨竹工卡县"); tempCounty.put(540171, "格尔木藏青工业园区"); tempCounty.put(540172, "拉萨经济技术开发区"); tempCounty.put(540173, "西藏文化旅游创意园区"); tempCounty.put(540174, "达孜工业园区"); tempCounty.put(540202, "桑珠孜区"); tempCounty.put(540221, "南木林县"); tempCounty.put(540222, "江孜县"); tempCounty.put(540223, "定日县"); tempCounty.put(540224, "萨迦县"); tempCounty.put(540225, "拉孜县"); tempCounty.put(540226, "昂仁县"); tempCounty.put(540227, "谢通门县"); tempCounty.put(540228, "白朗县"); tempCounty.put(540229, "仁布县"); tempCounty.put(540230, "康马县"); tempCounty.put(540231, "定结县"); tempCounty.put(540232, "仲巴县"); tempCounty.put(540233, "亚东县"); 
tempCounty.put(540234, "吉隆县"); tempCounty.put(540235, "聂拉木县"); tempCounty.put(540236, "萨嘎县"); tempCounty.put(540237, "岗巴县"); tempCounty.put(540302, "卡若区"); tempCounty.put(540321, "江达县"); tempCounty.put(540322, "贡觉县"); tempCounty.put(540323, "类乌齐县"); tempCounty.put(540324, "丁青县"); tempCounty.put(540325, "察雅县"); tempCounty.put(540326, "八宿县"); tempCounty.put(540327, "左贡县"); tempCounty.put(540328, "芒康县"); tempCounty.put(540329, "洛隆县"); tempCounty.put(540330, "边坝县"); tempCounty.put(540402, "巴宜区"); tempCounty.put(540421, "工布江达县"); tempCounty.put(540422, "米林县"); tempCounty.put(540423, "墨脱县"); tempCounty.put(540424, "波密县"); tempCounty.put(540425, "察隅县"); tempCounty.put(540426, "朗县"); tempCounty.put(540502, "乃东区"); tempCounty.put(540521, "扎囊县"); tempCounty.put(540522, "贡嘎县"); tempCounty.put(540523, "桑日县"); tempCounty.put(540524, "琼结县"); tempCounty.put(540525, "曲松县"); tempCounty.put(540526, "措美县"); tempCounty.put(540527, "洛扎县"); tempCounty.put(540528, "加查县"); tempCounty.put(540529, "隆子县"); tempCounty.put(540530, "错那县"); tempCounty.put(540531, "浪卡子县"); tempCounty.put(540602, "色尼区"); tempCounty.put(540621, "嘉黎县"); tempCounty.put(540622, "比如县"); tempCounty.put(540623, "聂荣县"); tempCounty.put(540624, "安多县"); tempCounty.put(540625, "申扎县"); tempCounty.put(540626, "索县"); tempCounty.put(540627, "班戈县"); tempCounty.put(540628, "巴青县"); tempCounty.put(540629, "尼玛县"); tempCounty.put(540630, "双湖县"); tempCounty.put(542521, "普兰县"); tempCounty.put(542522, "札达县"); tempCounty.put(542523, "噶尔县"); tempCounty.put(542524, "日土县"); tempCounty.put(542525, "革吉县"); tempCounty.put(542526, "改则县"); tempCounty.put(542527, "措勤县"); tempCounty.put(610102, "新城区"); tempCounty.put(610103, "碑林区"); tempCounty.put(610104, "莲湖区"); tempCounty.put(610111, "灞桥区"); tempCounty.put(610112, "未央区"); tempCounty.put(610113, "雁塔区"); tempCounty.put(610114, "阎良区"); tempCounty.put(610115, "临潼区"); tempCounty.put(610116, "长安区"); tempCounty.put(610117, "高陵区"); tempCounty.put(610118, "鄠邑区"); tempCounty.put(610122, "蓝田县"); 
tempCounty.put(610124, "周至县"); tempCounty.put(610202, "王益区"); tempCounty.put(610203, "印台区"); tempCounty.put(610204, "耀州区"); tempCounty.put(610222, "宜君县"); tempCounty.put(610302, "渭滨区"); tempCounty.put(610303, "金台区"); tempCounty.put(610304, "陈仓区"); tempCounty.put(610322, "凤翔县"); tempCounty.put(610323, "岐山县"); tempCounty.put(610324, "扶风县"); tempCounty.put(610326, "眉县"); tempCounty.put(610327, "陇县"); tempCounty.put(610328, "千阳县"); tempCounty.put(610329, "麟游县"); tempCounty.put(610330, "凤县"); tempCounty.put(610331, "太白县"); tempCounty.put(610402, "秦都区"); tempCounty.put(610403, "杨陵区"); tempCounty.put(610404, "渭城区"); tempCounty.put(610422, "三原县"); tempCounty.put(610423, "泾阳县"); tempCounty.put(610424, "乾县"); tempCounty.put(610425, "礼泉县"); tempCounty.put(610426, "永寿县"); tempCounty.put(610428, "长武县"); tempCounty.put(610429, "旬邑县"); tempCounty.put(610430, "淳化县"); tempCounty.put(610431, "武功县"); tempCounty.put(610481, "兴平市"); tempCounty.put(610482, "彬州市"); tempCounty.put(610502, "临渭区"); tempCounty.put(610503, "华州区"); tempCounty.put(610522, "潼关县"); tempCounty.put(610523, "大荔县"); tempCounty.put(610524, "合阳县"); tempCounty.put(610525, "澄城县"); tempCounty.put(610526, "蒲城县"); tempCounty.put(610527, "白水县"); tempCounty.put(610528, "富平县"); tempCounty.put(610581, "韩城市"); tempCounty.put(610582, "华阴市"); tempCounty.put(610602, "宝塔区"); tempCounty.put(610603, "安塞区"); tempCounty.put(610621, "延长县"); tempCounty.put(610622, "延川县"); tempCounty.put(610625, "志丹县"); tempCounty.put(610626, "吴起县"); tempCounty.put(610627, "甘泉县"); tempCounty.put(610628, "富县"); tempCounty.put(610629, "洛川县"); tempCounty.put(610630, "宜川县"); tempCounty.put(610631, "黄龙县"); tempCounty.put(610632, "黄陵县"); tempCounty.put(610681, "子长市"); tempCounty.put(610702, "汉台区"); tempCounty.put(610703, "南郑区"); tempCounty.put(610722, "城固县"); tempCounty.put(610723, "洋县"); tempCounty.put(610724, "西乡县"); tempCounty.put(610725, "勉县"); tempCounty.put(610726, "宁强县"); tempCounty.put(610727, "略阳县"); tempCounty.put(610728, "镇巴县"); tempCounty.put(610729, 
"留坝县"); tempCounty.put(610730, "佛坪县"); tempCounty.put(610802, "榆阳区"); tempCounty.put(610803, "横山区"); tempCounty.put(610822, "府谷县"); tempCounty.put(610824, "靖边县"); tempCounty.put(610825, "定边县"); tempCounty.put(610826, "绥德县"); tempCounty.put(610827, "米脂县"); tempCounty.put(610828, "佳县"); tempCounty.put(610829, "吴堡县"); tempCounty.put(610830, "清涧县"); tempCounty.put(610831, "子洲县"); tempCounty.put(610881, "神木市"); tempCounty.put(610902, "汉滨区"); tempCounty.put(610921, "汉阴县"); tempCounty.put(610922, "石泉县"); tempCounty.put(610923, "宁陕县"); tempCounty.put(610924, "紫阳县"); tempCounty.put(610925, "岚皋县"); tempCounty.put(610926, "平利县"); tempCounty.put(610927, "镇坪县"); tempCounty.put(610928, "旬阳县"); tempCounty.put(610929, "白河县"); tempCounty.put(611002, "商州区"); tempCounty.put(611021, "洛南县"); tempCounty.put(611022, "丹凤县"); tempCounty.put(611023, "商南县"); tempCounty.put(611024, "山阳县"); tempCounty.put(611025, "镇安县"); tempCounty.put(611026, "柞水县"); tempCounty.put(620102, "城关区"); tempCounty.put(620103, "七里河区"); tempCounty.put(620104, "西固区"); tempCounty.put(620105, "安宁区"); tempCounty.put(620111, "红古区"); tempCounty.put(620121, "永登县"); tempCounty.put(620122, "皋兰县"); tempCounty.put(620123, "榆中县"); tempCounty.put(620171, "兰州新区"); tempCounty.put(620201, "市辖区"); tempCounty.put(620302, "金川区"); tempCounty.put(620321, "永昌县"); tempCounty.put(620402, "白银区"); tempCounty.put(620403, "平川区"); tempCounty.put(620421, "靖远县"); tempCounty.put(620422, "会宁县"); tempCounty.put(620423, "景泰县"); tempCounty.put(620502, "秦州区"); tempCounty.put(620503, "麦积区"); tempCounty.put(620521, "清水县"); tempCounty.put(620522, "秦安县"); tempCounty.put(620523, "甘谷县"); tempCounty.put(620524, "武山县"); tempCounty.put(620525, "张家川回族自治县"); tempCounty.put(620602, "凉州区"); tempCounty.put(620621, "民勤县"); tempCounty.put(620622, "古浪县"); tempCounty.put(620623, "天祝藏族自治县"); tempCounty.put(620702, "甘州区"); tempCounty.put(620721, "肃南裕固族自治县"); tempCounty.put(620722, "民乐县"); tempCounty.put(620723, "临泽县"); tempCounty.put(620724, "高台县"); tempCounty.put(620725, 
"山丹县"); tempCounty.put(620802, "崆峒区"); tempCounty.put(620821, "泾川县"); tempCounty.put(620822, "灵台县"); tempCounty.put(620823, "崇信县"); tempCounty.put(620825, "庄浪县"); tempCounty.put(620826, "静宁县"); tempCounty.put(620881, "华亭市"); tempCounty.put(620902, "肃州区"); tempCounty.put(620921, "金塔县"); tempCounty.put(620922, "瓜州县"); tempCounty.put(620923, "肃北蒙古族自治县"); tempCounty.put(620924, "阿克塞哈萨克族自治县"); tempCounty.put(620981, "玉门市"); tempCounty.put(620982, "敦煌市"); tempCounty.put(621002, "西峰区"); tempCounty.put(621021, "庆城县"); tempCounty.put(621022, "环县"); tempCounty.put(621023, "华池县"); tempCounty.put(621024, "合水县"); tempCounty.put(621025, "正宁县"); tempCounty.put(621026, "宁县"); tempCounty.put(621027, "镇原县"); tempCounty.put(621102, "安定区"); tempCounty.put(621121, "通渭县"); tempCounty.put(621122, "陇西县"); tempCounty.put(621123, "渭源县"); tempCounty.put(621124, "临洮县"); tempCounty.put(621125, "漳县"); tempCounty.put(621126, "岷县"); tempCounty.put(621202, "武都区"); tempCounty.put(621221, "成县"); tempCounty.put(621222, "文县"); tempCounty.put(621223, "宕昌县"); tempCounty.put(621224, "康县"); tempCounty.put(621225, "西和县"); tempCounty.put(621226, "礼县"); tempCounty.put(621227, "徽县"); tempCounty.put(621228, "两当县"); tempCounty.put(622901, "临夏市"); tempCounty.put(622921, "临夏县"); tempCounty.put(622922, "康乐县"); tempCounty.put(622923, "永靖县"); tempCounty.put(622924, "广河县"); tempCounty.put(622925, "和政县"); tempCounty.put(622926, "东乡族自治县"); tempCounty.put(622927, "积石山保安族东乡族撒拉族自治县"); tempCounty.put(623001, "合作市"); tempCounty.put(623021, "临潭县"); tempCounty.put(623022, "卓尼县"); tempCounty.put(623023, "舟曲县"); tempCounty.put(623024, "迭部县"); tempCounty.put(623025, "玛曲县"); tempCounty.put(623026, "碌曲县"); tempCounty.put(623027, "夏河县"); tempCounty.put(630102, "城东区"); tempCounty.put(630103, "城中区"); tempCounty.put(630104, "城西区"); tempCounty.put(630105, "城北区"); tempCounty.put(630106, "湟中区"); tempCounty.put(630121, "大通回族土族自治县"); tempCounty.put(630123, "湟源县"); tempCounty.put(630202, "乐都区"); tempCounty.put(630203, "平安区"); 
tempCounty.put(630222, "民和回族土族自治县"); tempCounty.put(630223, "互助土族自治县"); tempCounty.put(630224, "化隆回族自治县"); tempCounty.put(630225, "循化撒拉族自治县"); tempCounty.put(632221, "门源回族自治县"); tempCounty.put(632222, "祁连县"); tempCounty.put(632223, "海晏县"); tempCounty.put(632224, "刚察县"); tempCounty.put(632321, "同仁县"); tempCounty.put(632322, "尖扎县"); tempCounty.put(632323, "泽库县"); tempCounty.put(632324, "河南蒙古族自治县"); tempCounty.put(632521, "共和县"); tempCounty.put(632522, "同德县"); tempCounty.put(632523, "贵德县"); tempCounty.put(632524, "兴海县"); tempCounty.put(632525, "贵南县"); tempCounty.put(632621, "玛沁县"); tempCounty.put(632622, "班玛县"); tempCounty.put(632623, "甘德县"); tempCounty.put(632624, "达日县"); tempCounty.put(632625, "久治县"); tempCounty.put(632626, "玛多县"); tempCounty.put(632701, "玉树市"); tempCounty.put(632722, "杂多县"); tempCounty.put(632723, "称多县"); tempCounty.put(632724, "治多县"); tempCounty.put(632725, "囊谦县"); tempCounty.put(632726, "曲麻莱县"); tempCounty.put(632801, "格尔木市"); tempCounty.put(632802, "德令哈市"); tempCounty.put(632803, "茫崖市"); tempCounty.put(632821, "乌兰县"); tempCounty.put(632822, "都兰县"); tempCounty.put(632823, "天峻县"); tempCounty.put(632857, "大柴旦行政委员会"); tempCounty.put(640104, "兴庆区"); tempCounty.put(640105, "西夏区"); tempCounty.put(640106, "金凤区"); tempCounty.put(640121, "永宁县"); tempCounty.put(640122, "贺兰县"); tempCounty.put(640181, "灵武市"); tempCounty.put(640202, "大武口区"); tempCounty.put(640205, "惠农区"); tempCounty.put(640221, "平罗县"); tempCounty.put(640302, "利通区"); tempCounty.put(640303, "红寺堡区"); tempCounty.put(640323, "盐池县"); tempCounty.put(640324, "同心县"); tempCounty.put(640381, "青铜峡市"); tempCounty.put(640402, "原州区"); tempCounty.put(640422, "西吉县"); tempCounty.put(640423, "隆德县"); tempCounty.put(640424, "泾源县"); tempCounty.put(640425, "彭阳县"); tempCounty.put(640502, "沙坡头区"); tempCounty.put(640521, "中宁县"); tempCounty.put(640522, "海原县"); tempCounty.put(650102, "天山区"); tempCounty.put(650103, "沙依巴克区"); tempCounty.put(650104, "新市区"); tempCounty.put(650105, "水磨沟区"); tempCounty.put(650106, "头屯河区"); 
tempCounty.put(650107, "达坂城区"); tempCounty.put(650109, "米东区"); tempCounty.put(650121, "乌鲁木齐县"); tempCounty.put(650202, "独山子区"); tempCounty.put(650203, "克拉玛依区"); tempCounty.put(650204, "白碱滩区"); tempCounty.put(650205, "乌尔禾区"); tempCounty.put(650402, "高昌区"); tempCounty.put(650421, "鄯善县"); tempCounty.put(650422, "托克逊县"); tempCounty.put(650502, "伊州区"); tempCounty.put(650521, "巴里坤哈萨克自治县"); tempCounty.put(650522, "伊吾县"); tempCounty.put(652301, "昌吉市"); tempCounty.put(652302, "阜康市"); tempCounty.put(652323, "呼图壁县"); tempCounty.put(652324, "玛纳斯县"); tempCounty.put(652325, "奇台县"); tempCounty.put(652327, "吉木萨尔县"); tempCounty.put(652328, "木垒哈萨克自治县"); tempCounty.put(652701, "博乐市"); tempCounty.put(652702, "阿拉山口市"); tempCounty.put(652722, "精河县"); tempCounty.put(652723, "温泉县"); tempCounty.put(652801, "库尔勒市"); tempCounty.put(652822, "轮台县"); tempCounty.put(652823, "尉犁县"); tempCounty.put(652824, "若羌县"); tempCounty.put(652825, "且末县"); tempCounty.put(652826, "焉耆回族自治县"); tempCounty.put(652827, "和静县"); tempCounty.put(652828, "和硕县"); tempCounty.put(652829, "博湖县"); tempCounty.put(652871, "库尔勒经济技术开发区"); tempCounty.put(652901, "阿克苏市"); tempCounty.put(652902, "库车市"); tempCounty.put(652922, "温宿县"); tempCounty.put(652924, "沙雅县"); tempCounty.put(652925, "新和县"); tempCounty.put(652926, "拜城县"); tempCounty.put(652927, "乌什县"); tempCounty.put(652928, "阿瓦提县"); tempCounty.put(652929, "柯坪县"); tempCounty.put(653001, "阿图什市"); tempCounty.put(653022, "阿克陶县"); tempCounty.put(653023, "阿合奇县"); tempCounty.put(653024, "乌恰县"); tempCounty.put(653101, "喀什市"); tempCounty.put(653121, "疏附县"); tempCounty.put(653122, "疏勒县"); tempCounty.put(653123, "英吉沙县"); tempCounty.put(653124, "泽普县"); tempCounty.put(653125, "莎车县"); tempCounty.put(653126, "叶城县"); tempCounty.put(653127, "麦盖提县"); tempCounty.put(653128, "岳普湖县"); tempCounty.put(653129, "伽师县"); tempCounty.put(653130, "巴楚县"); tempCounty.put(653131, "塔什库尔干塔吉克自治县"); tempCounty.put(653201, "和田市"); tempCounty.put(653221, "和田县"); tempCounty.put(653222, "墨玉县"); tempCounty.put(653223, 
"皮山县"); tempCounty.put(653224, "洛浦县"); tempCounty.put(653225, "策勒县"); tempCounty.put(653226, "于田县"); tempCounty.put(653227, "民丰县"); tempCounty.put(654002, "伊宁市"); tempCounty.put(654003, "奎屯市"); tempCounty.put(654004, "霍尔果斯市"); tempCounty.put(654021, "伊宁县"); tempCounty.put(654022, "察布查尔锡伯自治县"); tempCounty.put(654023, "霍城县"); tempCounty.put(654024, "巩留县"); tempCounty.put(654025, "新源县"); tempCounty.put(654026, "昭苏县"); tempCounty.put(654027, "特克斯县"); tempCounty.put(654028, "尼勒克县"); tempCounty.put(654201, "塔城市"); tempCounty.put(654202, "乌苏市"); tempCounty.put(654221, "额敏县"); tempCounty.put(654223, "沙湾县"); tempCounty.put(654224, "托里县"); tempCounty.put(654225, "裕民县"); tempCounty.put(654226, "和布克赛尔蒙古自治县"); tempCounty.put(654301, "阿勒泰市"); tempCounty.put(654321, "布尔津县"); tempCounty.put(654322, "富蕴县"); tempCounty.put(654323, "福海县"); tempCounty.put(654324, "哈巴河县"); tempCounty.put(654325, "青河县"); tempCounty.put(654326, "吉木乃县"); tempCounty.put(659001, "石河子市"); tempCounty.put(659002, "阿拉尔市"); tempCounty.put(659003, "图木舒克市"); tempCounty.put(659004, "五家渠市"); tempCounty.put(659005, "北屯市"); tempCounty.put(659006, "铁门关市"); tempCounty.put(659007, "双河市"); tempCounty.put(659008, "可克达拉市"); tempCounty.put(659009, "昆玉市"); tempCounty.put(659010, "胡杨河市"); COUNTY = Collections.unmodifiableSortedMap(tempCounty); } }
package com.alipay.api.response;

import com.alipay.api.internal.mapping.ApiField;

import com.alipay.api.AlipayResponse;

/**
 * ALIPAY API: mybank.credit.supplychain.trade.create response.
 *
 * @author auto create
 * @since 1.0, 2022-03-10 16:26:56
 */
public class MybankCreditSupplychainTradeCreateResponse extends AlipayResponse {

    private static final long serialVersionUID = 8687527736276799137L;

    /**
     * 业务事件受理的流水号,建议调用方保持此流水号,以方便后续业务处理
     */
    @ApiField("ev_seq_no")
    private String evSeqNo;

    /** Sets the acceptance sequence number returned by the business event. */
    public void setEvSeqNo(String evSeqNo) {
        this.evSeqNo = evSeqNo;
    }

    /** Returns the acceptance sequence number; callers should retain it for follow-up processing. */
    public String getEvSeqNo() {
        return this.evSeqNo;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.fop.fo.flow.table;

import java.util.ArrayList;
import java.util.List;

import org.apache.fop.fo.ValidationException;

/**
 * A row group builder accommodating a variable number of columns. More flexible, but less
 * efficient.
 *
 * <p>Every builder call is recorded as an {@link Event} and replayed against a
 * {@link FixedColRowGroupBuilder} once the table is finished and the final number of
 * columns is known.</p>
 */
class VariableColRowGroupBuilder extends RowGroupBuilder {

    VariableColRowGroupBuilder(Table t) {
        super(t);
    }

    /**
     * Each event is recorded and will be played once the table is finished, and the final
     * number of columns known.
     */
    private interface Event {
        /**
         * Plays this event
         *
         * @param rowGroupBuilder the delegate builder which will actually create the row
         * groups
         * @throws ValidationException if a row-spanning cell overflows its parent body
         */
        void play(RowGroupBuilder rowGroupBuilder) throws ValidationException;
    }

    /**
     * The queue of events sent to this builder, in arrival order.
     * Parameterized (was a raw List) so replay needs no cast; ArrayList is the
     * better fit for append-then-iterate usage.
     */
    private List<Event> events = new ArrayList<Event>();

    /** {@inheritDoc} */
    void addTableCell(final TableCell cell) {
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) {
                rowGroupBuilder.addTableCell(cell);
            }
        });
    }

    /** {@inheritDoc} */
    void startTableRow(final TableRow tableRow) {
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) {
                rowGroupBuilder.startTableRow(tableRow);
            }
        });
    }

    /** {@inheritDoc} */
    void endTableRow() {
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) {
                rowGroupBuilder.endTableRow();
            }
        });
    }

    /** {@inheritDoc} */
    void endRow(final TablePart part) {
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) {
                rowGroupBuilder.endRow(part);
            }
        });
    }

    /** {@inheritDoc} */
    void startTablePart(final TablePart part) {
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) {
                rowGroupBuilder.startTablePart(part);
            }
        });
    }

    /** {@inheritDoc} */
    void endTablePart() throws ValidationException {
        // TODO catch the ValidationException sooner?
        events.add(new Event() {
            public void play(RowGroupBuilder rowGroupBuilder) throws ValidationException {
                rowGroupBuilder.endTablePart();
            }
        });
    }

    /**
     * {@inheritDoc}
     *
     * <p>Replays all recorded events against a fixed-column delegate now that the
     * final column count of the table is known.</p>
     */
    void endTable() throws ValidationException {
        RowGroupBuilder delegate = new FixedColRowGroupBuilder(table);
        for (Event event : events) {
            event.play(delegate);
        }
        delegate.endTable();
    }
}
class sdVxI { public static void gYJhhq40Mq2 (String[] NYs7ZybzX_C) { boolean _pj27wR6ijZ5nW; void[] LZI4dyWhu = -!false.i9oXXEN44lGRI = null.uOuhD0(); void P = !true.rxWNLSWW; { if ( HFUksgL2ec7WF2().WBh) !CVEg1().tQZJ1UiZOb(); return; int[][] R; VQcBSo L; int[][] Zc; { boolean TzX4EnkIx; } int[][][][][][][] emdwrVw3Fs6U; { YC8aqHZ cm6mwRuDsr; } int Bsyhm5b9x; int G4PggKnUmx; boolean[] zRykVUSSVhu; { while ( ( -UUp7FCIjhn_gm.oY_X_9n5JX()).oREwo2ZW) while ( -( !22722591.EJXXWp).Y31fGzi) false[ -8.ntRyHZsRhA()]; } yRuuuH gU7v4YFPZm; boolean u7E5; if ( this.MqvWAqu()) return; if ( this[ -this.Yza]) if ( !!562398501[ zQaa().yoIwOU9B()]) ; b6D4Ntoy[][] pUSmo_jThlua1; return; rX_rcSbPMkONh[][] LMDI1; !new boolean[ new GffAg4V().XmMDY].oBL(); } void I; !-this[ !EQ._Z5FZHIU8NR]; int[][] ppMXvlBB__P4Z; while ( !!( ( !!--79.q6TJCi).C6LS9EbU()).yS1K) { if ( -true.ZPNooy5) return; } int ZPJG4smwv7ld; !b9V().S; mqsJqh_1BAy[] AI88HU78IA; CFDxk5_8v _yIWMRXJl9HPxB = false.x8l5I8yFBvzPB(); { return; SMTK[][][][][][] rlb3b5Si2NsV; new IU8()[ ( true.PnqwAx3oIqq).AncR4()]; ; while ( new int[ !true.IjuFa4()][ !woQb7W8c0i[ false[ -null.LdD()]]]) true.tWLXIWWK(); while ( !moIy9oTGJJlfQ[ new pYGFWhC().h()]) return; ; As[] l8oH; v1I[][] rBZ9Lunes5P; this[ new H8YjHfsbMaXj[ TXgzKsYAY()[ I9hZKRzJJnE[ null.NUeRWYy()]]].Wjeob0]; { int PDfS7_; } void wYb; false.GL(); ; } int[][][] p9hXyS; void[] Z; } public boolean[][][][][] MN7COeX8N654N; public static void ewDjOuPY2d8v0b (String[] V6n8d8jJ79) { return; boolean[] V0j89eEe; return -false.dOeYNfgsoG(); wlFyOEdHZsGU[] goVNrpKwV87z9 = false.wV2Y3YQy6A; void[][][] L3RcmY; } public q0_2W[][] NvqP9WJT; public static void MIikyNP (String[] aPSyPR3) { ; return; while ( 455506.r) if ( !---Xcux1XVB6().xaJM()) { void E; } int Aqv; while ( STEdUoM().X7ImAUWe0n_hXu()) ; return -!!-false[ new OEImxlwigY[ -BM[ ( false.QP0jXVCCPNQ()).Rfj6()]][ -false[ !null[ -CRL()[ new Gm3iBUCO0SW().aM7Pf]]]]]; { { new sV8[ u().NZM()].kl; } } } public boolean[][][][][] 
M2SNAuhjgXxqE () { void wQMMMB = false.T; int[][] imHAKJhfc4r00X = this.xxxDu; } public int[] ZuIlJ7NKjP; public static void DSY6rR8r0r02e (String[] pi8aCH) { while ( !-!this[ 1630004[ -this.sDladNm]]) if ( true[ false[ --!-false.GVYt6UfNo_O]]) return; ; void[] qsf2wLCQxnDD = ysxkA8URU.g; int mBc5ntdLB1 = !!-!-null[ new void[ --!--( false.YL()).YSKW9o86jo95sp].pPUak()] = !( -true[ !( true[ !false.T()])[ 20.ErkPDlEplOMuW()]]).HboQBn; fBUGwJ2G3[] EJ92 = false[ pOuCD4().iU_CfB2HLETmi] = !this.JjgGSWRm; { if ( new CGaAJFTZla5LvK().vCMkFIQUepG8) { return; } void[] MaIO6oW0Xu5o1H; ; ; NPOp.rBLJ8AknyxAj(); return; QmsYW4Af4jzQxn[][][] d; boolean pRa6q6l; return; -this.svjiPM; -!!new KII2bo4D().L4i8ke(); S3pYWGK[][] jToWGGkIK; ; if ( true.GiSi2_) ; void[] _DpeqxkNz; int PR33Cct7I; int ZuLvjZKN3OUJy9; VNcs_v_e9 TxXLIwS25TML6; EBwbpFqCOW[][] PxSzhO3Vc5uYhM; } void[] ajTRN5xHQ; int fL; { return; } int[] W; boolean[][] _Ld = -false.cw49b(); while ( !false.aV6DN0qbimf7()) { return; } ; { if ( null[ !null[ !-!new RXAid().PUUzyLWUQ3X]]) ; ZLqkx0fvbiZ[][] G; bkFxnd DE344tn; { if ( new boolean[ true[ new IV().vAdlk2]].LydT) while ( !91621.aqKAw0Xe) { void[] lwtZ7ppwWiq; } } int[][][] gkarQCjxz; { void EfTLFWfD5o6j; } iwrrKJDuGl0JN x7yTGGh31; { void[][] ANb; } boolean BMXdFxied; void[][] QRp; ; int[][][] A; return; OQ TtHByre; } if ( ---612580._cDsAE3WyirWSG()) ( true[ false[ false.KA4eSTC()]])[ this[ -false.n()]];else while ( -this[ ---!( -new VDlXoowRFd()[ ( Nlv.u27jP0gk).ALK7_UOjoKHZT])[ !null.Nvy]]) ; void szaOsjUgQh; } } class uUM3Ap5xpV07y { public static void Dy (String[] OQ) throws w3UQR { ; } public void Ch9Kl68k89Q2sz () throws ecrrkQgSiDAh { boolean ZE; return -961684[ !Knziv2.MXLHXBjqIsI5()]; f osMR; if ( new boolean[ 83027908.EOV][ -!( --3.hAVkBT()).kd()]) ;else ; if ( 94604894[ -new ggkOdast[ new boolean[ new void[ -jrS.qC()].Tr].PoQNi()][ !-new l2fXyiuS[ this.IuhI6U5zP3Z].IV()]]) { EcCdnt[] c; } boolean[][][] oI_j8jEn; { if ( true[ ( !new tm2IYPv0BCP[ --new 
edjmkOJxPGVAr()[ -!false.fLevuc3YFARb]].lohIGwR6).yC6OC1KVeW1]) ; zbi6bL7p__5hXe W45g; boolean[] U; return; return; if ( new TdOI68Md[ true[ !!!!this.OY9M6SCKcY23NF()]].cZG4wxyD8mO4n()) return; void Yj_HjY3ZhTg; if ( -s1xHHrcN[ new OoyXTW().VrGGJCJ()]) return; { int[] LrS2Jtc; } -true.jOroJAHY_; !!true.JOomz; boolean Ayij3rdjrge; if ( new int[ !!-!!-!!-true[ new svGldtQMLK8yX4[ !-!null.CT22H6G].bGUGIHRf]].vnmwMoisKX) while ( new sv3CxR5iNbSA().ICX4R3jUu5bl89()) { ; } if ( !this[ -ggpjPDJt2rgwb[ -false[ ---this[ this.A0Ea8IGrZgFKSu()]]]]) while ( -0396327[ !!true.JmtIsvED]) -!Vn.Lum7born11; ; int[][][][][] xGYinnpvON; } } public boolean[][] lDBrB; public hiN90CWUGUxsp EJ0avpWTN; public int[][][][][][][][] OF (MXQMRvi3S[][][] bbu5aDrUnn37T, void H_3l_GI, void[][][] dE5ewnnjzgYOZ, Dod7jOgOR3Jo[] yhrNOOQI, int f4TQPRLQN2uxW8, int DFVRPot_Gr, void[][][] EF7c) throws z46GM { int VQBb; int r7G8tGP0l; return; boolean vpyKK = !true.KkXG4R2aYZp() = -023[ NX()[ -!---new Kf()[ !--new JX16().kS]]]; wKdhmvlo6 Q9 = new Ks28tRgvcvKh()[ !-144[ !!XtWhl9LhZQ[ !this[ true.o5f()]]]]; } } class rzInlS_cJi2vFC { public void[][] LMA7dzGecuv () { if ( --new EVS9_4cN6fWd().MW) if ( fKhQobtNb0[ false.hoca_iNKg5ib65]) new int[ this.WMU3lJMLWFiBug()].Xq; } public static void PbqiU (String[] Sf_DWuJ) { { if ( !true.gfJJQZIhnkUbY()) { BmNfrAu[] gWS; } Hdo[][][] nMkQlQ6ypFGP; void n7XDDCwJll4S; ; void[] qCrLxi85Yo; boolean[] svasHMytmed4d; } void[][] d0J7Fsv6Qm; g2WkwgzqqY0T yH; ik OBbTSAq6i; } public static void F8GR1WvDvb (String[] uHKa3h686GXn) { if ( TqcNkkfagw.yq) if ( false.MI9Dy) !true.R;else ; while ( new Yj7()[ false.EdOqLBQ]) while ( -!null.xP70) i.OhGLnnXb; boolean yQKST1rpSbzK; while ( -new cIaU16wvnK_dy().qgcEP9Ygnb6()) if ( this[ -43277103[ !w8l8KQvwtPJzCD.aqRmq]]) while ( -new RDYMv8m[ -false.vGlYradofGx7][ --!--AmRK.skkc9AYPyRy]) { return; } wP().uw_c2PzGz5aBA; { _f2 d0bx; new void[ true.bzFAb].kQKHUCtZTUX2Hq; { void OAR8MgB0rH; } { while ( -false.oN59GTbTewY1yO) if ( 
230621.rB8rZzyAHr0B) 9681632.Hl8RGwV7(); } } return; return new boolean[ -!d4B84K[ 033502[ !-!false.pa]]].lNWvSa; { if ( -!!-new boolean[ -gqZB2HVK4[ -!new void[ -!-( 083668[ !new boolean[ !this.e].q6()])[ Uw_4Lk().f94J5d()]].y]].fw7EFluoHb()) { _srz4Zy t7Tfzm5WlPoc; } boolean s7GW; ; -true.HaNnmVBRmz; void[][][] zNJqLSDnCESs; boolean[] tCpuQ3u8a; boolean[] epJaW3gvOGS; void G7YT76ow; return; boolean[] NA; if ( -cKfseQYgp1Aid.UolCACf3yr) false.dvY7DfEepA; { if ( --new int[ -false.J2_0SVUwhEML()][ null.Tfn66x4xu]) ; } { while ( 03159753.wNDXnEEeUoa) while ( Lj().MzNghVhvFLYFJi()) ; } boolean[] G2O3k; ; } return; return; -fNuo[ -FmaUFndJR2().ld4bfNfV5]; if ( !false.G6igfbxUYhfXJ) !this[ !!false.b8seMS8b]; while ( !!null.bMPw0o5H) !( null.Zu3GfzOC9ci()).izno; } public static void z2yCz_l (String[] PHn) throws tNeK6hadx { CkGj59qimf6 m27; Cmg1 lWK; void ae4nNIk; ; true.zkNURnrC; boolean t2K0WAq359h; int Mn = new pwX().UlI() = new U_67veA73g().XMSKSraBnvx(); int FZ3V5478Qjh; boolean[][][][] b = new b()[ -this[ !!Q0vZ.p()]] = new boolean[ undKQDj8K()[ null.RWvvAG()]].ZH; int[] AMMQQeYUh = tSQt8h_BhcaT4.u5; void[][][] z3gv88VveH8; boolean[][][] bfStsbfEwSzksj = byWumAT_pzl2i[ !( false.wTImub).LQtnv3WGApl] = new gPrKSaSR_gibB()[ new tBGfDg1W[ new boolean[ --!-new hOGME3OlKt7b2()[ null.amhmhJ()]].YvLA9fdfngD89d()].p()]; void kRws; while ( --( CI7dbNNL2S()[ new WevQR[ Pb().JrSWUzap()].TB6DNljP()])[ Ty().I()]) 72072973[ !( !-YESKEgp9HdLPg().xoKpVmKtNxNeW)[ !( ----!vHjLZmnQ3f5zH().hq7Udisrpti1()).to2w8Ps()]]; ; int YNNnPb = new ISl().FW7MsT8A2(); int WmfevgZpJZq; null.dwGeMt_; } public boolean dmOtu; public void FZ (boolean q7CajSDqYQpMZ, int z5, void[] gJGZHJ_g010Q, i5Y QyJxxisFLo, TiekiFTJ[] qilLYg4, int[] pIxgmXx, FYAXOA4GQ2hV[] FzuC) { void ztWjYvM_qtz; if ( !new OlZ()[ true[ -!!new void[ new cKpJa()[ 3040430[ this.DqIBJnLY__ta2()]]][ true[ this.e65oh()]]]]) while ( -new nnkQOtOybvl()[ VMIX6rO9dlho()[ 4.w0dINv4t]]) null.NX2OTQhayBc(); void _MJNIARheQU; while ( this[ 
null.si6WYb]) while ( new Ye().Eo()) 54187.Wd3tts5a045r3M(); a9ah[] zaPR; ; return -!this.jFl9aCXobLIEEr; boolean sL5U3qM_lxtfyj; if ( -this[ null.iLkxL16DY8u]) ; true.ir; if ( -dr7v9Urkcjdf()[ this.OVkejc8A()]) while ( !!null.l13v7htE23ah()) -F().eozZP2hWRv;else while ( !( !-!7.dJTEpM3pf).IkknRirKPxDPy) { if ( this.F) ; } } public boolean[] GQdD8X (void H5hy, void ykY7e, boolean tmoVCr, boolean[] C, int[][][] pJ7Uo74aLhwth) { { { while ( new s_XBc4F0sT8HW().E4L_) while ( TGL().TvMPpGYzR277_3) new int[ ( new int[ 86256602[ new eee8v().z2q1iOxSS2tIW2]].R).j8w8].VtyC55HTh2(); } return; false.r0irrF; if ( -true.Q) if ( -!-CVjG1L0wE7d1R.mPf1auBnOHRW()) { boolean[][][][][][][][] G; } while ( -22103058.YXmosCxX3XIgG) !!!_uTW3IO6hZJKU()[ !this[ true[ new GEkhJxKJVDS().Pz9]]]; void[] Cf8u0G9w; int V2GpaP2; int mP3Su; boolean[][] lJTalg; int[][] PogzP6ORdWP6; nRHQEiCY6EB IFuadd1; boolean[] UIf; return; return; { F7pjYses622Oh jqR_CFq; } } ; return; XQwVxeIkWNmr7F[][] lu = new boolean[ -312091.Vkz9XZyXPD()].Tiih9TwCR4eD() = zBa82n3X6UUWF.jrfgEwy0vlfm; boolean s5GFEM2Z0y; ; null.AIM; RFlcZ3pyj51jTA[][][] zPij6LQi1EfI; int A5pg; void Ksa0s8QgRx5 = syUod1Ga0ra.W1j = false.Fz30NsJ1(); while ( --!new ARv9lX8TnKuhu().x1In()) ; o2Hdp73Px[] EoRCOW3fkMS9B = false.lQ61lttbs = 57534[ new n().TBb()]; L5oJioYAp8q DLPwtRCh6QBjC = -ztpX()[ true._8Tcw9om()] = -false[ tFgUbF7iKj.LQrbex6ZZL3()]; int DtzG6bIzu = !--87137[ padsMVrRf8o.LY_v60r7lSS0D]; mjTUO8v Gr9fJcEjCP; if ( -false._TiONojzJm_rkd()) ; } public boolean mJ (void[] cI5m, void x, HWOQLzGO gWUwRqz, int[] maA, boolean i, void[] UOQNH, boolean[][][][][][] CzD2lObWywc8) throws l5Gz2YARP6mF7 { if ( !-( !false[ -UG8().y82KSSOY6H]).KFvym8v0DuTJ1K()) ;else if ( !-!!-53258.wLiRRkS0) ; return; boolean xUeOyAZsM6oI = 43.ISu(); 48760.nIHpG1JH(); int b9WiPWwQ; return -!-false.rRWWE9mDKCx(); void[][][] WbvZ5KlE; while ( false[ !!-NGVN4f[ null[ false.XElzHoQR7V()]]]) { if ( !04844891[ !this.gH2m2()]) ; } int uP; void[] HUYtPLMpngH8 = 
!!!!-true.Teuw439CvN() = 4928621[ 59719.ZwyY2detg0UHXZ()]; return !!new int[ -!new void[ Cj2CUHP7e94o().n3F7bGm()].rh].cPsG(); return -new int[ 937184493[ false[ ( !true.zLdAZ()).S63bc9zICIRx()]]].QPV; boolean cBCTeiTPtY5 = false.zHI(); new CJ()[ !( -this[ false[ !null.sP]]).TJ1KbDTZLBY]; return; { Dd[][] c2UiiG; boolean[][] FoV39d2; } boolean tLf8Hi9rXwZ3h = !( new wQfE().o1SbMRro)[ !!412101.VacKNlX6i()]; boolean[] k = qdPmz3().J9CaC = -( !!!!-new O5koeY().j8c).RlnQ(); { if ( !!-y7()[ !!-!!this[ this.NJ7OMaElm7()]]) I2ZCS.J(); while ( new cmnZnDT()[ 35533204.JpCRQJh5Lv2_]) while ( !!-false.SZ0ln1Bro()) { int OrIw; } _phGwzR8Iquy0[][] yPY0fQoix0; int eSjZkOW; { gJmehiMaS[][] Z0kkYraLilc2; } void[] XLB6966qp7l; ; boolean[] q5S; } -new yTOcZ7T6xX8z()[ --!!-( --!( !new ike().ecns()).rK2U1ii_()).s26()]; } public boolean JdiOhS; public BIn7W8FKEriR[][] RX8mBzqQI; public static void xKd5FhuxoF (String[] fDttrWm) { -this.DHC; { void b_ZeSav0; int[] tVOCXdcr2MM; if ( !false.v1DgMgelmy3u) { int[] KLUUn4uzD; } return; hiNqPWRTG7 lF1uhFDLjOxr; return; } void[] geQid; zSut97lqgXz c0w; { { while ( !this[ this.T5HAf7CG9()]) ; } Xntvh4NQ0y_R[][][][] L918A8afQor; { return; } this.EwsTM; boolean[] O7cgfdt15H; fBEK4NVwTL[] DWpMn04riA; boolean J; T62W YsncsZ2bU; if ( new iGgVvnKf2().WM180TlKn1CA) return; int[][] SOqnQCyy3Gf; if ( !this[ null[ !--2188756._sJEi4KK_NUR()]]) if ( G8c[ !-PeYUyJt()[ !-!this.nejmqDMWo]]) return; } int[] ekA = -!new F3S7lgI().LVjbkg0h7Zc; !new Dd().Dffb5m_DO4V(); if ( -( !764281.w)[ --this.FqEp9HlwKCK0PA()]) if ( !!-!new boolean[ wyRDZlh()[ -ooAqS.uU79Ci5()]].JctoeuRz()) !!new boolean[ --new k9alChhYdFmWvI().HQbKK()].hgiFW411AFfES();else ; BOCNQV6v9EtNo B3Ugh = -new JfRj()[ MA6Z.Yx6mR_()]; } public static void BZ4AtMSQnf (String[] Op) { { while ( this[ true.wHjroaso4]) { return; } int[][][] YjyJiAvmX6T; return; boolean PmA4ryAoc; boolean Iwj8qeT; } while ( true[ ( 8209[ -new int[ !!( -!---true.N8ClKXvA3XK).sUd5r1coY].o0_AXOl2_])[ yHGe9()[ 
false.ZCkJQYC_RTU]]]) { int[][][] cdMKBHX; } { if ( !--!!yvfAcLxj().pc()) ; return; { ; } void A0IUR; P5iNTUzdVhMLgE GkM2QJFfp4SiV; } int VBwSoUy2Tc3d = null.xVYUhpdu(); int[] P2; boolean fM1bUkx; boolean[][][][] i0IulBk1hv = --!new int[ 36[ gtxSa.XlAoQaG]].HaLWVft8Htd; int LztPL; { int ioDF; ; int[][][] G; return; ; ; void[][] mw_Ra_bXlXqnP; void e1JymypUuJsst4; true.A; !new t[ -!this[ !new void[ new fIi0Y8().fyiC6S].VCOipWxX()]].NgsF(); return; void m6_AENC7JsuS; if ( !!!rFR().M7GBao_()) if ( 6836521.avCmlMCnPd) { boolean PXaxN9or6nsbN; } } if ( -( U6Xas7g()[ this.HV6D4CQJ]).QLML6i()) if ( --vT1t[ !814959.bty9()]) return; false.MA6cup0GgRZQw; return -false.zath7mUI8FN(); int UHLVh8pqH8 = !-----true.cdLQ(); ; while ( !this[ new oMEdkxuQFs9L()[ ----QNiBKN_XSklL.EDsgovNRQAF()]]) if ( new void[ !_0sRSJL()._txhrJz].BkYPo9Vl2Vkl()) if ( -true.w8dIFktC()) return; boolean ecMi714hK = -new ffcWD4sDT6().MMpa9by5tX8yXv() = ---!true.HlHLv0LiPJu; while ( new OEwD1f3VK_S[ new void[ this.dC]._uTzQcHoA].cKgPY()) return; } public void[][] gp3wTE3 () { void[] oKUWhOZ = -!true.G9 = 34[ null[ --false.tJwmANQhql]]; if ( this.UQQu_exNbM) -new int[ false[ true[ new boolean[ false.DtZTdFrqSE()][ !null[ !!null.N1QJbBi1V()]]]]].bNQ(); int b27; while ( !!false.TGsgjnSO5Cnh()) { boolean[] Y; } } public int[] FPL6H5EicOE () throws gaTj { boolean[] N29yr0S_; { boolean[] pIcQjtKyhg; } { { return; } --60.iX5g; s mJynNEW3Z; void[][][] B; null.Yara; } ; boolean pSE8Ki7pK = --!xi5T7Wq6f.Xf0(); boolean[][][][] hYShbBJT159jd = !!this[ !!-!!false.J8pE0midCi]; { if ( new int[ -new int[ !true[ null.Gfie65J]].RkEQOImm()].B7m()) ; int uC8CdKlU2pa; return; boolean uaCVFdr2esk; if ( -!this[ !!true.HH6IQXvt]) return; boolean[][] pz; ; if ( this.J) if ( true[ new iIQKEhft5()[ -this.YiYpPwB()]]) ----!cAfjmc.wRJmM6B09yV(); SjaPN HBXYeT5; !078792[ zi[ !null._9oi6ozJPWg0r7()]]; } if ( ----119.yTHaHKT0WBB) while ( -new BxVu()[ -33917.ggXBv()]) { void[] Fp9rS_MgnE; } boolean[][][][] to9Sf5jzrZ_C = !!10.TDw8 = !new 
tLXgbNBG8cez0b().nSdIHPme(); ; boolean[] vhMz; { void[][] d0T4bCM; !new zO2wUF3o1j().a0o8Voyi(); ; while ( RcXp[ true.W2tFkAcxH]) while ( this[ -!true[ new dMrS36x_l4().ITNGT07kLH24()]]) if ( !x[ KOAV3p()[ -!new boolean[ !null.Toci9sGi5_QJgD()].eFsN()]]) ; { if ( --false.pLMkwy()) while ( true.bA685KD) if ( I.G2tJ5dK74Z) while ( ---512260.gn6doDMA1CHgi) ; } while ( !-CmM[ true.Lk]) ; int z; if ( !new HDMBIu1oVTYc().yr0sJ) ; void BK; { void rZ59IhL6wN; } ebODCdH5oasvKT YbUikKsm9jBT; void Vb1LmMtP; } ; void nHczhcMTn; } public static void gf (String[] qp5jtFlAB) { ; int jOm_HU = -new boolean[ new int[ ( new void[ !null[ sw[ -( new blDQtI8Gf6Az1O()[ -zz().y7biyv1AlGkl])[ new xr9zv().yF]]]].XmSyCgAqWnm).dl4_vOA20fq].h1cTG_gX2VM3].LrRP() = z()[ true[ -68.jWd_i05M()]]; if ( new XV55LqDo6sHY().HReI8K) while ( !Bb32DmH8k2qhx().M()) while ( 610640851[ true[ !-uO3oa().nH0VFyd3jr]]) if ( !4618522.iSsq8yrY4e) ;else !!YcnmpY()[ null.eN()]; ; -m110QX5dcVE.V46UMP2H(); if ( NnFRKsj0f7().W) ( new void[ !new boolean[ this.k][ !true[ this[ -uPKn()[ -!null.g4bYu]]]]][ -null.BLLs6E]).n();else { return; } -!gQKu0().a6Io(); 8[ hUkW1().WV_05tia1()]; ; qPSFKjpvc08 Km3edoL; { while ( new _asb1oIOp0LD().yG()) ( -new void[ -SaswOw2geC5[ null.Mcy]].R8Hu2iP()).G5Mk5dPUmJTc; { int Ab_6Ll6LaiHou; } boolean tIl7UjA4iGE; -!-5[ -!!null[ true.Bqk6NN()]]; void oE3; { void[] vFDDVCMnSN7z; } boolean asB82; while ( !true[ new void[ !false.vL8uw].PgdzENBRcprMgF()]) if ( A.cgwYsR1R38sA()) return; void hPZw; return; { boolean[][][] UROTWHD1; } boolean[] dCxCzoTNXKmZr; } ; Fca53vrX0kBEG[][][] T4jC = !new void[ new BxolEd().WJznyk()].N = --true.f2qu(); xzshHnwAH YEhhY = false.y68FZAB = new int[ ( !!-!( this.HGN3u9HurpD3ha)[ -new bnKop0BoQP3nq0().IojWdi9_uq]).ZC].xRpy; int[][][][][][][] YiT_vFQxj = ---!true.SeqOWncFgXU() = !KLAvG2OHL6.w; boolean Nbi7ba = -true[ 33677270.xIhmonrCnA()] = -g19qHUq().fVMJoUcCeMGa3(); boolean[] UIYmI5cwS = !!!new boolean[ !yn.NFmF].YPz92LzdUZDf = null.uKnoLLfJt4; } } class Q5e { 
public void[] BCpm () throws GE0Cv { while ( true.ucMimRXl14UY) -null[ new int[ RWodhCNd()[ -!-!-!-!!null.HSErbTssF1Bd()]].cua]; jzSq8ao0Ah e1378FK = !new YbHVk33a3FL().OBt6luX(); void k46R = !-true.wuqjMYAOd7gU8a() = !false[ -!2.W1oeIjLZD6ZT()]; void NK9; GtGvt5 TbjIcrEkFVAz0 = new int[ false[ true.UxVu00()]][ true[ false.AQ()]] = !!!!321[ !!false.IAjFNa5sQcN]; { boolean[] yAGbowq; { void[][][] v5gs39bCF6; } ; return; boolean[][] quS42; int[] _qrOJ; W4Ga3kAiD[] hrhGRi; boolean[][] hLx; void BTFpJ4N7p; while ( -new int[ !-!D4SgQH()[ this.nxg4MeXamiBOY]].qYTl) false.R7N3Hy(); BRWMT8vGcJ2jw[][] B; while ( -98972[ --null[ !this[ 85176616.JVQ]]]) return; boolean[][] Gq2AQz8aJ; boolean[] qLrjljeDvX; ; ; { void MGOJuSoY0nQGN; } int D6IeDwwA; boolean u2; } while ( FfJR().y83VTBELYcLT()) { return; } int[][][][] zypMLZrnn1EsRo; return 15324.MB9In4LjakqJxd(); void lovv5 = -w6.wwbZ3lKj; int QQogTJuP6rx_; ; return -null.Ytc6Kq4(); while ( yHGSDuqAza3T.Y) if ( -!false.dwZub) { int _OfOTuVrKmK; } while ( new p80UF7RujT().mWw4QWiG()) ; return; } } class pmfhb4 { } class R { public EWiuChM7sD ZAoRqXpbsa; public static void ExwpKW (String[] xt) throws B5S { int[] TQ33YWlLm = -null.SSXjGiG = new T().V5Mz; -new qDvinJmK().S(); if ( !Y6d().EAzAvrGNXQ()) return;else p2yJOuCj.Q; ; if ( --9658.Lx()) new QZdG8x73NyTO()[ -!true.NWAndw9HEvlrq5]; if ( new Wzt().kDV46DsUs()) !null.H; DlmJBW().O5MTZ4I0(); } public int py (ydipH FOj3, void AR) throws Q_GU0MlLdBj { int Xuyb32DsEY; { nBcpKxwWhX[] x1pNjqVrLCO; return; void jp9nuiYsg; boolean _pyF; int[] XIS6m5GR; ( GvfkVhic5zAHbG.OXw).Zpqu; int[] BY; if ( ZiXZ_PEuWy[ --!new iTm()[ new int[ null[ new LJ0ThfjE_v32RF().BujcVlGtqPN3()]].sNyLR2()]]) { QYlU7EtgK9_ N5q2Y3l5U; } return; boolean H4DRbF5bA_WC; { t fKdE4LXz; } int[] q5QCdrW_Mrw6S; A6uT1 b5N9ve6; } { ; return; boolean r5tlEW; if ( 24463494[ !!false.iGmATUB]) { int YP482MYU99_O; } ; return; void[] _Qx_wxqhGIwxVe; return; while ( this.Xmv1ttjH()) while ( -r.xI()) if ( --!--false._) ; if ( 
!-!sl10U6yOXQF0v().I5o2ZINHvxk_()) return; boolean[][] eA7bNGbul_; !HiWk.xgG01f; { int[][] KZSzs7UiMwf; } void HLoo; void M5; while ( !ZjzQMiv41D.njTKU()) if ( -false.QMrcbISxpf()) while ( new styG[ true.mNzaIsNe_J7ml].Ez061e47Pdtln) return; { while ( !!bZ53Xs_cC.IuJUjvPxr()) return; } if ( false[ -true.VQIA8BOPz()]) { int tS4RN; } } ( lKA_yd5kubrz().ioH).iQ_3mBt1DPS; void qzc9hJz = !false.h0(); { ; int W7_sw; { void[] dJzR4C5MtWyQl2; } ; ky[] DP1; return; ; void _ial6Bja0Hk; while ( true.MBhgwrm8BVn()) { return; } int[] H; boolean DLvnvhjAwgh0L; -new Dyl().G; int[] MlXE8SrfgAJ; ; { QwqfMEsV R_vVbxS; } } if ( new GLMYNcd_rUnwL().UbSv()) while ( 8947205.UuhMOt_OCgan) return; while ( DH_0zziFbvWjd().FZeU0Jlw()) return; void[][][][][] sK0ZAH_4Fp = !true.SDdo7UaxT = false[ -!!true.APc]; boolean im = !-!( new int[ false.UMBueiwLZyH58].qKvVRhP2nM())[ v05ngFlhwsmNWa[ this.zlM()]]; } public boolean[] o4qctPVn3GtdC; public static void O0pR (String[] u3s) throws pFHyiV { while ( !( false[ --!( !null.ovu09RIA())[ iw8eOutzkJ7J()[ this[ true.f_y9OtolPs2t()]]]])[ WpraHouS8Hd()[ this.NeEYf19S7dgW()]]) return; !YjyMu4ytyl3l().ibwZlQ4T7m(); void[][] T; } public static void lulZPxlVn4qf (String[] EPHS7FR) throws Hyk4RP { gs1F91O1h Howag; void tzkhMst5K9f; boolean[] SM3gbAQ = !!true.w() = !true.hmFaotAzOHGJZT; { 76694181.HCNyLCD; { void NMIfpgXntxmDR; } { boolean NaepgLTMfpH; } GutaPHpA rsOXFenB; new WA0Da8UtCn0I()[ -true.oHPg()]; int RaGXBpcwWeN; while ( !new ZYxi01ad9()[ !new bcjr3lB().x7rm2Rc]) while ( this.J) return; int Z2Oz0Vrv; if ( -new Dx2gUPn6F1AF()[ new l().QsIfnGf()]) if ( wD3a0YVAqMI()[ ( !6.fM7XyUZ()).RqfHBiJpPDtB()]) return; void Jf7m5qcTCiBq; int[][][][] bWmTglPcnfr; if ( !new P6I3JWWB2HNBxE[ --false.qDPk7a6C][ qFG95k9p4ZSIO().X4da8pF3()]) if ( zA().eXZ7Yogv()) return; boolean DiDycelzd; mcvwP[] wlmhdiEpYz; ; int[][][] Ii8; ; -!!!26107022.ZCkAeX1xVsxx; if ( new D3()[ !( !975[ false.QwXv2eP()])[ !-!--ufcawDk().iW41yeAo2Z4]]) return; } { if ( !-new int[ 
false.uKtBocBhJEiD()].IztI0WhZf()) !false[ --!--false.DoNKsFS]; void[][][] BaFfKfaD; while ( false.AaFJBu6()) while ( !( !ADeQ6557RyOdz[ !-!-new K4JSk6Ce54b()[ false[ -!this[ cQD().Nmgyb]]]])[ new boolean[ N.HLytV1iAL0XyTf].IG]) return; int KTqzBtd; while ( -kl3HxEDU()[ !( -iamKR6[ 853[ -null[ this[ -!-147[ ( !( --null.PP0tchy8y0).cxUKP_UuNApR())[ -j8BpwUMIdfiss_().afqnh5PCuWSwm]]]]]]).wK1c5lIlIW]) -new void[ false.ztJnTYf_][ ---this.Z]; } int[] gC2Rr3SnYsDk = null[ true.K5CLbBG()]; return; return null[ Yzwl3().chzvrAFJgJT()]; } public boolean[][] BvcQ; } class MPQN1kIRD { public HoDfx[][][] hleD; public boolean[] y468pMa; public int kKvW; public static void wIIAUdpZOKmXg (String[] w55jCSXyJzyV) throws o { if ( new pSomVK8RV2N1()[ true[ false[ ----!72154.JgK1k]]]) ;else ; return 532552646.bP1bpOwv; if ( !!---!false.VBkL9HGrNGSsZy) if ( !-null[ -!-!this.qOGllpxB8sS()]) if ( !true.vzjfA9Dq1()) !821289400.HY4O3OT();else while ( this.nd_zQRI()) ; { qkZlS[] AuZxt; if ( 414[ !!!this.ActC()]) while ( --!null.h17PXcpiV) return; pnX9_NsEm().c6lZ7uIY3S; int MUcrlrs; if ( ( !false[ si7XXvQLuTF9u().lC1])[ this.f9I_CP56zZBMJN]) while ( 40.wEgTvLWWwi) -!false.SOlJ(); { LmNL UzQY4SEGYV; } return; ZyQsY6uOmd[] vjmbHhSDLet; while ( !this.vgql64()) ; return; } boolean TOU9u7slk9lXx = !( !-!!new void[ !new ZSjxqhHt5c()._zzIUugI_iI].jipMT6).uy18ffsOVU; int yKfDZj_bM99s = -new libZh8wcnnaLi[ -422071[ !-true.ONxxamUJC()]].ieZryRYmjEYCk6() = false[ !new jWpir().znrz92g_q]; !true.dYMap(); if ( this[ !!true.wC10trf8E4P]) while ( -35[ --( aUjewnvW_ePVSa().Jkq1Uto).mLm9N_PX0()]) if ( false.cNOJ9HmAuwSkAp) return; int AMm6p_P4f = !a7CLDspxL.yqMnfY0 = ( false.SshAA).a58nU1J6anYM(); TyLVNTpHB4AJ9g VqaQ5emw1d; boolean mykuW4; { boolean[] Ea; { return; } int dLMx5Ca5rd8Lv; return; int[] yz; boolean XWS; int s5I; while ( tyUskRqL.hFDiH4rXFcJ6d) ; enPAH6zG3 zloA; boolean[] YLn67; void OM5ych4GICJ27; void uE; return; f Xhq2neHmGpLOEO; -!VyXDPA[ false.VwZCT3zcReUL()]; { void[] KoBiEWbuNz11O; } int 
BKMLQ; ; } if ( Nk()[ null.lGjuQjj9()]) ;else ; int ql5jFCAQaO2RS = !-346244240[ !!-aU_jsuy.lHhXHTmPW0t] = this.hJU(); null[ !!this[ -true[ -true.V()]]]; return; if ( ( -----this[ -!--false[ false.DCwipijk12]]).lCKtbebh) { if ( -true.GWWO7g6XHVWVL()) { boolean TZu2YYbjOZTqWE; } } } public void qng_fAjkLzH (void Ep34cZJYxxK) { while ( new hYgO6pPlKUn3kB()[ !65406.gxPpKrCAJWHB4()]) { while ( new uwDoUcvO1U9[ null.iUSQAq1][ -j6mPJDGu0SnHNd[ null[ -this[ new boolean[ !fTkixsZEm3E_().ktD5Cx].JMJlf65l11MF]]]]) false.XakbbGPLn39QqP(); } ; if ( 2307[ -( false.LyvYmptVU).IyL]) { boolean L7YnCXoATqvX7; }else 57421.z0WuK26xEbTT; fj[] UvisAauoq; void[] nFvCi1MMbeOm = false.Xy(); } public static void PVY3Y323 (String[] QZzCQg9JOKgWz) throws _2ep3kH3 { if ( new int[ !true[ 404714878[ false.BWG]]][ -!null.EBunUYO04uqCzU()]) if ( !new Sme1by7Fx().g3N()) return; return 6368784.QaLT(); void[][][][][] w_Ksddq5 = null[ -( !-( new int[ !--079595.YIHTyf2Q9zSNf][ !null.C4zXIJ]).KcTWI())[ -this.SqLtAkOrWv26()]]; } public static void RdTttWbo_B (String[] ywk120blqEPf) throws L3zY8YmsUUVgY8 { while ( new BuAt[ -i01JB().vNGTx()].OrKQGH_DBbxvM) false.qM_srs0ogbK(); new phlt5O7E0()[ null[ true[ 738[ true._V]]]]; XXQdicQ4hFm[] N7zHE5_wr9MY = !-new JjaZizez_yP().Z9 = 38818[ new ChCC0Wpb().RRAZ]; while ( --false.BWoeX()) { boolean[] x4Q; } { int[] kqk0NVM90d; !!-wSVp.Cs1o45cxKDJulp; } -JTBlhi4ZkS[ 35111.JRyJzB6lMXao()]; return 51[ false.J]; ; } public r10GFTZ8Jjxkn_ SrZ6V4EbZr (tKm aiQA) throws RP5h { void eanLx5myZA9; } public static void cSyS (String[] vHMQloLJTDns1) { void _8zI0fDlLG; false[ !new q0N3().X9()]; while ( -true[ -new vhXgksDmkiAt5_[ !false[ vFVFV9PMPv.G6WB5r()]].dKjUkjDkPhqJ4G]) { boolean J9; } ; int[] _kr4B = -null.AH1lEsyOQt() = -!!false.HDV(); while ( false.q0VWey) !!Z.vam60M; boolean EJeeoHCGG5Wn7D = !-!!false.c; return; { boolean GLpR2yaPYc4; void hqq4v; ; blhd[][] Wq; IzrcAKaiWTz0Sb[][] hUpkN_; void fBFbpqNWx; if ( new QOIzb39[ !ZHL0.T1hLuUJ()][ -!false[ 
!-!-false.Sok1e37Q]]) while ( false.j6d()) -new nACNDBDd().OLP7AWxa(); while ( this.P) while ( -!1[ -4.A()]) while ( false.FQ()) { if ( !-this.lDn) { if ( -new boolean[ M[ null.NIFpJ]]._) while ( fk8Rkwt_XTYH5().sluG0p5Aam5Jc()) return; } } void[][] AS; boolean[][][] R4OZxnb; { while ( !-new P1().bv0R6z4zRrW4h) if ( !-true.F65O4ulB) ; } if ( -null.PLYsoVo7K4a()) if ( this.fWKW_31zNd()) this.bo09IqCx; void[] ILe8wVJx1rL; } return; boolean[] bBRrq = !!--false.Zzfu4iJxYUH5b; ; boolean SAQY2adSWEGW = ---this.gzrNQkuy(); Xrc69mQ8[][] qN2P0pTC3oTh7H = ----!oVSZpsE1T().W; } public boolean oe; public static void nNk0K2Rr (String[] G8WUH0) throws UEqqF4aNiJN6bK { dJ1[] h = ---this.dcJa1F(); void pq7c2Br9RNSk; } public static void xiLi (String[] FXK98) throws a { if ( new _H5Ruclowv()._voZN2O0SNMb) ; { n[] G5xf14u78Jz7M; int S_qilKW9_; if ( !new _w58j8Ns22().aOMNH6BLc0UD()) ; int oUPrGhk9my; false.q28uehB4bcas7i; int x8k; boolean y; int[][][] UnmlE_TTUX_; { V54xFECPUu rWz; } boolean ENkq; boolean[] WTyoE; boolean WHaDE; } void[][][] __ = this.j(); while ( !new void[ 8[ -null[ true.Xf6()]]].ZmF()) if ( null.lpYB3digOBWOLw()) ; ; void bari1 = --!!( KHmNOzltihkg0()[ !-null[ true[ 86328237.l()]]])[ !bsDgy12UvyGm9_().h0b8lM72PY4]; return; if ( !-U.XgGLX) if ( -!-( -new int[ 07[ new void[ !!true[ --!new boolean[ new t8hfxZcqDGu().TpaU][ 0741[ new u().Qaag6S5dIOQ]]]].h_wu8aFuatgsBl]][ !!new int[ this.A8iprH].HYd()]).XhoheTyBc) return; return !this[ false.RTdy_A]; while ( -!-( jyf3xiN1v8O().N8FsyWI2Ep)[ -null.Sm57EmIJ]) while ( !( !this.uc()).MqXUyyKDF()) if ( 5062615.o6d8iTySEE) return; if ( 1.MMyP3zoLS_) if ( --!!!-this[ -oVI_jejKyoNv().S0GZ6l5cAov7Y2()]) return; !!-this[ !VGesLRSe9YDAE5[ new int[ !-this[ quyVN3.ca212Sv5lbq9]].TvLGpakAp]]; int[] MaFW = this.HHXN9F058h3() = -!!!!new boolean[ new ph4H7mOQpsnZ[ new void[ !!-null.__sYD18Et86()].hIxJb0sfdGAjvV].DO3wUG()][ HZHDVH9.mSjp2geW5j()]; { boolean Wo7E; femBDFBmoODA[] ysOVYHlbg1FvDC; void[][][][][] AcFV; void s7; } } public 
boolean a; public static void RPzlqeu7z (String[] oh) throws F { int[] p = new yAFpxlTeWcUyI().oHX22JZd(); void qVabouE4GrY; while ( null[ !!new v().VH()]) ; yVyKxsGcZ6 R4DDUrkrmwTHUc = false.USuDw5JRu = -null.DJMZl(); while ( --new yos9uLz().Q5VEC_()) 54424.BpM1G9JFz(); { void[][] s71dtN; return; while ( !45.cPvR_o8J26b) if ( false[ ( null[ -F.Ts3Ffk()]).cgM3xqkoE]) --!false.sSx67lzF0(); void Qz; return; return; ( ---new a2FQoNY0()[ ----( !-hC()[ !y0lpSxxIkbtfEt().yCzKi1vGofTz()])[ ---this[ --null.MvmrKbp6ifAy]]]).T; boolean bHSa; while ( new ofG0erGfrFn().QdxSgFa6Ss) while ( ( -this.r()).lB()) QmvgQ[ --!!-null[ this.Cq5m]]; !pwA.aVvD3oZDyCEFD; x1[] d0; int[] OAhS27Qxvqa0m; true.NS6fGXh5; } Qxj6iClJ[] ihpjdXs; null[ true[ rGC27xLcvSb.vaV]]; !( 2.T).rlc_MSCsMcCG; if ( -6639.cibUoJFJSVb1z) while ( !wDF().a8J8DOVa1()) return;else while ( this.RVNH7b2()) if ( -null[ -X_7c4HB77zS.WzSDlcT]) null.o2_fCdc8ZO(); !true[ !iq().a5w]; while ( true.GhL7t()) if ( !new void[ W.COO6I7fzk9()].sm52e()) { void XoAZVc8; } void Kfsh8; ; void odRAEjJ = this.V; boolean hYp67 = !!false.JnP7R7zg(); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.sql.SqlTestUtils; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Objects; public class CurrentDateTests extends AbstractNodeTestCase<CurrentDate, Expression> { public static CurrentDate randomCurrentDate() { return new CurrentDate(Source.EMPTY, SqlTestUtils.randomConfiguration()); } @Override protected CurrentDate randomInstance() { return randomCurrentDate(); } @Override protected CurrentDate copy(CurrentDate instance) { return new CurrentDate(instance.source(), instance.configuration()); } @Override protected CurrentDate mutate(CurrentDate instance) { ZonedDateTime now = instance.configuration().now(); ZoneId mutatedZoneId = randomValueOtherThanMany(o -> Objects.equals(now.getOffset(), o.getRules().getOffset(now.toInstant())), () -> randomZone()); return new CurrentDate(instance.source(), SqlTestUtils.randomConfiguration(mutatedZoneId)); } @Override public void testTransform() { } @Override public void testReplaceChildren() { } }
package org.odk.collect.android.utilities; import org.junit.Assert; import org.junit.Test; public class CSVUtilsTest { @Test public void testEscapeDoubleQuote() { Assert.assertNull(CSVUtils.escapeDoubleQuote(null)); Assert.assertEquals("", CSVUtils.escapeDoubleQuote("")); Assert.assertEquals("no quotes", CSVUtils.escapeDoubleQuote("no quotes")); Assert.assertEquals("string with \"\"quotes\"\"", CSVUtils.escapeDoubleQuote("string with \"quotes\"")); } @Test public void testQuoteString() { Assert.assertNull(CSVUtils.quoteString(null)); Assert.assertEquals("\"\"", CSVUtils.quoteString("")); Assert.assertEquals("\"string\"", CSVUtils.quoteString("string")); Assert.assertEquals("\"string with \"quotes\"\"", CSVUtils.quoteString("string with \"quotes\"")); } @Test public void testGetEscapedValueForCsv() { Assert.assertNull(CSVUtils.getEscapedValueForCsv(null)); Assert.assertEquals("\"\"", CSVUtils.getEscapedValueForCsv("")); Assert.assertEquals("\"string\"", CSVUtils.getEscapedValueForCsv("string")); Assert.assertEquals("\"string with \"\"quotes\"\"\"", CSVUtils.getEscapedValueForCsv("string with \"quotes\"")); } }
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.clustering.infinispan.subsystem; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Stream; import javax.xml.stream.XMLStreamException; import org.infinispan.globalstate.ConfigurationStorage; import org.infinispan.security.mappers.ClusterRoleMapper; import org.infinispan.security.mappers.CommonNameRoleMapper; import org.infinispan.security.mappers.IdentityRoleMapper; import org.jboss.as.controller.AttributeDefinition; import org.jboss.as.controller.persistence.SubsystemMarshallingContext; import org.jboss.dmr.ModelNode; import org.jboss.dmr.ModelType; import org.jboss.dmr.Property; import org.jboss.staxmapper.XMLElementWriter; import org.jboss.staxmapper.XMLExtendedStreamWriter; /** * XML writer for current Infinispan subsystem schema version. * @author Paul Ferraro * @author Richard Achmatowicz (c) 2011 Red Hat Inc. 
* @author Tristan Tarrant */ public class InfinispanSubsystemXMLWriter implements XMLElementWriter<SubsystemMarshallingContext> { public static final XMLElementWriter<SubsystemMarshallingContext> INSTANCE = new InfinispanSubsystemXMLWriter(); /** * {@inheritDoc} * @see org.jboss.staxmapper.XMLElementWriter#writeContent(org.jboss.staxmapper.XMLExtendedStreamWriter, java.lang.Object) */ @Override public void writeContent(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { context.startSubsystemElement(InfinispanSchema.CURRENT.getNamespaceUri(), false); ModelNode model = context.getModelNode(); if (model.isDefined()) { for (Property entry: model.get(ModelKeys.CACHE_CONTAINER).asPropertyList()) { String containerName = entry.getName(); ModelNode container = entry.getValue(); writer.writeStartElement(Element.CACHE_CONTAINER.getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), containerName); // AS7-3488 make default-cache a non required attribute // this.writeRequired(writer, Attribute.DEFAULT_CACHE, container, ModelKeys.DEFAULT_CACHE); this.writeListAsAttribute(writer, Attribute.ALIASES, container, ModelKeys.ALIASES); this.writeOptional(writer, Attribute.DEFAULT_CACHE, container, ModelKeys.DEFAULT_CACHE); this.writeOptional(writer, Attribute.JNDI_NAME, container, ModelKeys.JNDI_NAME); this.writeOptional(writer, Attribute.START, container, ModelKeys.START); this.writeOptional(writer, Attribute.MODULE, container, ModelKeys.MODULE); this.writeOptional(writer, Attribute.STATISTICS, container, ModelKeys.STATISTICS); if (container.hasDefined(ModelKeys.TRANSPORT)) { writer.writeStartElement(Element.TRANSPORT.getLocalName()); ModelNode transport = container.get(ModelKeys.TRANSPORT, ModelKeys.TRANSPORT_NAME); this.writeOptional(writer, Attribute.CHANNEL, transport, ModelKeys.CHANNEL); this.writeOptional(writer, Attribute.LOCK_TIMEOUT, transport, ModelKeys.LOCK_TIMEOUT); this.writeOptional(writer, 
Attribute.STRICT_PEER_TO_PEER, transport, ModelKeys.STRICT_PEER_TO_PEER); this.writeOptional(writer, Attribute.INITIAL_CLUSTER_SIZE, transport, ModelKeys.INITIAL_CLUSTER_SIZE); this.writeOptional(writer, Attribute.INITIAL_CLUSTER_TIMEOUT, transport, ModelKeys.INITIAL_CLUSTER_TIMEOUT); writer.writeEndElement(); } if (container.hasDefined(ModelKeys.SECURITY)) { writer.writeStartElement(Element.SECURITY.getLocalName()); ModelNode security = container.get(ModelKeys.SECURITY, ModelKeys.SECURITY_NAME); if (security.hasDefined(ModelKeys.AUTHORIZATION)) { writer.writeStartElement(Element.AUTHORIZATION.getLocalName()); ModelNode authorization = security.get(ModelKeys.AUTHORIZATION, ModelKeys.AUTHORIZATION_NAME); if (authorization.hasDefined(ModelKeys.MAPPER)) { String mapper = authorization.get(ModelKeys.MAPPER).asString(); if (CommonNameRoleMapper.class.getName().equals(mapper)) { writer.writeEmptyElement(Element.COMMON_NAME_ROLE_MAPPER.getLocalName()); } else if (ClusterRoleMapper.class.getName().equals(mapper)) { writer.writeEmptyElement(Element.CLUSTER_ROLE_MAPPER.getLocalName()); } else if (IdentityRoleMapper.class.getName().equals(mapper)) { writer.writeEmptyElement(Element.IDENTITY_ROLE_MAPPER.getLocalName()); } else { writer.writeStartElement(Element.CUSTOM_ROLE_MAPPER.getLocalName()); writer.writeAttribute(Attribute.CLASS.getLocalName(), mapper); writer.writeEndElement(); } } ModelNode roles = authorization.get(ModelKeys.ROLE); if (roles.isDefined()) { for (ModelNode roleNode : roles.asList()) { ModelNode role = roleNode.get(0); writer.writeStartElement(Element.ROLE.getLocalName()); AuthorizationRoleResource.NAME.marshallAsAttribute(role, writer); this.writeListAsAttribute(writer, Attribute.PERMISSIONS, role, ModelKeys.PERMISSIONS); writer.writeEndElement(); } } writer.writeEndElement(); } writer.writeEndElement(); } if (container.hasDefined(ModelKeys.GLOBAL_STATE)) { writer.writeStartElement(Element.GLOBAL_STATE.getLocalName()); ModelNode globalState = 
container.get(ModelKeys.GLOBAL_STATE, ModelKeys.GLOBAL_STATE_NAME); writeStatePathElement(Element.PERSISTENT_LOCATION, ModelKeys.PERSISTENT_LOCATION, writer, globalState); writeStatePathElement(Element.SHARED_PERSISTENT_LOCATION, ModelKeys.SHARED_PERSISTENT_LOCATION, writer, globalState); writeStatePathElement(Element.TEMPORARY_LOCATION, ModelKeys.TEMPORARY_LOCATION, writer, globalState); if (globalState.hasDefined(ModelKeys.CONFIGURATION_STORAGE)) { ConfigurationStorage configurationStorage = ConfigurationStorage.valueOf(globalState.get(ModelKeys.CONFIGURATION_STORAGE).asString()); switch (configurationStorage) { case IMMUTABLE: writer.writeEmptyElement(Element.IMMUTABLE_CONFIGURATION_STORAGE.getLocalName()); break; case VOLATILE: writer.writeEmptyElement(Element.VOLATILE_CONFIGURATION_STORAGE.getLocalName()); break; case OVERLAY: writer.writeEmptyElement(Element.OVERLAY_CONFIGURATION_STORAGE.getLocalName()); break; case MANAGED: writer.writeEmptyElement(Element.MANAGED_CONFIGURATION_STORAGE.getLocalName()); break; case CUSTOM: writer.writeStartElement(Element.CUSTOM_CONFIGURATION_STORAGE.getLocalName()); writer.writeAttribute(Attribute.CLASS.getLocalName(), globalState.get(ModelKeys.CONFIGURATION_STORAGE_CLASS).asString()); writer.writeEndElement(); break; } } writer.writeEndElement(); } // write any configured thread pools if (container.hasDefined(ThreadPoolResource.WILDCARD_PATH.getKey())) { writeThreadPoolElements(Element.ASYNC_OPERATIONS_THREAD_POOL, ThreadPoolResource.ASYNC_OPERATIONS, writer, container); writeScheduledThreadPoolElements(Element.EXPIRATION_THREAD_POOL, ScheduledThreadPoolResource.EXPIRATION, writer, container); writeThreadPoolElements(Element.LISTENER_THREAD_POOL, ThreadPoolResource.LISTENER, writer, container); writeThreadPoolElements(Element.PERSISTENCE_THREAD_POOL, ThreadPoolResource.PERSISTENCE, writer, container); writeThreadPoolElements(Element.REMOTE_COMMAND_THREAD_POOL, ThreadPoolResource.REMOTE_COMMAND, writer, container); 
writeScheduledThreadPoolElements(Element.REPLICATION_QUEUE_THREAD_POOL, ScheduledThreadPoolResource.REPLICATION_QUEUE, writer, container); writeThreadPoolElements(Element.STATE_TRANSFER_THREAD_POOL, ThreadPoolResource.STATE_TRANSFER, writer, container); writeThreadPoolElements(Element.TRANSPORT_THREAD_POOL, ThreadPoolResource.TRANSPORT, writer, container); } // write modules if (container.hasDefined(ModelKeys.MODULES)) { writer.writeStartElement(Element.MODULES.getLocalName()); ModelNode modules = container.get(ModelKeys.MODULES, ModelKeys.MODULES_NAME, ModelKeys.MODULE); for (ModelNode moduleNode : modules.asList()) { if (moduleNode.isDefined()) { ModelNode modelNode = moduleNode.get(0); writer.writeStartElement(Element.MODULE.getLocalName()); writeAttribute(writer, modelNode, CacheContainerModuleResource.NAME); if (modelNode.hasDefined(ModelKeys.SLOT)) { writeAttribute(writer, modelNode, CacheContainerModuleResource.SLOT); } writer.writeEndElement(); } } writer.writeEndElement(); } ModelNode configurations = container.get(ModelKeys.CONFIGURATIONS, ModelKeys.CONFIGURATIONS_NAME); // write any existent cache types processCacheConfiguration(writer, container, configurations, ModelKeys.LOCAL_CACHE); processCacheConfiguration(writer, container, configurations, ModelKeys.INVALIDATION_CACHE); processCacheConfiguration(writer, container, configurations, ModelKeys.REPLICATED_CACHE); processCacheConfiguration(writer, container, configurations, ModelKeys.DISTRIBUTED_CACHE); // counters processCounterConfigurations(writer, container); writer.writeEndElement(); } } writer.writeEndElement(); } private void writeStatePathElement(Element element, String name, XMLExtendedStreamWriter writer, ModelNode node) throws XMLStreamException { if (node.hasDefined(name)) { ModelNode pathNode = node.get(name); writer.writeStartElement(element.getLocalName()); writeAttribute(writer, pathNode, GlobalStateResource.PATH); writeOptional(writer, Attribute.RELATIVE_TO, pathNode, 
ModelKeys.RELATIVE_TO); writer.writeEndElement(); } } private static void writeThreadPoolElements(Element element, ThreadPoolResource pool, XMLExtendedStreamWriter writer, ModelNode container) throws XMLStreamException { if (container.get(pool.getPathElement().getKey()).hasDefined(pool.getPathElement().getValue())) { ModelNode threadPool = container.get(pool.getPathElement().getKeyValuePair()); if (hasDefined(threadPool, pool.getAttributes())) { writer.writeStartElement(element.getLocalName()); writeAttributes(writer, threadPool, pool.getAttributes()); writer.writeEndElement(); } } } private static void writeScheduledThreadPoolElements(Element element, ScheduledThreadPoolResource pool, XMLExtendedStreamWriter writer, ModelNode container) throws XMLStreamException { if (container.get(pool.getPathElement().getKey()).hasDefined(pool.getPathElement().getValue())) { ModelNode threadPool = container.get(pool.getPathElement().getKeyValuePair()); if (hasDefined(threadPool, pool.getAttributes())) { writer.writeStartElement(element.getLocalName()); writeAttributes(writer, threadPool, pool.getAttributes()); writer.writeEndElement(); } } } private void processCounterConfigurations(XMLExtendedStreamWriter writer, ModelNode container) throws XMLStreamException { if (container.hasDefined(ModelKeys.COUNTERS)) { writer.writeStartElement(Element.COUNTERS.getLocalName()); //counters element and its attributes ModelNode counterRoot = container.get(ModelKeys.COUNTERS, ModelKeys.COUNTERS_NAME); this.writeOptional(writer, Attribute.RELIABILITY, counterRoot, ModelKeys.RELIABILITY); this.writeOptional(writer, Attribute.NUM_OWNERS, counterRoot, ModelKeys.NUM_OWNERS); //all counters configurations processStrongCounterConfigurations(writer, counterRoot.get(ModelKeys.STRONG_COUNTER)); processWeakCounterConfigurations(writer, counterRoot.get(ModelKeys.WEAK_COUNTER)); writer.writeEndElement(); } } private void processWeakCounterConfigurations(XMLExtendedStreamWriter writer, ModelNode 
configurations) throws XMLStreamException { if (configurations != null && configurations.isDefined()) { for (Property e : configurations.asPropertyList()) { processWeakCounterConfiguration(writer, e.getValue()); } } } private void processStrongCounterConfigurations(XMLExtendedStreamWriter writer, ModelNode configurations) throws XMLStreamException { if (configurations != null && configurations.isDefined()) { for (Property e : configurations.asPropertyList()) { processStrongCounterConfiguration(writer, e.getValue()); } } } private void processWeakCounterConfiguration(XMLExtendedStreamWriter writer, ModelNode weakConfiguration) throws XMLStreamException { writer.writeStartElement(Element.WEAK_COUNTER.getLocalName()); this.writeRequired(writer, Attribute.NAME, weakConfiguration, ModelKeys.NAME); this.writeOptional(writer, Attribute.INITIAL_VALUE, weakConfiguration, ModelKeys.INITIAL_VALUE); this.writeOptional(writer, Attribute.STORAGE, weakConfiguration, ModelKeys.STORAGE); this.writeOptional(writer, Attribute.CONCURRENCY_LEVEL, weakConfiguration, ModelKeys.CONCURRENCY_LEVEL); writer.writeEndElement(); } private void processStrongCounterConfiguration(XMLExtendedStreamWriter writer, ModelNode strongConfiguration) throws XMLStreamException { writer.writeStartElement(Element.STRONG_COUNTER.getLocalName()); this.writeRequired(writer, Attribute.NAME, strongConfiguration, ModelKeys.NAME); this.writeOptional(writer, Attribute.INITIAL_VALUE, strongConfiguration, ModelKeys.INITIAL_VALUE); this.writeOptional(writer, Attribute.STORAGE, strongConfiguration, ModelKeys.STORAGE); if (strongConfiguration.hasDefined(ModelKeys.LOWER_BOUND)) { writer.writeStartElement(Element.LOWER_BOUND.getLocalName()); this.writeRequired(writer, Attribute.VALUE, strongConfiguration, ModelKeys.LOWER_BOUND); writer.writeEndElement(); } if (strongConfiguration.hasDefined(ModelKeys.UPPER_BOUND)) { writer.writeStartElement(Element.UPPER_BOUND.getLocalName()); this.writeRequired(writer, Attribute.VALUE, 
strongConfiguration, ModelKeys.UPPER_BOUND); writer.writeEndElement(); } writer.writeEndElement(); } private void processCacheConfiguration(XMLExtendedStreamWriter writer, ModelNode container, ModelNode configurations, String cacheType) throws XMLStreamException { String cacheConfigurationType = cacheType + ModelKeys.CONFIGURATION_SUFFIX; Map<String, List<String>> configurationMappings = new HashMap<>(); if (container.get(cacheType).isDefined()) { for (Property cacheEntry : container.get(cacheType).asPropertyList()) { String cacheName = cacheEntry.getName(); String configurationName = cacheEntry.getValue().get(ModelKeys.CONFIGURATION).asString(); configurationMappings.compute(configurationName, (k, v) -> { if (v == null) { v = new ArrayList<>(); } v.add(cacheName); return v; }); } } if (configurations.get(cacheConfigurationType).isDefined()) { for (Property cacheEntry : configurations.get(cacheConfigurationType).asPropertyList()) { String name = cacheEntry.getName(); ModelNode cacheConfiguration = cacheEntry.getValue(); Element element; boolean identity = false; List<String> caches = configurationMappings.get(name); if (caches != null && caches.size() == 1 && caches.get(0).equals(name)) { element = Element.forName(cacheType); identity = true; } else { element = Element.forName(cacheConfigurationType); } writer.writeStartElement(element.getLocalName()); // write identifier before other attributes writer.writeAttribute(Attribute.NAME.getLocalName(), name); switch(cacheType) { case ModelKeys.DISTRIBUTED_CACHE: processDistributedCacheAttributes(writer, cacheConfiguration); case ModelKeys.REPLICATED_CACHE: case ModelKeys.INVALIDATION_CACHE: processCommonClusteredCacheAttributes(writer, cacheConfiguration); default: processCommonCacheConfigurationAttributesElements(writer, cacheConfiguration); } writer.writeEndElement(); // Now the concrete instances if (!identity && caches!= null) { for (String cache : caches) { 
writer.writeStartElement(Element.forName(cacheType).getLocalName()); writer.writeAttribute(Attribute.NAME.getLocalName(), cache); writer.writeAttribute(Attribute.CONFIGURATION.getLocalName(), name); writer.writeEndElement(); } } } } } private void processDistributedCacheAttributes(XMLExtendedStreamWriter writer, ModelNode distributedCache) throws XMLStreamException { this.writeOptional(writer, Attribute.OWNERS, distributedCache, ModelKeys.OWNERS); this.writeOptional(writer, Attribute.SEGMENTS, distributedCache, ModelKeys.SEGMENTS); this.writeOptional(writer, Attribute.CAPACITY_FACTOR, distributedCache, ModelKeys.CAPACITY_FACTOR); this.writeOptional(writer, Attribute.L1_LIFESPAN, distributedCache, ModelKeys.L1_LIFESPAN); } private void processCommonClusteredCacheAttributes(XMLExtendedStreamWriter writer, ModelNode cache) throws XMLStreamException { this.writeOptional(writer, Attribute.MODE, cache, ModelKeys.MODE); this.writeOptional(writer, Attribute.REMOTE_TIMEOUT, cache, ModelKeys.REMOTE_TIMEOUT); } private void processCommonCacheConfigurationAttributesElements(XMLExtendedStreamWriter writer, ModelNode cache) throws XMLStreamException { this.writeOptional(writer, Attribute.CONFIGURATION, cache, ModelKeys.CONFIGURATION); this.writeOptional(writer, Attribute.START, cache, ModelKeys.START); this.writeOptional(writer, Attribute.BATCHING, cache, ModelKeys.BATCHING); this.writeOptional(writer, Attribute.JNDI_NAME, cache, ModelKeys.JNDI_NAME); this.writeOptional(writer, Attribute.MODULE, cache, ModelKeys.MODULE); this.writeOptional(writer, Attribute.SIMPLE_CACHE, cache, ModelKeys.SIMPLE_CACHE); this.writeOptional(writer, Attribute.STATISTICS, cache, ModelKeys.STATISTICS); this.writeOptional(writer, Attribute.STATISTICS_AVAILABLE, cache, ModelKeys.STATISTICS_AVAILABLE); if (cache.get(ModelKeys.BACKUP).isDefined()) { writer.writeStartElement(Element.BACKUPS.getLocalName()); for (Property property : cache.get(ModelKeys.BACKUP).asPropertyList()) { 
writer.writeStartElement(Element.BACKUP.getLocalName()); writer.writeAttribute(Attribute.SITE.getLocalName(), property.getName()); ModelNode backup = property.getValue(); BackupSiteConfigurationResource.FAILURE_POLICY.marshallAsAttribute(backup, writer); BackupSiteConfigurationResource.STRATEGY.marshallAsAttribute(backup, writer); BackupSiteConfigurationResource.REPLICATION_TIMEOUT.marshallAsAttribute(backup, writer); BackupSiteConfigurationResource.ENABLED.marshallAsAttribute(backup, writer); if (backup.hasDefined(ModelKeys.TAKE_BACKUP_OFFLINE_AFTER_FAILURES) || backup.hasDefined(ModelKeys.TAKE_BACKUP_OFFLINE_MIN_WAIT)) { writer.writeStartElement(Element.TAKE_OFFLINE.getLocalName()); BackupSiteConfigurationResource.TAKE_OFFLINE_AFTER_FAILURES.marshallAsAttribute(backup, writer); BackupSiteConfigurationResource.TAKE_OFFLINE_MIN_WAIT.marshallAsAttribute(backup, writer); writer.writeEndElement(); } if (backup.get(ModelKeys.STATE_TRANSFER, ModelKeys.STATE_TRANSFER_NAME).isDefined()) { ModelNode stateTransfer = backup.get(ModelKeys.STATE_TRANSFER, ModelKeys.STATE_TRANSFER_NAME); if (stateTransfer.hasDefined(ModelKeys.CHUNK_SIZE) || stateTransfer.hasDefined(ModelKeys.TIMEOUT) || stateTransfer.hasDefined(ModelKeys.MAX_RETRIES) || stateTransfer.hasDefined(ModelKeys.WAIT_TIME)) { writer.writeStartElement(Element.STATE_TRANSFER.getLocalName()); BackupSiteStateTransferConfigurationResource.STATE_TRANSFER_CHUNK_SIZE.marshallAsAttribute(stateTransfer, writer); BackupSiteStateTransferConfigurationResource.STATE_TRANSFER_TIMEOUT.marshallAsAttribute(stateTransfer, writer); BackupSiteStateTransferConfigurationResource.STATE_TRANSFER_MAX_RETRIES.marshallAsAttribute(stateTransfer, writer); BackupSiteStateTransferConfigurationResource.STATE_TRANSFER_WAIT_TIME.marshallAsAttribute(stateTransfer, writer); writer.writeEndElement(); } } writer.writeEndElement(); } writer.writeEndElement(); } ModelNode dataType = cache.get(ModelKeys.ENCODING); if (dataType.isDefined()) { 
writer.writeStartElement(Element.DATA_TYPE.getLocalName()); ModelNode key = dataType.get(ModelKeys.KEY); if (key.isDefined()) { writer.writeStartElement(Element.KEY.getLocalName()); this.writeOptional(writer, Attribute.MEDIA_TYPE, key, ModelKeys.MEDIA_TYPE); writer.writeEndElement(); } ModelNode value = dataType.get(ModelKeys.VALUE); if (value.isDefined()) { writer.writeStartElement(Element.VALUE.getLocalName()); this.writeOptional(writer, Attribute.MEDIA_TYPE, value, ModelKeys.MEDIA_TYPE); writer.writeEndElement(); } writer.writeEndElement(); } if (cache.get(ModelKeys.REMOTE_CACHE).isDefined() || cache.get(ModelKeys.REMOTE_SITE).isDefined()) { writer.writeStartElement(Element.BACKUP_FOR.getLocalName()); CacheConfigurationResource.REMOTE_CACHE.marshallAsAttribute(cache, writer); CacheConfigurationResource.REMOTE_SITE.marshallAsAttribute(cache, writer); writer.writeEndElement(); } if (cache.get(ModelKeys.LOCKING, ModelKeys.LOCKING_NAME).isDefined()) { writer.writeStartElement(Element.LOCKING.getLocalName()); ModelNode locking = cache.get(ModelKeys.LOCKING, ModelKeys.LOCKING_NAME); this.writeOptional(writer, Attribute.ISOLATION, locking, ModelKeys.ISOLATION); this.writeOptional(writer, Attribute.STRIPING, locking, ModelKeys.STRIPING); this.writeOptional(writer, Attribute.ACQUIRE_TIMEOUT, locking, ModelKeys.ACQUIRE_TIMEOUT); this.writeOptional(writer, Attribute.CONCURRENCY_LEVEL, locking, ModelKeys.CONCURRENCY_LEVEL); writer.writeEndElement(); } if (cache.get(ModelKeys.TRANSACTION, ModelKeys.TRANSACTION_NAME).isDefined()) { writer.writeStartElement(Element.TRANSACTION.getLocalName()); ModelNode transaction = cache.get(ModelKeys.TRANSACTION, ModelKeys.TRANSACTION_NAME); this.writeOptional(writer, Attribute.STOP_TIMEOUT, transaction, ModelKeys.STOP_TIMEOUT); this.writeOptional(writer, Attribute.MODE, transaction, ModelKeys.MODE); this.writeOptional(writer, Attribute.LOCKING, transaction, ModelKeys.LOCKING); this.writeOptional(writer, Attribute.NOTIFICATIONS, 
transaction, ModelKeys.NOTIFICATIONS); writer.writeEndElement(); } ModelNode memory = cache.get(ModelKeys.MEMORY); if (memory.isDefined()) { ModelNode memoryValues; writer.writeStartElement(Element.MEMORY.getLocalName()); if ((memoryValues = memory.get(ModelKeys.BINARY_NAME)).isDefined()) { writer.writeStartElement(Element.BINARY.getLocalName()); this.writeOptional(writer, Attribute.SIZE, memoryValues, ModelKeys.SIZE); this.writeOptional(writer, Attribute.STRATEGY, memoryValues, ModelKeys.STRATEGY); this.writeOptional(writer, Attribute.EVICTION, memoryValues, ModelKeys.EVICTION); writer.writeEndElement(); } else if ((memoryValues = memory.get(ModelKeys.OBJECT_NAME)).isDefined()) { writer.writeStartElement(Element.OBJECT.getLocalName()); this.writeOptional(writer, Attribute.SIZE, memoryValues, ModelKeys.SIZE); this.writeOptional(writer, Attribute.STRATEGY, memoryValues, ModelKeys.STRATEGY); writer.writeEndElement(); } else if ((memoryValues = memory.get(ModelKeys.OFF_HEAP_NAME)).isDefined()) { writer.writeStartElement(Element.OFF_HEAP.getLocalName()); this.writeOptional(writer, Attribute.SIZE, memoryValues, ModelKeys.SIZE); this.writeOptional(writer, Attribute.STRATEGY, memoryValues, ModelKeys.STRATEGY); this.writeOptional(writer, Attribute.EVICTION, memoryValues, ModelKeys.EVICTION); this.writeOptional(writer, Attribute.ADDRESS_COUNT, memoryValues, ModelKeys.ADDRESS_COUNT); writer.writeEndElement(); } writer.writeEndElement(); } if (cache.get(ModelKeys.EXPIRATION, ModelKeys.EXPIRATION_NAME).isDefined()) { writer.writeStartElement(Element.EXPIRATION.getLocalName()); ModelNode expiration = cache.get(ModelKeys.EXPIRATION, ModelKeys.EXPIRATION_NAME); this.writeOptional(writer, Attribute.MAX_IDLE, expiration, ModelKeys.MAX_IDLE); this.writeOptional(writer, Attribute.LIFESPAN, expiration, ModelKeys.LIFESPAN); this.writeOptional(writer, Attribute.INTERVAL, expiration, ModelKeys.INTERVAL); writer.writeEndElement(); } if (cache.get(ModelKeys.COMPATIBILITY).isDefined()) { 
ModelNode compatibility = cache.get(ModelKeys.COMPATIBILITY, ModelKeys.COMPATIBILITY_NAME); writer.writeStartElement(Element.COMPATIBILITY.getLocalName()); CompatibilityConfigurationResource.ENABLED.marshallAsAttribute(compatibility, writer); CompatibilityConfigurationResource.MARSHALLER.marshallAsAttribute(compatibility, writer); writer.writeEndElement(); } if (cache.hasDefined(ModelKeys.SECURITY)) { writer.writeStartElement(Element.SECURITY.getLocalName()); ModelNode security = cache.get(ModelKeys.SECURITY, ModelKeys.SECURITY_NAME); if (security.hasDefined(ModelKeys.AUTHORIZATION)) { writer.writeStartElement(Element.AUTHORIZATION.getLocalName()); ModelNode authorization = security.get(ModelKeys.AUTHORIZATION, ModelKeys.AUTHORIZATION_NAME); CacheAuthorizationConfigurationResource.ENABLED.marshallAsAttribute(authorization, writer); this.writeListAsAttribute(writer, Attribute.ROLES, authorization, ModelKeys.ROLES); writer.writeEndElement(); } writer.writeEndElement(); } ModelNode persistence = cache.get(ModelKeys.PERSISTENCE, ModelKeys.PERSISTENCE_NAME); String[] attrStrings = Arrays.stream(PersistenceConfigurationResource.ATTRIBUTES).map(AttributeDefinition::getName).toArray(String[]::new); boolean attrOrElementExists = Stream.of(attrStrings, PersistenceConfigurationResource.LOADER_KEYS, PersistenceConfigurationResource.STORE_KEYS) .flatMap(Arrays::stream) .anyMatch(persistence::hasDefined); // If attribute exists, or a child element exists then we must write the persistence element if (attrOrElementExists) { writer.writeStartElement(ModelKeys.PERSISTENCE); this.writeOptional(writer, Attribute.AVAILABILITY_INTERVAL, persistence, ModelKeys.AVAILABILITY_INTERVAL); this.writeOptional(writer, Attribute.CONNECTION_ATTEMPTS, persistence, ModelKeys.CONNECTION_ATTEMPTS); this.writeOptional(writer, Attribute.CONNECTION_INTERVAL, persistence, ModelKeys.CONNECTION_INTERVAL); this.writeOptional(writer, Attribute.PASSIVATION, persistence, ModelKeys.PASSIVATION); 
writePersistence(writer, persistence); writer.writeEndElement(); } if (cache.get(ModelKeys.INDEXING, ModelKeys.INDEXING_NAME).isDefined()) { ModelNode indexing = cache.get(ModelKeys.INDEXING, ModelKeys.INDEXING_NAME); writer.writeStartElement(Element.INDEXING.getLocalName()); IndexingConfigurationResource.INDEXING.marshallAsAttribute(indexing, writer); IndexingConfigurationResource.INDEXING_AUTO_CONFIG.marshallAsAttribute(indexing, writer); if (indexing.get(ModelKeys.KEY_TRANSFORMERS).isDefined()) { writer.writeStartElement(Element.KEY_TRANSFORMERS.getLocalName()); IndexingConfigurationResource.KEY_TRANSFORMERS.marshallAsElement(indexing, writer); writer.writeEndElement(); } if (indexing.get(ModelKeys.INDEXED_ENTITIES).isDefined()) { writer.writeStartElement(Element.INDEXED_ENTITIES.getLocalName()); IndexingConfigurationResource.INDEXED_ENTITIES.marshallAsElement(indexing, writer); writer.writeEndElement(); } IndexingConfigurationResource.INDEXING_PROPERTIES.marshallAsElement(indexing, writer); writer.writeEndElement(); } if (cache.get(ModelKeys.STATE_TRANSFER, ModelKeys.STATE_TRANSFER_NAME).isDefined()) { ModelNode stateTransfer = cache.get(ModelKeys.STATE_TRANSFER, ModelKeys.STATE_TRANSFER_NAME); writer.writeStartElement(Element.STATE_TRANSFER.getLocalName()); this.writeOptional(writer, Attribute.AWAIT_INITIAL_TRANSFER, stateTransfer, ModelKeys.AWAIT_INITIAL_TRANSFER); this.writeOptional(writer, Attribute.ENABLED, stateTransfer, ModelKeys.ENABLED); this.writeOptional(writer, Attribute.TIMEOUT, stateTransfer, ModelKeys.TIMEOUT); this.writeOptional(writer, Attribute.CHUNK_SIZE, stateTransfer, ModelKeys.CHUNK_SIZE); writer.writeEndElement(); } if (cache.get(ModelKeys.PARTITION_HANDLING, ModelKeys.PARTITION_HANDLING_NAME).isDefined()) { ModelNode partitionHandling = cache.get(ModelKeys.PARTITION_HANDLING, ModelKeys.PARTITION_HANDLING_NAME); writer.writeStartElement(Element.PARTITION_HANDLING.getLocalName()); this.writeOptional(writer, Attribute.WHEN_SPLIT, 
partitionHandling, ModelKeys.WHEN_SPLIT);
            this.writeOptional(writer, Attribute.MERGE_POLICY, partitionHandling, ModelKeys.MERGE_POLICY);
            writer.writeEndElement();
        }
    }

    /**
     * Writes the persistence children of a cache element: every configured loader and
     * store variant (generic, cluster, file, string-keyed JDBC, RocksDB, remote, REST,
     * soft-index file store), in that fixed order.
     * <p>
     * The emission order of elements and attributes is part of the persisted XML schema
     * contract — do not reorder the calls below.
     *
     * @param writer the StAX writer positioned inside the cache element
     * @param cache  the resolved cache model node to serialize
     * @throws XMLStreamException on any underlying write failure
     */
    private void writePersistence(XMLExtendedStreamWriter writer, ModelNode cache) throws XMLStreamException {
        if (cache.get(ModelKeys.LOADER).isDefined()) {
            for (Property clusterLoaderEntry : cache.get(ModelKeys.LOADER).asPropertyList()) {
                ModelNode loader = clusterLoaderEntry.getValue();
                writer.writeStartElement(Element.LOADER.getLocalName());
                // write identifier before other attributes
                ModelNode name = new ModelNode();
                name.get(ModelKeys.NAME).set(clusterLoaderEntry.getName());
                LoaderConfigurationResource.NAME.marshallAsAttribute(name, false, writer);
                this.writeRequired(writer, Attribute.CLASS, loader, ModelKeys.CLASS);
                this.writeLoaderAttributes(writer, loader);
                this.writeStoreProperties(writer, loader);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.CLUSTER_LOADER).isDefined()) {
            for (Property clusterLoaderEntry : cache.get(ModelKeys.CLUSTER_LOADER).asPropertyList()) {
                ModelNode loader = clusterLoaderEntry.getValue();
                writer.writeStartElement(Element.CLUSTER_LOADER.getLocalName());
                // write identifier before other attributes
                ModelNode name = new ModelNode();
                name.get(ModelKeys.NAME).set(clusterLoaderEntry.getName());
                ClusterLoaderConfigurationResource.NAME.marshallAsAttribute(name, false, writer);
                this.writeOptional(writer, Attribute.REMOTE_TIMEOUT, loader, ModelKeys.REMOTE_TIMEOUT);
                this.writeLoaderAttributes(writer, loader);
                this.writeStoreProperties(writer, loader);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.STORE).isDefined()) {
            for (Property storeEntry : cache.get(ModelKeys.STORE).asPropertyList()) {
                ModelNode store = storeEntry.getValue();
                writer.writeStartElement(Element.STORE.getLocalName());
                this.writeRequired(writer, Attribute.CLASS, store, ModelKeys.CLASS);
                this.writeStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.FILE_STORE).isDefined()) {
            for (Property fileStoreEntry : cache.get(ModelKeys.FILE_STORE).asPropertyList()) {
                ModelNode store = fileStoreEntry.getValue();
                writer.writeStartElement(Element.FILE_STORE.getLocalName());
                this.writeOptional(writer, Attribute.MAX_ENTRIES, store, ModelKeys.MAX_ENTRIES);
                this.writeOptional(writer, Attribute.RELATIVE_TO, store, ModelKeys.RELATIVE_TO);
                this.writeOptional(writer, Attribute.PATH, store, ModelKeys.PATH);
                this.writeStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.STRING_KEYED_JDBC_STORE).isDefined()) {
            for (Property stringKeyedJDBCStoreEntry : cache.get(ModelKeys.STRING_KEYED_JDBC_STORE).asPropertyList()) {
                ModelNode store = stringKeyedJDBCStoreEntry.getValue();
                writer.writeStartElement(Element.STRING_KEYED_JDBC_STORE.getLocalName());
                this.writeJdbcStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                this.writeJDBCStoreTable(writer, Element.STRING_KEYED_TABLE, store, ModelKeys.STRING_KEYED_TABLE);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.ROCKSDB_STORE).isDefined()) {
            for (Property rocksDbStoreEntry : cache.get(ModelKeys.ROCKSDB_STORE).asPropertyList()) {
                ModelNode store = rocksDbStoreEntry.getValue();
                writer.writeStartElement(Element.ROCKSDB_STORE.getLocalName());
                this.writeOptional(writer, Attribute.RELATIVE_TO, store, ModelKeys.RELATIVE_TO);
                this.writeOptional(writer, Attribute.PATH, store, ModelKeys.PATH);
                this.writeOptional(writer, Attribute.BLOCK_SIZE, store, ModelKeys.BLOCK_SIZE);
                this.writeOptional(writer, Attribute.CACHE_SIZE, store, ModelKeys.CACHE_SIZE);
                this.writeOptional(writer, Attribute.CLEAR_THRESHOLD, store, ModelKeys.CLEAR_THRESHOLD);
                this.writeStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeRocksDBStoreExpiration(writer, store);
                this.writeRocksDBStoreCompression(writer, store);
                this.writeStoreProperties(writer, store);
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.REMOTE_STORE).isDefined()) {
            for (Property remoteStoreEntry : cache.get(ModelKeys.REMOTE_STORE).asPropertyList()) {
                ModelNode store = remoteStoreEntry.getValue();
                writer.writeStartElement(Element.REMOTE_STORE.getLocalName());
                this.writeOptional(writer, Attribute.CACHE, store, ModelKeys.CACHE);
                this.writeOptional(writer, Attribute.HOTROD_WRAPPING, store, ModelKeys.HOTROD_WRAPPING);
                this.writeOptional(writer, Attribute.RAW_VALUES, store, ModelKeys.RAW_VALUES);
                this.writeOptional(writer, Attribute.SOCKET_TIMEOUT, store, ModelKeys.SOCKET_TIMEOUT);
                this.writeOptional(writer, Attribute.TCP_NO_DELAY, store, ModelKeys.TCP_NO_DELAY);
                this.writeOptional(writer, Attribute.PROTOCOL_VERSION, store, ModelKeys.PROTOCOL_VERSION);
                this.writeStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                // remote-servers is required for a remote store, hence require() rather than get()
                for (ModelNode remoteServer: store.require(ModelKeys.REMOTE_SERVERS).asList()) {
                    writer.writeStartElement(Element.REMOTE_SERVER.getLocalName());
                    writer.writeAttribute(Attribute.OUTBOUND_SOCKET_BINDING.getLocalName(), remoteServer.get(ModelKeys.OUTBOUND_SOCKET_BINDING).asString());
                    writer.writeEndElement();
                }
                if (store.get(ModelKeys.AUTHENTICATION, ModelKeys.AUTHENTICATION_NAME).isDefined()) {
                    ModelNode authentication = store.get(ModelKeys.AUTHENTICATION, ModelKeys.AUTHENTICATION_NAME);
                    writer.writeStartElement(Element.AUTHENTICATION.getLocalName());
                    // mechanism selects which child element is written; an unknown
                    // mechanism silently writes an empty <authentication/> element
                    switch(authentication.get(ModelKeys.MECHANISM).asString()) {
                        case "PLAIN": {
                            writer.writeStartElement(Element.PLAIN.getLocalName());
                            this.writeRequired(writer, Attribute.USERNAME, authentication, ModelKeys.USERNAME);
                            this.writeRequired(writer, Attribute.PASSWORD, authentication, ModelKeys.PASSWORD);
                            writer.writeEndElement();
                            break;
                        }
                        case "DIGEST-MD5": {
                            writer.writeStartElement(Element.DIGEST.getLocalName());
                            this.writeRequired(writer, Attribute.USERNAME, authentication, ModelKeys.USERNAME);
                            this.writeRequired(writer, Attribute.PASSWORD, authentication, ModelKeys.PASSWORD);
                            this.writeRequired(writer, Attribute.REALM, authentication, ModelKeys.REALM);
                            this.writeOptional(writer, Attribute.SERVER_NAME, authentication, ModelKeys.SERVER_NAME);
                            writer.writeEndElement();
                            break;
                        }
                        case "EXTERNAL": {
                            writer.writeEmptyElement(Element.EXTERNAL.getLocalName());
                            break;
                        }
                    }
                    writer.writeEndElement();
                }
                if (store.get(ModelKeys.ENCRYPTION, ModelKeys.ENCRYPTION_NAME).isDefined()) {
                    ModelNode encryption = store.get(ModelKeys.ENCRYPTION, ModelKeys.ENCRYPTION_NAME);
                    writer.writeStartElement(Element.ENCRYPTION.getLocalName());
                    this.writeRequired(writer, Attribute.SECURITY_REALM, encryption, ModelKeys.SECURITY_REALM);
                    this.writeOptional(writer, Attribute.SNI_HOSTNAME, encryption, ModelKeys.SNI_HOSTNAME);
                    writer.writeEndElement();
                }
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.REST_STORE).isDefined()) {
            for (Property restStoreEntry : cache.get(ModelKeys.REST_STORE).asPropertyList()) {
                ModelNode store = restStoreEntry.getValue();
                writer.writeStartElement(Element.REST_STORE.getLocalName());
                this.writeOptional(writer, Attribute.CACHE_NAME, store, ModelKeys.CACHE_NAME);
                this.writeStoreAttributes(writer, store);
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                for (ModelNode remoteServer: store.require(ModelKeys.REMOTE_SERVERS).asList()) {
                    writer.writeStartElement(Element.REMOTE_SERVER.getLocalName());
                    writer.writeAttribute(Attribute.OUTBOUND_SOCKET_BINDING.getLocalName(), remoteServer.get(ModelKeys.OUTBOUND_SOCKET_BINDING).asString());
                    writer.writeEndElement();
                }
                if (store.hasDefined(ModelKeys.CONNECTION_POOL)) {
                    ModelNode pool = store.get(ModelKeys.CONNECTION_POOL);
                    writer.writeStartElement(Element.CONNECTION_POOL.getLocalName());
                    this.writeOptional(writer, Attribute.CONNECTION_TIMEOUT, pool, ModelKeys.CONNECTION_TIMEOUT);
                    this.writeOptional(writer, Attribute.MAX_CONNECTIONS_PER_HOST, pool, ModelKeys.MAX_CONNECTIONS_PER_HOST);
                    this.writeOptional(writer, Attribute.MAX_TOTAL_CONNECTIONS, pool, ModelKeys.MAX_TOTAL_CONNECTIONS);
                    this.writeOptional(writer, Attribute.BUFFER_SIZE, pool, ModelKeys.BUFFER_SIZE);
                    this.writeOptional(writer, Attribute.SOCKET_TIMEOUT, pool, ModelKeys.SOCKET_TIMEOUT);
                    this.writeOptional(writer, Attribute.TCP_NO_DELAY, pool, ModelKeys.TCP_NO_DELAY);
                    writer.writeEndElement();
                }
                writer.writeEndElement();
            }
        }
        if (cache.get(ModelKeys.SOFT_INDEX_FILE_STORE).isDefined()) {
            for (Property softIndexEntry : cache.get(ModelKeys.SOFT_INDEX_FILE_STORE).asPropertyList()) {
                ModelNode store = softIndexEntry.getValue();
                writer.writeStartElement(Element.SOFT_INDEX_FILE_STORE.getLocalName());
                this.writeOptional(writer, Attribute.COMPACTION_THRESHOLD, store, ModelKeys.COMPACTION_THRESHOLD);
                this.writeOptional(writer, Attribute.OPEN_FILES_LIMIT, store, ModelKeys.OPEN_FILES_LIMIT);
                this.writeStoreAttributes(writer, store);
                if (store.hasDefined(ModelKeys.DATA)) {
                    // NOTE(review): the presence check uses ModelKeys.DATA but the value is
                    // read via DATA_PATH.getKeyValuePair() — presumably equivalent addresses;
                    // confirm against SoftIndexConfigurationResource.
                    ModelNode data = store.get(SoftIndexConfigurationResource.DATA_PATH.getKeyValuePair());
                    writer.writeStartElement(Element.DATA.getLocalName());
                    this.writeOptional(writer, Attribute.PATH, data, ModelKeys.PATH);
                    this.writeOptional(writer, Attribute.MAX_FILE_SIZE, data, ModelKeys.MAX_FILE_SIZE);
                    this.writeOptional(writer, Attribute.SYNC_WRITES, data, ModelKeys.SYNC_WRITES);
                    writer.writeEndElement();
                }
                if (store.hasDefined(ModelKeys.INDEX)) {
                    ModelNode index = store.get(SoftIndexConfigurationResource.INDEX_PATH.getKeyValuePair());
                    writer.writeStartElement(Element.INDEX.getLocalName());
                    this.writeOptional(writer, Attribute.PATH, index, ModelKeys.PATH);
                    this.writeOptional(writer, Attribute.MAX_NODE_SIZE, index, ModelKeys.MAX_NODE_SIZE);
                    this.writeOptional(writer, Attribute.MIN_NODE_SIZE, index, ModelKeys.MIN_NODE_SIZE);
                    this.writeOptional(writer, Attribute.MAX_QUEUE_LENGTH, index, ModelKeys.MAX_QUEUE_LENGTH);
                    this.writeOptional(writer, Attribute.SEGMENTS, index, ModelKeys.SEGMENTS);
                    writer.writeEndElement();
                }
                this.writeStoreWriteBehind(writer, store);
                this.writeStoreProperties(writer, store);
                writer.writeEndElement();
            }
        }
    }

    /**
     * Writes a LIST-typed model attribute as a single space-separated XML attribute.
     * Nothing is written when the key is undefined or not a list.
     */
    private void writeListAsAttribute(XMLExtendedStreamWriter writer, Attribute attribute, ModelNode node, String key) throws XMLStreamException {
        if (node.hasDefined(key)) {
            StringBuilder result = new StringBuilder();
            ModelNode list = node.get(key);
            if (list.isDefined() && list.getType() == ModelType.LIST) {
                List<ModelNode> nodeList = list.asList();
                for (int i = 0; i < nodeList.size(); i++) {
                    result.append(nodeList.get(i).asString());
                    if (i < nodeList.size()-1) {
                        result.append(" ");
                    }
                }
                writer.writeAttribute(attribute.getLocalName(), result.toString());
            }
        }
    }

    /**
     * Writes a JDBC store table element (e.g. string-keyed-table) with its sizing
     * attributes and the id/data/timestamp column children, when defined.
     */
    private void writeJDBCStoreTable(XMLExtendedStreamWriter writer, Element element, ModelNode store, String key) throws XMLStreamException {
        if (store.hasDefined(key)) {
            ModelNode table = store.get(key);
            writer.writeStartElement(element.getLocalName());
            this.writeOptional(writer, Attribute.PREFIX, table, ModelKeys.PREFIX);
            this.writeOptional(writer, Attribute.BATCH_SIZE, table, ModelKeys.BATCH_SIZE);
            this.writeOptional(writer, Attribute.FETCH_SIZE, table, ModelKeys.FETCH_SIZE);
            this.writeOptional(writer, Attribute.CREATE_ON_START, table, ModelKeys.CREATE_ON_START);
            this.writeOptional(writer, Attribute.DROP_ON_EXIT, table, ModelKeys.DROP_ON_EXIT);
            this.writeJDBCStoreColumn(writer, Element.ID_COLUMN, table, ModelKeys.ID_COLUMN);
            this.writeJDBCStoreColumn(writer, Element.DATA_COLUMN, table, ModelKeys.DATA_COLUMN);
            this.writeJDBCStoreColumn(writer, Element.TIMESTAMP_COLUMN, table, ModelKeys.TIMESTAMP_COLUMN);
            writer.writeEndElement();
        }
    }

    /** Writes a single JDBC table column element (name/type attributes), when defined. */
    private void writeJDBCStoreColumn(XMLExtendedStreamWriter writer, Element element, ModelNode table, String key) throws XMLStreamException {
        if (table.hasDefined(key)) {
            ModelNode column = table.get(key);
            writer.writeStartElement(element.getLocalName());
            this.writeOptional(writer, Attribute.NAME, column, ModelKeys.NAME);
            this.writeOptional(writer, Attribute.TYPE, column, ModelKeys.TYPE);
            writer.writeEndElement();
        }
    }

    /** Writes the attributes shared by loader elements (shared/preload). */
    private void writeLoaderAttributes(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        this.writeOptional(writer, Attribute.SHARED, store, ModelKeys.SHARED);
        this.writeOptional(writer, Attribute.PRELOAD, store, ModelKeys.PRELOAD);
    }

    /** Writes JDBC-specific attributes (datasource is mandatory) plus the common store attributes. */
    private void writeJdbcStoreAttributes(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        this.writeRequired(writer, Attribute.DATASOURCE, store, ModelKeys.DATASOURCE);
        this.writeOptional(writer, Attribute.DB_MAJOR_VERSION, store, ModelKeys.DB_MAJOR_VERSION);
        this.writeOptional(writer, Attribute.DB_MINOR_VERSION, store, ModelKeys.DB_MINOR_VERSION);
        this.writeOptional(writer, Attribute.DIALECT, store, ModelKeys.DIALECT);
        this.writeStoreAttributes(writer, store);
    }

    /** Writes the attributes common to every store element. */
    private void writeStoreAttributes(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        this.writeOptional(writer, Attribute.NAME, store, ModelKeys.NAME);
        this.writeOptional(writer, Attribute.SHARED, store, ModelKeys.SHARED);
        this.writeOptional(writer, Attribute.PRELOAD, store, ModelKeys.PRELOAD);
        this.writeOptional(writer, Attribute.FETCH_STATE, store, ModelKeys.FETCH_STATE);
        this.writeOptional(writer, Attribute.PURGE, store, ModelKeys.PURGE);
        this.writeOptional(writer, Attribute.READ_ONLY, store, ModelKeys.READ_ONLY);
        this.writeOptional(writer, Attribute.MAX_BATCH_SIZE, store, ModelKeys.MAX_BATCH_SIZE);
    }

    /** Writes the write-behind child element of a store, when configured. */
    private void writeStoreWriteBehind(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        if (store.get(ModelKeys.WRITE_BEHIND, ModelKeys.WRITE_BEHIND_NAME).isDefined()) {
            ModelNode writeBehind = store.get(ModelKeys.WRITE_BEHIND, ModelKeys.WRITE_BEHIND_NAME);
            writer.writeStartElement(Element.WRITE_BEHIND.getLocalName());
            this.writeOptional(writer, Attribute.MODIFICATION_QUEUE_SIZE, writeBehind, ModelKeys.MODIFICATION_QUEUE_SIZE);
            this.writeOptional(writer, Attribute.THREAD_POOL_SIZE, writeBehind, ModelKeys.THREAD_POOL_SIZE);
            writer.writeEndElement();
        }
    }

    /** Writes each store property as a &lt;property name="..."&gt;value&lt;/property&gt; element. */
    private void writeStoreProperties(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        if (store.hasDefined(ModelKeys.PROPERTY)) {
            // the format of the property elements
            //  "property" => {
            //       "relative-to" => {"value" => "fred"},
            //   }
            for (Property property: store.get(ModelKeys.PROPERTY).asPropertyList()) {
                writer.writeStartElement(Element.PROPERTY.getLocalName());
                writer.writeAttribute(Attribute.NAME.getLocalName(), property.getName());
                // the property value is nested one level down ("value" => ...), hence asProperty()
                Property complexValue = property.getValue().asProperty();
                writer.writeCharacters(complexValue.getValue().asString());
                writer.writeEndElement();
            }
        }
    }

    /** Writes the RocksDB store's expiration child element, when configured. */
    private void writeRocksDBStoreExpiration(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        if (store.get(ModelKeys.EXPIRATION, ModelKeys.EXPIRATION_NAME).isDefined()) {
            ModelNode expiration = store.get(ModelKeys.EXPIRATION, ModelKeys.EXPIRATION_NAME);
            writer.writeStartElement(Element.EXPIRATION.getLocalName());
            this.writeOptional(writer, Attribute.PATH, expiration, ModelKeys.PATH);
            this.writeOptional(writer, Attribute.RELATIVE_TO, expiration, ModelKeys.RELATIVE_TO);
            this.writeOptional(writer, Attribute.QUEUE_SIZE, expiration, ModelKeys.QUEUE_SIZE);
            writer.writeEndElement();
        }
    }

    /** Writes the RocksDB store's compression child element, when configured. */
    private void writeRocksDBStoreCompression(XMLExtendedStreamWriter writer, ModelNode store) throws XMLStreamException {
        if (store.get(ModelKeys.COMPRESSION, ModelKeys.COMPRESSION_NAME).isDefined()) {
            ModelNode compression = store.get(ModelKeys.COMPRESSION, ModelKeys.COMPRESSION_NAME);
            writer.writeStartElement(Element.COMPRESSION.getLocalName());
            this.writeOptional(writer, Attribute.TYPE, compression, ModelKeys.TYPE);
            writer.writeEndElement();
        }
    }

    /** Writes the attribute only when the model defines the key; otherwise writes nothing. */
    private void writeOptional(XMLExtendedStreamWriter writer, Attribute attribute, ModelNode model, String key)
            throws XMLStreamException {
        if (model.hasDefined(key)) {
            writer.writeAttribute(attribute.getLocalName(), model.get(key).asString());
        }
    }

    /**
     * Writes the attribute unconditionally; require() throws if the key is absent,
     * which surfaces a model-population bug rather than emitting invalid XML.
     */
    private void writeRequired(XMLExtendedStreamWriter writer, Attribute attribute, ModelNode model, String key)
            throws XMLStreamException {
        writer.writeAttribute(attribute.getLocalName(), model.require(key).asString());
    }

    /** Returns true when at least one of the given attribute definitions is defined on the model. */
    private static boolean hasDefined(ModelNode model, Iterable<? extends AttributeDefinition> attributes) {
        for (AttributeDefinition attribute : attributes) {
            if (model.hasDefined(attribute.getName())) return true;
        }
        return false;
    }

    /** Marshals each attribute definition in turn via its own marshaller. */
    private static void writeAttributes(XMLExtendedStreamWriter writer, ModelNode model, Iterable<? extends AttributeDefinition> attributes) throws XMLStreamException {
        for (AttributeDefinition attribute : attributes) {
            writeAttribute(writer, model, attribute);
        }
    }

    /** Marshals a single attribute using its configured marshaller (marshallDefault = true). */
    private static void writeAttribute(XMLExtendedStreamWriter writer, ModelNode model, AttributeDefinition attribute) throws XMLStreamException {
        attribute.getMarshaller().marshallAsAttribute(attribute, model, true, writer);
    }
}
// Copyright (c) 2014 Readium Foundation and/or its licensees. All rights reserved. // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // 1. Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation and/or // other materials provided with the distribution. // 3. Neither the name of the organization nor the names of its contributors may be // used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE // OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED // OF THE POSSIBILITY OF SUCH DAMAGE package org.readium.sdk.android.launcher; import org.readium.sdk.android.components.navigation.NavigationTable; public class ListOfLandmarksActivity extends NavigationTableActivity { protected NavigationTable getNavigationTable() { NavigationTable navigationTable = null; if (pckg != null) { navigationTable = pckg.getListOfLandmarks(); } return (navigationTable != null) ? 
navigationTable : new NavigationTable("landmarks", "", ""); } }
package com.javarush.task.task09.task0911;

import java.util.HashMap;

/*
Exception when working with Map collections
*/

public class Solution {
    public static void main(String[] args) {
        try {
            triggerMapException();
        } catch (NullPointerException e) {
            // Print the exception's class, e.g. "class java.lang.NullPointerException".
            System.out.println(e.getClass());
        }
    }

    // Provokes a NullPointerException: the HashMap copy constructor rejects a
    // null source map, so the put/remove calls are never reached.
    private static void triggerMapException() {
        HashMap<String, String> map = new HashMap<String, String>(null);
        map.put(null, null);
        map.remove(null);
    }
}
/* * Copyright (c) 2020 GeekXYZ. * All rights reserved. */ package io.geekshop.custom.graphql; import io.geekshop.common.Constant; import io.geekshop.mapper.*; import io.geekshop.resolver.dataloader.*; import io.geekshop.types.address.Address; import io.geekshop.types.administrator.Administrator; import io.geekshop.types.asset.Asset; import io.geekshop.types.collection.Collection; import io.geekshop.types.customer.Customer; import io.geekshop.types.customer.CustomerGroup; import io.geekshop.types.facet.Facet; import io.geekshop.types.facet.FacetValue; import io.geekshop.types.order.Fulfillment; import io.geekshop.types.order.Order; import io.geekshop.types.order.OrderItem; import io.geekshop.types.order.OrderLine; import io.geekshop.types.payment.Payment; import io.geekshop.types.payment.Refund; import io.geekshop.types.product.Product; import io.geekshop.types.product.ProductOption; import io.geekshop.types.product.ProductOptionGroup; import io.geekshop.types.product.ProductVariant; import io.geekshop.types.promotion.Promotion; import io.geekshop.types.role.Role; import io.geekshop.types.shipping.ShippingMethod; import io.geekshop.types.user.AuthenticationMethod; import io.geekshop.types.user.User; import graphql.kickstart.execution.context.DefaultGraphQLContext; import graphql.kickstart.execution.context.GraphQLContext; import graphql.kickstart.servlet.context.DefaultGraphQLServletContext; import graphql.kickstart.servlet.context.DefaultGraphQLWebSocketContext; import graphql.kickstart.servlet.context.GraphQLServletContextBuilder; import lombok.RequiredArgsConstructor; import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; import org.springframework.stereotype.Component; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.websocket.Session; import javax.websocket.server.HandshakeRequest; import java.util.List; /** * Created on Nov, 2020 by @author bobo */ @Component 
@RequiredArgsConstructor
/**
 * Builds GraphQL execution contexts (servlet, websocket, and plain) and wires up
 * one DataLoaderRegistry per context so that DataLoader caches are scoped to a
 * single request. All loaders are mapped loaders keyed by entity id.
 */
public class CustomGraphQLContextBuilder implements GraphQLServletContextBuilder {
    // MyBatis entity mappers injected via Lombok's @RequiredArgsConstructor;
    // they back the per-request DataLoaders registered below.
    private final UserEntityMapper userEntityMapper;
    private final AdministratorEntityMapper administratorEntityMapper;
    private final CustomerEntityMapper customerEntityMapper;
    private final AddressEntityMapper addressEntityMapper;
    private final CustomerGroupJoinEntityMapper customerGroupJoinEntityMapper;
    private final CustomerGroupEntityMapper customerGroupEntityMapper;
    private final UserRoleJoinEntityMapper userRoleJoinEntityMapper;
    private final RoleEntityMapper roleEntityMapper;
    private final AuthenticationMethodEntityMapper authenticationMethodEntityMapper;
    private final FacetEntityMapper facetEntityMapper;
    private final FacetValueEntityMapper facetValueEntityMapper;
    private final ProductOptionEntityMapper productOptionEntityMapper;
    private final ProductOptionGroupEntityMapper productOptionGroupEntityMapper;
    private final AssetEntityMapper assetEntityMapper;
    private final ProductAssetJoinEntityMapper productAssetJoinEntityMapper;
    private final ProductVariantEntityMapper productVariantEntityMapper;
    private final ProductOptionGroupJoinEntityMapper productOptionGroupJoinEntityMapper;
    private final ProductFacetValueJoinEntityMapper productFacetValueJoinEntityMapper;
    private final ProductEntityMapper productEntityMapper;
    private final ProductVariantAssetJoinEntityMapper productVariantAssetJoinEntityMapper;
    private final ProductVariantProductOptionJoinEntityMapper productVariantProductOptionJoinEntityMapper;
    private final ProductVariantFacetValueJoinEntityMapper productVariantFacetValueJoinEntityMapper;
    private final CollectionAssetJoinEntityMapper collectionAssetJoinEntityMapper;
    private final CollectionEntityMapper collectionEntityMapper;
    private final OrderEntityMapper orderEntityMapper;
    private final FulfillmentEntityMapper fulfillmentEntityMapper;
    private final OrderItemEntityMapper orderItemEntityMapper;
    private final OrderPromotionJoinEntityMapper orderPromotionJoinEntityMapper;
    private final PromotionEntityMapper promotionEntityMapper;
    private final PaymentEntityMapper paymentEntityMapper;
    private final ShippingMethodEntityMapper shippingMethodEntityMapper;
    private final OrderLineEntityMapper orderLineEntityMapper;
    private final RefundEntityMapper refundEntityMapper;

    /**
     * Builds the HTTP (servlet) GraphQL context, wrapping the default context so the
     * request/response pair travels with a fresh DataLoaderRegistry.
     */
    @Override
    public GraphQLContext build(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) {
        DefaultGraphQLServletContext defaultGraphQLServletContext =
                DefaultGraphQLServletContext.createServletContext(buildDataLoaderRegistry(), null)
                .with(httpServletRequest).with(httpServletResponse).build();
        return new CustomGraphQLServletContext(defaultGraphQLServletContext);
    }

    /** Builds the websocket GraphQL context (subscriptions) with its own registry. */
    @Override
    public GraphQLContext build(Session session, HandshakeRequest handshakeRequest) {
        return DefaultGraphQLWebSocketContext.createWebSocketContext(buildDataLoaderRegistry(), null)
                .with(session).with(handshakeRequest).build();
    }

    /** Builds a bare GraphQL context (no transport), still with a fresh registry. */
    @Override
    public GraphQLContext build() {
        return new DefaultGraphQLContext(buildDataLoaderRegistry(), null);
    }

    /**
     * Creates a new DataLoaderRegistry and registers every mapped DataLoader under the
     * name the resolvers look it up by (constants in {@link Constant}). Called once per
     * context build, so batching/caching never leaks across requests.
     */
    private DataLoaderRegistry buildDataLoaderRegistry() {
        DataLoaderRegistry dataLoaderRegistry = new DataLoaderRegistry();

        // User / administrator / customer loaders
        DataLoader<Long, User> administratorUserDataLoader = DataLoader.newMappedDataLoader(
                new AdministratorUserDataLoader(this.userEntityMapper, this.administratorEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_ADMINISTRATOR_USER, administratorUserDataLoader);
        DataLoader<Long, Administrator> historyEntryAdministratorDataLoader = DataLoader.newMappedDataLoader(
                new HistoryEntryAdministratorDataLoader(this.administratorEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_HISTORY_ENTRY_ADMINISTRATOR, historyEntryAdministratorDataLoader);
        DataLoader<Long, User> customerUserDataLoader = DataLoader.newMappedDataLoader(
                new CustomerUserDataLoader(this.userEntityMapper, this.customerEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_CUSTOMER_USER, customerUserDataLoader);
        DataLoader<Long, List<Address>> customerAddressesDataLoader = DataLoader.newMappedDataLoader(
                new CustomerAddressesDataLoader(this.addressEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_CUSTOMER_ADDRESSES, customerAddressesDataLoader);
        DataLoader<Long, List<CustomerGroup>> customerGroupDataLoader = DataLoader.newMappedDataLoader(
                new CustomerGroupsDataLoader(this.customerGroupJoinEntityMapper, this.customerGroupEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_CUSTOMER_GROUPS, customerGroupDataLoader);
        DataLoader<Long, List<Role>> userRolesDataLoader = DataLoader.newMappedDataLoader(
                new UserRolesDataLoader(this.userRoleJoinEntityMapper, this.roleEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_USER_ROLES, userRolesDataLoader);
        DataLoader<Long, List<AuthenticationMethod>> userAuthenticationMethodsDataLoader =
                DataLoader.newMappedDataLoader(
                        new UserAuthenticationMethodsDataLoader(this.authenticationMethodEntityMapper));
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_USER_AUTHENTICATION_METHODS, userAuthenticationMethodsDataLoader);

        // Facet loaders
        DataLoader<Long, Facet> facetValueFacetDataLoader = DataLoader.newMappedDataLoader(
                new FacetValueFacetDataLoader((this.facetEntityMapper))
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_FACET_VALUE_FACET, facetValueFacetDataLoader);
        DataLoader<Long, List<FacetValue>> facetValuesDataLoader = DataLoader.newMappedDataLoader(
                new FacetValuesDataLoader(this.facetValueEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_FACET_VALUES, facetValuesDataLoader);

        // Product / product-variant loaders
        DataLoader<Long, List<ProductOption>> productOptionsDataLoader = DataLoader.newMappedDataLoader(
                new ProductOptionsDataLoader(this.productOptionEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_PRODUCT_OPTIONS, productOptionsDataLoader);
        DataLoader<Long, ProductOptionGroup> productOptionGroupDataLoader = DataLoader.newMappedDataLoader(
                new ProductOptionGroupDataLoader(this.productOptionGroupEntityMapper)
        );
        dataLoaderRegistry.register(Constant.DATA_LOADER_NAME_PRODUCT_OPTION_GROUP, productOptionGroupDataLoader);
        DataLoader<Long, Asset> productFeaturedAssetDataLoader = DataLoader.newMappedDataLoader(
                new FeaturedAssetDataLoader(this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_FEATURED_ASSET, productFeaturedAssetDataLoader
        );
        DataLoader<Long, List<Asset>> productAssetsDataLoader = DataLoader.newMappedDataLoader(
                new ProductAssetsDataLoader(this.productAssetJoinEntityMapper, this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_ASSETS, productAssetsDataLoader
        );
        DataLoader<Long, List<ProductVariant>> productVariantsDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantsDataLoader(this.productVariantEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANTS, productVariantsDataLoader
        );
        DataLoader<Long, List<ProductOptionGroup>> productOptionGroupsDataLoader = DataLoader.newMappedDataLoader(
                new ProductOptionGroupsDataLoader(
                        this.productOptionGroupJoinEntityMapper, this.productOptionGroupEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_OPTION_GROUPS, productOptionGroupsDataLoader
        );
        DataLoader<Long, List<FacetValue>> productFacetValuesDataLoader = DataLoader.newMappedDataLoader(
                new ProductFacetValuesDataLoader(
                        this.productFacetValueJoinEntityMapper, this.facetValueEntityMapper, this.facetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_FACET_VALUES, productFacetValuesDataLoader
        );
        DataLoader<Long, Asset> productVariantFeaturedAssetDataLoader = DataLoader.newMappedDataLoader(
                new FeaturedAssetDataLoader(this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANT_FEATURED_ASSET, productVariantFeaturedAssetDataLoader
        );
        DataLoader<Long, Product> productVariantProductDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantProductDataLoader(this.productEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANT_PRODUCT, productVariantProductDataLoader
        );
        DataLoader<Long, List<Asset>> productVariantAssetsDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantAssetsDataLoader(this.productVariantAssetJoinEntityMapper, this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANT_ASSETS, productVariantAssetsDataLoader
        );
        DataLoader<Long, List<ProductOption>> productVariantOptionsDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantOptionsDataLoader(
                        this.productVariantProductOptionJoinEntityMapper,this.productOptionEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANT_OPTIONS, productVariantOptionsDataLoader
        );
        DataLoader<Long, List<FacetValue>> productVariantFacetValuesDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantFacetValuesDataLoader(
                        this.productVariantFacetValueJoinEntityMapper,
                        this.facetValueEntityMapper,
                        this.facetEntityMapper
                )
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PRODUCT_VARIANT_FACET_VALUES, productVariantFacetValuesDataLoader
        );

        // Collection loaders
        DataLoader<Long, Asset> collectionFeaturedAssetDataLoader = DataLoader.newMappedDataLoader(
                new FeaturedAssetDataLoader(this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_COLLECTION_FEATURED_ASSET, collectionFeaturedAssetDataLoader
        );
        DataLoader<Long, List<Asset>> collectionAssetsDataLoader = DataLoader.newMappedDataLoader(
                new CollectionAssetsDataLoader(this.collectionAssetJoinEntityMapper, this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_COLLECTION_ASSETS, collectionAssetsDataLoader
        );
        DataLoader<Long, Collection> collectionParentDataLoader = DataLoader.newMappedDataLoader(
                new CollectionParentDataLoader(this.collectionEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_COLLECTION_PARENT, collectionParentDataLoader
        );
        DataLoader<Long, List<Collection>> collectionChildrenDataLoader = DataLoader.newMappedDataLoader(
                new CollectionChildrenDataLoader(this.collectionEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_COLLECTION_CHILDREN, collectionChildrenDataLoader
        );

        // Order / fulfillment / payment loaders
        DataLoader<Long, ProductVariant> orderLineProductVariantDataLoader = DataLoader.newMappedDataLoader(
                new ProductVariantDataLoader(this.productVariantEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_LINE_PRODUCT_VARIANT, orderLineProductVariantDataLoader
        );
        DataLoader<Long, Asset> orderLineFeaturedAssetDataLoader = DataLoader.newMappedDataLoader(
                new FeaturedAssetDataLoader(this.assetEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_LINE_FEATURED_ASSET, orderLineFeaturedAssetDataLoader
        );
        DataLoader<Long, Order> orderLineOrderDataLoader = DataLoader.newMappedDataLoader(
                new OrderDataLoader(this.orderEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_LINE_ORDER, orderLineOrderDataLoader
        );
        DataLoader<Long, Fulfillment> orderItemFulfillmentDataLoader = DataLoader.newMappedDataLoader(
                new FulfillmentDataLoader(this.fulfillmentEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_ITEM_FULFILLMENT, orderItemFulfillmentDataLoader
        );
        // NOTE(review): "DataLoder" is a typo in the existing class name, kept as-is.
        DataLoader<Long, List<OrderItem>> fulfillmentOrderItemsDataLoader = DataLoader.newMappedDataLoader(
                new FulfillmentOrderItemsDataLoder(this.orderItemEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_FULFILLMENT_ORDER_ITEMS, fulfillmentOrderItemsDataLoader
        );
        DataLoader<Long, Customer> orderCustomerDataLoader = DataLoader.newMappedDataLoader(
                new CustomerDataLoader(this.customerEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_CUSTOMER, orderCustomerDataLoader
        );
        DataLoader<Long, List<Promotion>> orderPromotionsDataLoader = DataLoader.newMappedDataLoader(
                new OrderPromotionsDataLoader(
                        this.orderPromotionJoinEntityMapper,
                        this.promotionEntityMapper
                )
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_PROMOTIONS, orderPromotionsDataLoader
        );
        DataLoader<Long, List<Payment>> orderPaymentsDataLoader = DataLoader.newMappedDataLoader(
                new OrderPaymentsDataLoader(this.paymentEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_PAYMENTS, orderPaymentsDataLoader
        );
        DataLoader<Long, ShippingMethod> orderShippingMethodDataLoader = DataLoader.newMappedDataLoader(
                new ShippingMethodDataLoader(this.shippingMethodEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_SHIPPING_METHOD, orderShippingMethodDataLoader
        );
        DataLoader<Long, List<OrderItem>> orderLineItemsDataLoader = DataLoader.newMappedDataLoader(
                new OrderLineItemsDataLoader(this.orderItemEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_LINE_ITEMS, orderLineItemsDataLoader
        );
        DataLoader<Long, List<OrderLine>> orderLinesDataLoader = DataLoader.newMappedDataLoader(
                new OrderLinesDataLoader(this.orderLineEntityMapper, this.orderItemEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_ORDER_LINES, orderLinesDataLoader
        );
        DataLoader<Long, List<Refund>> paymentRefundsDataLoader = DataLoader.newMappedDataLoader(
                new PaymentRefundsDataLoader(this.refundEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_PAYMENT_REFUNDS, paymentRefundsDataLoader
        );
        DataLoader<Long, List<OrderItem>> refundOrderItemsDataLoader = DataLoader.newMappedDataLoader(
                new RefundOrderItemsDataLoader(this.orderItemEntityMapper)
        );
        dataLoaderRegistry.register(
                Constant.DATA_LOADER_NAME_REFUND_ORDER_ITEMS, refundOrderItemsDataLoader
        );
        return dataLoaderRegistry;
    }
}
package com.adamk33n3r.runelite.watchdog.notifications;

import com.adamk33n3r.runelite.watchdog.WatchdogPlugin;
import lombok.extern.slf4j.Slf4j;
import net.runelite.client.config.FlashNotification;
import net.runelite.client.util.ColorUtil;

import java.awt.Color;

/**
 * Watchdog notification type that flashes the client screen when fired.
 * <p>
 * The flash itself is rendered by the plugin's flash overlay; this class only
 * carries the user-configurable flash settings and hands itself to the overlay.
 */
@Slf4j
public class ScreenFlash extends Notification {
    // Flash tint. "#46FF0000" — presumably ARGB with a 0x46 alpha over pure red;
    // depends on ColorUtil.fromHex semantics — TODO confirm.
    public Color color = ColorUtil.fromHex("#46FF0000");
    // How the flash animates/ends (RuneLite's built-in flash modes).
    public FlashNotification flashNotification = FlashNotification.SOLID_TWO_SECONDS;

    // Delegates the actual rendering to the plugin-wide flash overlay,
    // passing this object so the overlay can read color/flashNotification.
    @Override
    protected void fireImpl() {
        WatchdogPlugin.getInstance().getFlashOverlay().flash(this);
    }
}
/** * * Copyright 2003-2007 Jive Software. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx.iqprivate; import java.io.IOException; import java.util.Hashtable; import java.util.Map; import java.util.WeakHashMap; import org.jivesoftware.smack.Manager; import org.jivesoftware.smack.SmackException.NoResponseException; import org.jivesoftware.smack.SmackException.NotConnectedException; import org.jivesoftware.smack.XMPPConnection; import org.jivesoftware.smack.XMPPException.XMPPErrorException; import org.jivesoftware.smack.packet.IQ; import org.jivesoftware.smack.packet.StanzaError.Condition; import org.jivesoftware.smack.packet.XmlEnvironment; import org.jivesoftware.smack.provider.IQProvider; import org.jivesoftware.smackx.iqprivate.packet.DefaultPrivateData; import org.jivesoftware.smackx.iqprivate.packet.PrivateData; import org.jivesoftware.smackx.iqprivate.packet.PrivateDataIQ; import org.jivesoftware.smackx.iqprivate.provider.PrivateDataProvider; import org.jxmpp.util.XmppStringUtils; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; /** * Manages private data, which is a mechanism to allow users to store arbitrary XML * data on an XMPP server. Each private data chunk is defined by a element name and * XML namespace. 
 * Example private data:
 *
 * <pre>
 * &lt;color xmlns="http://example.com/xmpp/color"&gt;
 *     &lt;favorite&gt;blue&lt;/favorite&gt;
 *     &lt;leastFavorite&gt;puce&lt;/leastFavorite&gt;
 * &lt;/color&gt;
 * </pre>
 *
 * {@link PrivateDataProvider} instances are responsible for translating the XML into objects.
 * If no PrivateDataProvider is registered for a given element name and namespace, then
 * a {@link DefaultPrivateData} instance will be returned.<p>
 *
 * Warning: this is an non-standard protocol documented by
 * <a href="http://www.xmpp.org/extensions/jep-0049.html">XEP-49</a>. Because this is a
 * non-standard protocol, it is subject to change.
 *
 * @author Matt Tucker
 */
public final class PrivateDataManager extends Manager {

    // One manager per connection. WeakHashMap keys let an entry disappear when
    // the connection itself becomes unreachable.
    private static final Map<XMPPConnection, PrivateDataManager> instances = new WeakHashMap<XMPPConnection, PrivateDataManager>();

    /**
     * Returns the PrivateDataManager for the given connection, creating it on
     * first use. Synchronized so concurrent first calls do not create duplicates.
     *
     * @param connection the connection to manage private data for.
     * @return the (possibly newly created) manager.
     */
    public static synchronized PrivateDataManager getInstanceFor(XMPPConnection connection) {
        PrivateDataManager privateDataManager = instances.get(connection);
        if (privateDataManager == null) {
            // The private constructor registers the new instance in `instances`.
            privateDataManager = new PrivateDataManager(connection);
        }
        return privateDataManager;
    }

    /**
     * Map of provider instances, keyed by element-name/namespace
     * (see {@link XmppStringUtils#generateKey}). Hashtable gives
     * synchronized access for the static register/unregister methods.
     */
    private static final Map<String, PrivateDataProvider> privateDataProviders = new Hashtable<>();

    /**
     * Returns the private data provider registered to the specified XML element name and namespace.
     * For example, if a provider was registered to the element name "prefs" and the
     * namespace "http://www.xmppclient.com/prefs", then the following stanza would trigger
     * the provider:
     *
     * <pre>
     * &lt;iq type='result' to='joe@example.com' from='mary@example.com' id='time_1'&gt;
     *     &lt;query xmlns='jabber:iq:private'&gt;
     *         &lt;prefs xmlns='http://www.xmppclient.com/prefs'&gt;
     *             &lt;value1&gt;ABC&lt;/value1&gt;
     *             &lt;value2&gt;XYZ&lt;/value2&gt;
     *         &lt;/prefs&gt;
     *     &lt;/query&gt;
     * &lt;/iq&gt;</pre>
     *
     * <p>Note: this method is generally only called by the internal Smack classes.
     *
     * @param elementName the XML element name.
     * @param namespace the XML namespace.
     * @return the PrivateData provider, or null if none is registered for the pair.
     */
    public static PrivateDataProvider getPrivateDataProvider(String elementName, String namespace) {
        String key = XmppStringUtils.generateKey(elementName, namespace);
        return privateDataProviders.get(key);
    }

    /**
     * Adds a private data provider with the specified element name and name space. The provider
     * will override any providers loaded through the classpath.
     *
     * @param elementName the XML element name.
     * @param namespace the XML namespace.
     * @param provider the private data provider.
     */
    public static void addPrivateDataProvider(String elementName, String namespace, PrivateDataProvider provider) {
        String key = XmppStringUtils.generateKey(elementName, namespace);
        privateDataProviders.put(key, provider);
    }

    /**
     * Removes a private data provider with the specified element name and namespace.
     * No-op if nothing was registered for the pair.
     *
     * @param elementName The XML element name.
     * @param namespace The XML namespace.
     */
    public static void removePrivateDataProvider(String elementName, String namespace) {
        String key = XmppStringUtils.generateKey(elementName, namespace);
        privateDataProviders.remove(key);
    }

    /**
     * Creates a new private data manager and registers it for the connection.
     *
     * @param connection an XMPP connection which must have already undergone a
     *      successful login.
     */
    private PrivateDataManager(XMPPConnection connection) {
        super(connection);
        instances.put(connection, this);
    }

    /**
     * Returns the private data specified by the given element name and namespace. Each chunk
     * of private data is uniquely identified by an element name and namespace pair.<p>
     *
     * If a PrivateDataProvider is registered for the specified element name/namespace pair then
     * that provider will determine the specific object type that is returned. If no provider
     * is registered, a {@link DefaultPrivateData} instance will be returned.
     *
     * @param elementName the element name.
     * @param namespace the namespace.
     * @return the private data.
     * @throws XMPPErrorException if the server answers with an error IQ.
     * @throws NoResponseException if no answer arrives within the reply timeout.
     * @throws NotConnectedException if the connection is not connected.
     * @throws InterruptedException if the calling thread is interrupted while waiting.
     */
    public PrivateData getPrivateData(final String elementName, final String namespace) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // Create an IQ packet to get the private data.
        IQ privateDataGet = new PrivateDataIQ(elementName, namespace);
        // Blocks until the result IQ arrives (or throws on error/timeout).
        PrivateDataIQ response = connection().createStanzaCollectorAndSend(
                privateDataGet).nextResultOrThrow();
        return response.getPrivateData();
    }

    /**
     * Sets a private data value. Each chunk of private data is uniquely identified by an
     * element name and namespace pair. If private data has already been set with the
     * element name and namespace, then the new private data will overwrite the old value.
     *
     * @param privateData the private data.
     * @throws XMPPErrorException if the server answers with an error IQ.
     * @throws NoResponseException if no answer arrives within the reply timeout.
     * @throws NotConnectedException if the connection is not connected.
     * @throws InterruptedException if the calling thread is interrupted while waiting.
     */
    public void setPrivateData(final PrivateData privateData) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // Create an IQ packet to set the private data.
        IQ privateDataSet = new PrivateDataIQ(privateData);
        connection().createStanzaCollectorAndSend(privateDataSet).nextResultOrThrow();
    }

    // Harmless throw-away payload used by isSupported() to probe the service.
    private static final PrivateData DUMMY_PRIVATE_DATA = new PrivateData() {

        @Override
        public String getElementName() {
            return "smackDummyPrivateData";
        }

        @Override
        public String getNamespace() {
            return "https://igniterealtime.org/projects/smack/";
        }

        @Override
        public CharSequence toXML() {
            return '<' + getElementName() + " xmlns='" + getNamespace() + "'/>";
        }

    };

    /**
     * Check if the service supports private data.
     *
     * @return true if the service supports private data, false otherwise.
     * @throws NoResponseException if no answer arrives within the reply timeout.
     * @throws NotConnectedException if the connection is not connected.
     * @throws InterruptedException if the calling thread is interrupted while waiting.
     * @throws XMPPErrorException if the server answers with an error other than service-unavailable.
     * @since 4.2
     */
    public boolean isSupported() throws NoResponseException, NotConnectedException, InterruptedException, XMPPErrorException {
        // This is just a primitive hack, since XEP-49 does not specify a way to determine if the
        // service supports it: try to store a dummy item and see whether the server refuses.
        try {
            setPrivateData(DUMMY_PRIVATE_DATA);
            return true;
        }
        catch (XMPPErrorException e) {
            // service-unavailable means "not supported"; anything else is a real error.
            if (e.getStanzaError().getCondition() == Condition.service_unavailable) {
                return false;
            }
            else {
                throw e;
            }
        }
    }

    /**
     * An IQ provider to parse IQ results containing private data.
     */
    public static class PrivateDataIQProvider extends IQProvider<PrivateDataIQ> {

        /**
         * Parses the payload of a jabber:iq:private result.
         * Delegates to a registered {@link PrivateDataProvider} when one exists
         * for the first child element; otherwise collects simple name/text
         * children into a {@link DefaultPrivateData}.
         */
        @Override
        public PrivateDataIQ parse(XmlPullParser parser, int initialDepth, XmlEnvironment xmlEnvironment)
                        throws XmlPullParserException, IOException {
            PrivateData privateData = null;
            boolean done = false;
            while (!done) {
                int eventType = parser.next();
                if (eventType == XmlPullParser.START_TAG) {
                    String elementName = parser.getName();
                    String namespace = parser.getNamespace();

                    // See if any objects are registered to handle this private data type.
                    PrivateDataProvider provider = getPrivateDataProvider(elementName, namespace);
                    // If there is a registered provider, use it.
                    if (provider != null) {
                        privateData = provider.parsePrivateData(parser);
                    }
                    // Otherwise, use a DefaultPrivateData instance to store the private data.
                    else {
                        DefaultPrivateData data = new DefaultPrivateData(elementName, namespace);
                        boolean finished = false;
                        while (!finished) {
                            int event = parser.next();
                            if (event == XmlPullParser.START_TAG) {
                                String name = parser.getName();
                                // If an empty element, set the value with the empty string.
                                if (parser.isEmptyElementTag()) {
                                    data.setValue(name,"");
                                }
                                // Otherwise, get the element text.
                                else {
                                    event = parser.next();
                                    if (event == XmlPullParser.TEXT) {
                                        String value = parser.getText();
                                        data.setValue(name, value);
                                    }
                                }
                            }
                            else if (event == XmlPullParser.END_TAG) {
                                // Stop when the wrapping private-data element closes.
                                if (parser.getName().equals(elementName)) {
                                    finished = true;
                                }
                            }
                        }
                        privateData = data;
                    }
                }
                else if (eventType == XmlPullParser.END_TAG) {
                    // </query> terminates the whole payload.
                    if (parser.getName().equals("query")) {
                        done = true;
                    }
                }
            }
            return new PrivateDataIQ(privateData);
        }
    }
}
package backend.foritech.backend.beans;

/**
 * Request bean carrying the credentials of a sign-in attempt
 * (email address plus plain password, as received from the client).
 * Plain mutable JavaBean: no-arg constructor + getters/setters.
 */
public class SignInRequest {

    // Email the user signs in with; used as the account identifier.
    private String personEmail;
    // Raw password as submitted — NOTE(review): ensure it is never logged.
    private String password;

    /** @return the sign-in email address. */
    public String getPersonEmail() {
        return personEmail;
    }

    /** @param personEmail the sign-in email address. */
    public void setPersonEmail(String personEmail) {
        this.personEmail = personEmail;
    }

    /** @return the submitted password. */
    public String getPassword() {
        return password;
    }

    /** @param password the submitted password. */
    public void setPassword(String password) {
        this.password = password;
    }
}
package github.banana.letcode; import github.banana.common.TreeNode; /** * 114. 二叉树展开为链表 * <p> * 给定一个二叉树, 原地将它展开为链表 * * <pre> * 1 * / \ * 2 5 * / \ \ * 3 4 6 * * 1 * \ * 2 * \ * 3 * \ * 4 * \ * 5 * \ * 6 * </pre> */ public class Flatten { public static void main(String[] args) { } public void flatten(TreeNode root) { // 往右子树递归拼接 if (root == null) { return; } flatten(root.left); flatten(root.right); if (root.left != null) { // 记录右节点 TreeNode right = root.right; root.right = root.left; // 将左节点置空 root.left = null; // 遍历右节点到最右叶子节点 // 此时的 root.right 是上一次的左右左子树 TreeNode node = root.right; while (node.right != null) { node = node.right; } node.right = right; } } }
package com.kotor4j.resourcemanager.gff.fields;

import com.kotor4j.resourcemanager.gff.GffLoadContext;
import com.kotor4j.resourcemanager.gff.GffStructure;

/**
 * GFF field value that references a nested struct.
 * The struct is resolved from the load context's struct table.
 *
 * @author sad
 */
public class GffStruct extends GffFieldValue {

    // Resolved struct; null until load() has been called.
    private GffStructure value;

    /** @return the resolved struct, or null if not yet loaded. */
    public GffStructure getValue() {
        return value;
    }

    /**
     * Resolves this field's value.
     * NOTE(review): for struct fields, {@code offset} is used as an index
     * into the context's struct array (not a byte offset) — the array
     * access below makes that explicit; confirm against the GFF spec.
     */
    @Override
    public void load(GffLoadContext loadContext, int offset) {
        value = loadContext.getStructs()[offset];
    }
}
/* * Copyright 2018 olivier. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tatsinktechnologic.smpp.gateway; import com.tatsinktechnologic.beans.DeliveryMessage; import com.tatsinktechnologic.beans.Message_Exchg; import com.tatsinktechnologic.beans.StateMenu; import com.tatsinktechnologic.beans.USSDType; import com.tatsinktechnologic.beans.UssdMenu; import com.tatsinktechnologic.beans.UssdMessage; import com.tatsinktechnologic.config.ConfigLoader; import com.tatsinktechnologic.resfull.services.API_USSDService; import com.tatsinktechnologic.utils.ConverterJSON; import com.tatsinktechnologic.utils.Generator; import com.tatsinktechnologic.xml.kafka.USSD_Conf; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.security.Provider.Service; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Properties; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.commons.lang.SerializationUtils; import org.apache.commons.lang.StringUtils; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.log4j.Logger; import org.jsmpp.bean.AlertNotification; import org.jsmpp.bean.DataSm; import 
org.jsmpp.bean.DeliverSm;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.MessageType;
import org.jsmpp.extra.ProcessRequestException;
import org.jsmpp.session.DataSmResult;
import org.jsmpp.session.MessageReceiverListener;
import org.jsmpp.session.Session;
import org.jsmpp.util.HexUtil;
import org.jsmpp.util.InvalidDeliveryReceiptException;

/**
 * SMPP {@link MessageReceiverListener} that receives mobile-originated (MO)
 * messages and delivery receipts, drives a USSD menu state machine keyed by
 * transaction id, and forwards results to Kafka topics.
 *
 * @author olivier.tatsinkou
 */
public class ReceiveSMS implements MessageReceiverListener {

    /*
     * USSD message-type constants (see USSDType):
     * USSDMSG_TYPE_SUB_SEND_REQ          = 100;
     * USSDMSG_TYPE_SUB_SEND_RSP          = 101;
     * USSDMSG_TYPE_SUB_CANCEL            = 102;
     * USSDMSG_TYPE_TRANS_ERR             = 104;
     * USSDMSG_TYPE_SUB_RECV_OK           = 103;
     * USSDMSG_TYPE_APP_SEND_MENU         = 202;
     * USSDMSG_TYPE_APP_SEND_RSP          = 203;
     * USSDMSG_TYPE_APP_SEND_NOTIFY_FIRST = 201;
     * USSDMSG_TYPE_APP_CANCEL            = 205;
     * USSDMSG_TYPE_APP_CLOSE_TRANS       = 206;
     * USSDMSG_TYPE_APP_SEND_REQ_FIRST    = 200;
     * USSDMSG_TYPE_APP_SEND_NOTIFY       = 204;
     */

    // NOTE(review): `pool`, `SET_STATE` and `ussdMsg_timeout` are initialized
    // but never read anywhere in this class — possibly dead state.
    private static final ExecutorService pool = Executors.newFixedThreadPool(10);
    private static final Logger logger = Logger.getLogger(ReceiveSMS.class);
    private static ConfigLoader communConf = ConfigLoader.getConfigurationLoader();
    private static HashMap<String, String> SET_STATE;
    // In-flight USSD sessions keyed by transaction id.
    // NOTE(review): plain HashMap mutated from SMPP callback threads —
    // looks unsafe if the listener is invoked concurrently; confirm.
    private static HashMap<String, StateMenu> PROCESSING_STATE = new HashMap<String, StateMenu>();
    // Menu definitions keyed by the accumulated user-input path.
    private static HashMap<String, UssdMenu> setUssdMenu;
    private static USSD_Conf ussd_conf;
    private static Properties props = new Properties();
    // NOTE(review): static producer is re-assigned in every constructor call;
    // earlier KafkaProducer instances are never closed (resource leak).
    private static Producer<String, String> producer;
    private static ProducerCallback callback;
    private static ProducerRecord<String, String> kafka_data;
    private static int ussdMsg_timeout;
    // 1 = forward delivery receipts to Kafka, otherwise only log them.
    private static int smpp_enableReport;
    private static String smpp_dateFormat;
    // Kafka topic for delivery reports.
    private static String delv_topic;
    private SMSGateway senderGateway;

    // One-time configuration load shared by all instances.
    static {
        setUssdMenu = communConf.getSetUssdMenu();
        props = communConf.getProduct_props();
        callback = new ProducerCallback();
        ussd_conf = communConf.getUssdconfig();
        ussdMsg_timeout = ussd_conf.getApi_conf().getUssdMessage_Timeout();
        smpp_enableReport = ussd_conf.getSmpp_conf().getEnable_delivery();
        smpp_dateFormat = ussd_conf.getSmpp_conf().getDate_format();
        delv_topic = ussd_conf.getSmpp_conf().getDeliviery_topic();
        SET_STATE = new HashMap<String, String>();
    }

    /**
     * Builds a listener with its own Kafka producer whose client.id is
     * suffixed with the given id (so multiple listeners are distinguishable).
     *
     * @param user unused beyond the constructor signature — TODO confirm.
     * @param id suffix appended to the configured Kafka client.id.
     */
    public ReceiveSMS(String user, String id) {
        super();
        Properties producerprops = new Properties();
        // Deep-copy the shared properties so the client.id change is local.
        producerprops = (Properties) SerializationUtils.clone(props);
        String new_client_id = producerprops.getProperty("client.id");
        new_client_id = new_client_id + "_" + id;
        producerprops.put("client.id", new_client_id);
        producer = new KafkaProducer<String, String>(producerprops);
        senderGateway = SMSGateway.getSenderGateway();
    }

    /** Convenience constructor; the session argument is not retained. */
    public ReceiveSMS(Session s, String id) {
        this("default_user", id);
    }

    // data_sm PDUs are only logged; no response payload is produced.
    @Override
    public DataSmResult onAcceptDataSm(DataSm arg0, Session arg1) throws ProcessRequestException {
        logger.info("Received SMS on onAcceptDataSm : " + arg0);
        return null;
    }

    // Alert notifications are only logged.
    @Override
    public void onAcceptAlertNotification(AlertNotification arg0) {
        logger.info("Received SMS on onAcceptAlertNotification : " + arg0);
    }

    /**
     * Entry point for incoming deliver_sm PDUs.
     * Delivery receipts are parsed and (when smpp_enableReport == 1)
     * forwarded to the delivery Kafka topic; everything else is treated
     * as a mobile-originated USSD message and fed to runUSSD().
     */
    @Override
    public void onAcceptDeliverSm(DeliverSm arg0) throws ProcessRequestException {
        if (smpp_enableReport == 1) {
            if (MessageType.SMSC_DEL_RECEIPT.containedIn(arg0.getEsmClass())) {
                logger.info("-------------- START RECEIVE DELIVERY SMS -----------");
                // this message is delivery receipt
                try {
                    DeliveryReceipt delReceipt = arg0.getShortMessageAsDeliveryReceipt();

                    // Convert the receipt id to an unsigned hex string.
                    long id = Long.parseLong(delReceipt.getId()) & 0xffffffff;
                    String messageId = Long.toString(id, 16).toUpperCase();

                    /*
                     * you can update the status of your submitted message on the
                     * database based on messageId
                     */
                    DeliveryMessage delivery_msg = null;
                    delivery_msg = new DeliveryMessage(messageId, arg0.getDestAddress(), arg0.getSourceAddr(),
                            delReceipt.getId(), String.valueOf(delReceipt.getSubmitted()),
                            String.valueOf(delReceipt.getDelivered()), convertToDate(delReceipt.getSubmitDate()),
                            convertToDate(delReceipt.getDoneDate()), delReceipt.getFinalStatus().name(),
                            delReceipt.getError(), "");

                    // Forward the receipt to the delivery-report topic.
                    String message_send = ConverterJSON.convertDeliveryToJson(delivery_msg);
                    kafka_data = new ProducerRecord<String, String>(delv_topic, message_send);
                    producer.send(kafka_data, callback);

                    logger.info("Delivery Receive : " + message_send);
                    logger.info("SUCCESS DELIVERY report topic -->" + delv_topic + " | message: msg_id = " + messageId + " from " + arg0.getSourceAddr() + " --> " + arg0.getDestAddress());
                    logger.info("SUCCESS DELIVERY information : " + delReceipt);
                    logger.info("SUCCESS send delivery report : " + delivery_msg);
                } catch (InvalidDeliveryReceiptException e) {
                    logger.error("Failed getting delivery receipt", e);
                }
                logger.info("SMSC_DEL_RECEIPT : " + arg0.toString());
            } else {
                // Regular MO message: run the USSD state machine.
                logger.info("-------------- START RECEIVE MO SMS -----------");
                runUSSD(arg0);
            }
        } else {
            if (!MessageType.SMSC_DEL_RECEIPT.containedIn(arg0.getEsmClass())) {
                logger.info("-------------- START RECEIVE MO SMS -----------");
                runUSSD(arg0);
            } else {
                // Reporting disabled: parse and log the receipt but do not forward it.
                try {
                    DeliveryReceipt delReceipt = arg0.getShortMessageAsDeliveryReceipt();

                    // Convert the receipt id to an unsigned hex string.
                    long id = Long.parseLong(delReceipt.getId()) & 0xffffffff;
                    String messageId = Long.toString(id, 16).toUpperCase();

                    /*
                     * you can update the status of your submitted message on the
                     * database based on messageId
                     */
                    DeliveryMessage delivery_msg = null;
                    delivery_msg = new DeliveryMessage(messageId, arg0.getDestAddress(), arg0.getSourceAddr(),
                            delReceipt.getId(), String.valueOf(delReceipt.getSubmitted()),
                            String.valueOf(delReceipt.getDelivered()), convertToDate(delReceipt.getSubmitDate()),
                            convertToDate(delReceipt.getDoneDate()), delReceipt.getFinalStatus().name(),
                            delReceipt.getError(), "");

                    String message_send = ConverterJSON.convertDeliveryToJson(delivery_msg);
                    logger.info("########## SMSGW ARE NOT SETTING TO FOWARD DELIVERY REPORT ################## ");
                    logger.info("Delivery Receive : " + message_send);
                    logger.info("SUCCESS DELIVERY report topic -->" + delv_topic + " | message: msg_id = " + messageId + " from " + arg0.getSourceAddr() + " --> " + arg0.getDestAddress());
                    logger.info("SUCCESS DELIVERY information : " + delReceipt);
                    logger.info("SUCCESS send delivery report : " + delivery_msg);
                } catch (Exception e) {
                    logger.error("Failed getting delivery receipt", e);
                }
            }
        }
    }

    /**
     * USSD state machine for one incoming MO message.
     * Decodes the payload, builds a request/response pair, and dispatches on
     * the message type: first request, follow-up response, cancel, ack, error.
     * Finally pushes the response text back through the SMS gateway.
     *
     * NOTE(review): message_type/transaction_id come from a freshly
     * constructed UssdMessage rather than from the PDU — it is unclear from
     * this file how UssdMessage's defaults relate to the incoming message;
     * confirm against UssdMessage's constructor.
     */
    private void runUSSD(DeliverSm arg0) {
        String shortcode = arg0.getDestAddress();
        String msisdn = arg0.getSourceAddr();
        byte[] data = arg0.getShortMessage();
        String ussd_message = null;
        if (arg0.getShortMessage() != null) {
            // Data coding 8 (UCS-2): keep the payload as a hex dump;
            // otherwise decode with the platform default charset.
            if (arg0.getDataCoding() == (byte) 8) {
                ussd_message = HexUtil.convertBytesToHexString(data, 0, data.length);
            } else {
                ussd_message = new String(data);
            }
        }

        String menuServ = "";
        UssdMessage ussdRequest = new UssdMessage();
        ussdRequest.setMsisdn(msisdn);
        String transaction_id = ussdRequest.getTransId();
        int message_type = ussdRequest.getType();

        // Echo the request's session fields onto the response.
        UssdMessage ussdResponse = new UssdMessage();
        ussdResponse.setCharSet(ussdRequest.getCharSet());
        ussdResponse.setConnectorId(ussdRequest.getConnectorId());
        ussdResponse.setDlgId(ussdRequest.getDlgId());
        ussdResponse.setEncryptedUssdString(ussdRequest.getEncryptedUssdString());
        ussdResponse.setHlrGT(ussdRequest.getHlrGT());
        ussdResponse.setImsi(ussdRequest.getImsi());
        ussdResponse.setLoggedString(ussdRequest.getLoggedString());
        ussdResponse.setMsisdn(ussdRequest.getMsisdn());
        ussdResponse.setSendRecvTime(ussdRequest.getSendRecvTime());
        ussdResponse.setTransId(ussdRequest.getTransId());

        String stateMenu_key;
        UssdMenu ussdMenu;
        StateMenu state;
        String shorCode;

        switch (message_type) {
            case USSDType.USSDMSG_TYPE_SUB_SEND_REQ:
                // First message of a session: the dialed string is the menu key.
                stateMenu_key = ussd_message;
                shorCode = getShortCode(ussd_message);
                state = new StateMenu();
                state.setShort_code(shorCode);
                state.setMsisdn(msisdn);
                state.setInput(stateMenu_key);
                state.setTransaction_id(transaction_id);
                // Drop any stale session with the same transaction id.
                if (PROCESSING_STATE != null && PROCESSING_STATE.size() > 0 && PROCESSING_STATE.containsKey(transaction_id)) {
                    logger.info("FIST REQUEST : Remove --> state : " + PROCESSING_STATE.get(transaction_id) + " -----> Transaction : " + transaction_id);
                    PROCESSING_STATE.remove(transaction_id);
                }
                ussdMenu = setUssdMenu.get(stateMenu_key);
                if (ussdMenu != null) { // check if menu exist
                    String menuResp = ussdMenu.getResp();
                    String menuAct = ussdMenu.getAction();
                    String menuTopic = ussdMenu.getTopic();
                    menuServ = ussdMenu.getService();
                    String menuStatus = ussdMenu.getStatus();
                    if (menuStatus.equals("START-MENU")) {
                        // Session continues: remember state and send a menu.
                        PROCESSING_STATE.put(transaction_id, state);
                        if (!StringUtils.isBlank(menuResp)) {
                            ussdResponse.setUssdString(menuResp);
                            ussdResponse.setType(USSDType.USSDMSG_TYPE_APP_SEND_MENU);
                        }
                        if (!StringUtils.isBlank(menuAct)) {
                            processAction(menuTopic, menuServ, msisdn, shorCode, menuAct);
                        }
                        logger.info("START-MENU USSD MENU : " + ussdMenu + " -----> Transaction : " + transaction_id);
                    } else if (menuStatus.equals("START-END")) {
                        // One-shot menu: reply and close (no state retained).
                        if (!StringUtils.isBlank(menuResp)) {
                            ussdResponse.setUssdString(menuResp);
                            ussdResponse.setType(USSDType.USSDMSG_TYPE_APP_SEND_RSP);
                        }
                        if (!StringUtils.isBlank(menuAct)) {
                            processAction(menuTopic, menuServ, msisdn, shorCode, menuAct);
                        }
                        logger.info("START-END USSD MENU : " + ussdMenu + " -----> Transaction : " + transaction_id);
                    } else {
                        logger.info("THIS USSD REQUEST CANNOT BE PROCESS : " + ussdMenu + " -----> Transaction : " + transaction_id);
                    }
                } else {
                    logger.info("THIS USSD REQUEST CANNOT BE PROCESS : " + ussdMenu + " -----> Transaction : " + transaction_id);
                }
                break;
            case USSDType.USSDMSG_TYPE_SUB_SEND_RSP:
                // Follow-up input within an existing session.
                // NOTE(review): the `!containsKey` guard looks inverted — when it
                // is true, PROCESSING_STATE.get(transaction_id) returns null and
                // state.getInput() below would throw NPE; likely should be
                // `containsKey`. Confirm before changing.
                if (PROCESSING_STATE != null && PROCESSING_STATE.size() > 0 && !PROCESSING_STATE.containsKey(transaction_id)) {
                    logger.info("FIST REQUEST : Remove --> state : " + PROCESSING_STATE.get(transaction_id) + " -----> Transaction : " + transaction_id);
                    state = PROCESSING_STATE.get(transaction_id);
                    // Menu key accumulates the whole input path, e.g. "*123#-1-2".
                    stateMenu_key = state.getInput() + "-" + ussd_message;
                    shorCode = state.getShort_code();
                    state.setInput(stateMenu_key);
                    ussdMenu = setUssdMenu.get(stateMenu_key);
                    if (ussdMenu != null) {
                        String menuResp = ussdMenu.getResp();
                        String menuAct = ussdMenu.getAction();
                        String menuTopic = ussdMenu.getTopic();
                        menuServ = ussdMenu.getService();
                        String menuStatus = ussdMenu.getStatus();
                        if (menuStatus.equals("MENU")) {
                            // More input expected: keep the updated state.
                            PROCESSING_STATE.put(transaction_id, state);
                            if (!StringUtils.isBlank(menuResp)) {
                                ussdResponse.setUssdString(menuResp);
                                ussdResponse.setType(USSDType.USSDMSG_TYPE_APP_SEND_MENU);
                            }
                            if (!StringUtils.isBlank(menuAct)) {
                                processAction(menuTopic, menuServ, msisdn, shorCode, menuAct);
                            }
                            logger.info("USSD MENU : " + ussdMenu + " -----> Transaction : " + transaction_id);
                        } else if (menuStatus.equals("END")) {
                            // Terminal menu item: final response, session over.
                            if (!StringUtils.isBlank(menuResp)) {
                                ussdResponse.setUssdString(menuResp);
                                ussdResponse.setType(USSDType.USSDMSG_TYPE_APP_SEND_RSP);
                            }
                            if (!StringUtils.isBlank(menuAct)) {
                                processAction(menuTopic, menuServ, msisdn, shorCode, menuAct);
                            }
                            logger.info("END USSD MENU : " + ussdMenu + " -----> Transaction : " + transaction_id);
                        } else {
                            logger.info("THIS USSD REQUEST CANNOT BE PROCESS : " + ussdMenu + " -----> Transaction : " + transaction_id);
                        }
                    } else {
                        logger.info("THIS USSD REQUEST CANNOT BE PROCESS : " + ussdMenu + " -----> Transaction : " + transaction_id);
                    }
                }
                break;
            case USSDType.USSDMSG_TYPE_SUB_CANCEL:
                // NOTE(review): same inverted-looking `!containsKey` guard — as
                // written, remove() only runs when the key is absent (a no-op).
                if (PROCESSING_STATE != null && PROCESSING_STATE.size() > 0 && !PROCESSING_STATE.containsKey(transaction_id)) {
                    logger.info("CANCEL REQUEST : Remove --> state : " + PROCESSING_STATE.get(transaction_id) + " -----> Transaction : " + transaction_id);
                    PROCESSING_STATE.remove(transaction_id);
                } else {
                    logger.info("TRANSACTION NOT EXIST : -----> Transaction : " + transaction_id);
                }
                break;
            case USSDType.USSDMSG_TYPE_SUB_RECV_OK:
                // Acknowledgement from subscriber: nothing to do.
                break;
            case USSDType.USSDMSG_TYPE_TRANS_ERR:
                // NOTE(review): same inverted-looking `!containsKey` guard as above.
                if (PROCESSING_STATE != null && PROCESSING_STATE.size() > 0 && !PROCESSING_STATE.containsKey(transaction_id)) {
                    logger.info("ERROR REQUEST : Remove --> state : " + PROCESSING_STATE.get(transaction_id) + " -----> Transaction : " + transaction_id);
                    PROCESSING_STATE.remove(transaction_id);
                } else {
                    logger.info("TRANSACTION NOT EXIST : -----> Transaction : " + transaction_id);
                }
                break;
            default:
                logger.info("USSDMSG_TYPE NOT DEFINE : " + message_type + " -----> Transaction : " + transaction_id);
                break;
        }
        // Push the response back through the gateway.
        // NOTE(review): `response` is never inspected; send failures are silent here.
        List<String> response = senderGateway.sendSMS(menuServ, shortcode, msisdn, ussdResponse.getUssdString(), "ussd_client");
    }

    /**
     * Formats a receipt date using the configured pattern, falling back to
     * "yyMMddHHmm" when the configured pattern is missing or fails.
     *
     * @param dateValue date to format.
     * @return formatted date, or null if every attempt failed.
     */
    private String convertToDate(Date dateValue) {
        String result = null;
        SimpleDateFormat formatter = new SimpleDateFormat("yyMMddHHmm");
        if (smpp_dateFormat != null) {
            formatter = new SimpleDateFormat(smpp_dateFormat);
            try {
                result = formatter.format(dateValue);
            } catch (Exception e) {
                // Configured pattern failed — retry with the default pattern.
                logger.error("cannot convert " + dateValue + " to String following format : " + smpp_dateFormat);
                formatter = new SimpleDateFormat("yyMMddHHmm");
                try {
                    result = formatter.format(dateValue);
                } catch (Exception e1) {
                    logger.error("cannot convert " + dateValue + " to String following format yyMMddHHmm", e1);
                }
            }
        } else {
            try {
                result = formatter.format(dateValue);
            } catch (Exception e) {
                logger.error("cannot convert " + dateValue + " to String following format yyMMddHHmm", e);
            }
        }
        return result;
    }

    /**
     * Publishes a menu "action" to the application via Kafka, wrapped in a
     * Message_Exchg envelope keyed by a fresh transaction id.
     */
    private void processAction(String topic, String srv_name, String msisdn, String short_code, String content) {
        String trans_ID = Generator.getTransaction();
        Message_Exchg msg_exch = new Message_Exchg(trans_ID, srv_name, "", msisdn, short_code, content, "USSD");
        // get and check user from config file
        String message_send = ConverterJSON.convertMsgExchToJson(msg_exch);
        kafka_data = new ProducerRecord<String, String>(topic, trans_ID, message_send);
        producer.send(kafka_data, callback);
        logger.info("USSD Receive : " + message_send);
        logger.info("SUCCESS SEND TO APPLICATION : topic -->" + topic + " | " + msisdn + " --> " + short_code + " [msg = " + content + " ]");
    }

    /**
     * Extracts the short code from a dialed USSD string such as "*123*1#":
     * strips the leading '*' or '#', then truncates at the next '*' (or '#'
     * when no '*' is present). Returns null for strings that do not start
     * with '*' or '#'.
     */
    private String getShortCode(String ussdMsg) {
        String result = null;
        String val1 = null;
        String val2 = null;
        if ((ussdMsg.startsWith("*")) || (ussdMsg.startsWith("#"))) {
            val1 = ussdMsg.substring(1);
            val2 = substringBeforeLast(val1, "*");
            result = substringBeforeLast(val1, "*");
            // If no '*' was found, the two are equal — fall back to '#'.
            if (val2.equals(val1)) {
                result = substringBeforeLast(val1, "#");
            }
        }
        return result;
    }

    /**
     * Returns the substring of {@code str} before the separator, or the
     * whole string when either argument is blank or the separator is absent.
     *
     * NOTE(review): despite the "BeforeLast" name this uses indexOf (FIRST
     * occurrence), unlike Commons Lang's substringBeforeLast which uses
     * lastIndexOf — confirm which behavior is intended before renaming/fixing.
     */
    private static String substringBeforeLast(String str, String separator) {
        if ((StringUtils.isBlank(str)) || (StringUtils.isBlank(separator))) {
            return str;
        }
        int pos = str.indexOf(separator);
        if (pos == -1) {
            return str;
        }
        return str.substring(0, pos);
    }

    /**
     * Kafka send callback: logs delivery metadata on success, the exception
     * on failure. Shared (static) across all producer sends in this class.
     */
    private static class ProducerCallback implements Callback {
        @Override
        public void onCompletion(RecordMetadata recordMetadata, Exception e) {
            if (e != null) {
                logger.error("Error while producing message to topic :" + recordMetadata, e);
            } else {
                String message = String.format("Producer client ID : " + props.getProperty("client.id") + " -- Topic: %s -- Partition: %s -- offset: %s", recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset());
                logger.info(message);
            }
        }
    }
}
// VeriBlock NodeCore // Copyright 2017-2019 Xenios SEZC // All rights reserved. // https://www.veriblock.org // Distributed under the MIT software license, see the accompanying // file LICENSE or http://www.opensource.org/licenses/mit-license.php. package nodecore.api.ucp.arguments; public class UCPArgumentBlockHash extends UCPArgument { private final UCPArgument.UCPType type = UCPType.BLOCK_HASH; private final String data; public void throwValidationError(String data) { throw new IllegalArgumentException("\"" + data + "\" did not pass the preliminary validation of " + getClass().getCanonicalName() + " (" + type.getPreliminaryValidationPattern() + ")"); } /** * Constructor for parsing the serialized data type, useful when parsing a command. * Also the Constructor for serializing the actual type, useful when creating a command. * @param data */ public UCPArgumentBlockHash(String data) { if (data == null) { throw new IllegalArgumentException(getClass().getCanonicalName() + "'s constructor cannot be called with null data!"); } // Check the data against the initial sanity checks built into the type enum if (!type.preliminaryValidation(data)) { throwValidationError(data); } this.data = data; } /** * Gets the processed data contained within this argument, which has a Java type appropriate to the UCP type represented by this argument. * * @return The processed data */ public String getData() { return data; } /** * Gets the serialized version of this string which could be used to create an identical copy of this object. * @return The original data "sent over the wire" used to create this argument, or the serialized version created for sending over the wire. 
*/ @Override public String getSerialized() { return data; } /** * Get the corresponding UCP type which this class represents * @return The corresponding UCP type represented by this argument implementation */ @Override public UCPType getType() { return type; } /** * Gets the string representation of this argument's data: passthrough to .toString() for the underlying datatype * of the processed data (or the equivalent of the autoboxed version's toString if the processed data is a primitive). * @return String representation of the data represented by this argument */ @Override public String toString() { return data.toString(); } }
package uk.ac.liv.mzidlib.mzidobjects;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;

/**
 * Reflection helper that indexes all declared fields and methods of a target
 * object by simple name and makes them accessible for reading and invocation.
 *
 * <p>NOTE(review): members are keyed by name only, so overloaded methods
 * collapse to the last one returned by {@code getDeclaredMethods()} — confirm
 * callers never rely on overload resolution through the {@code methods} map.
 */
public class ObjectReflection implements Serializable {

    /** Declared methods of the target's class, keyed by method name. */
    public Map<String, Method> methods = new HashMap<>();

    /** Declared fields of the target's class, keyed by field name. */
    public Map<String, Field> fields = new HashMap<>();

    /** The object being reflected over. */
    public Object object;

    /**
     * Creates the reflection wrapper, indexes every declared field and method
     * of {@code object}'s class, and marks them all accessible.
     *
     * @param object target instance; must not be null
     */
    public ObjectReflection(Object object) {
        // Fixed: the original asserted !object.equals(null), which throws NPE
        // for a null target instead of failing the assertion.
        assert object != null;
        this.object = object;

        for (Method method : object.getClass().getDeclaredMethods()) {
            methods.put(method.getName(), method);
        }
        for (Field field : object.getClass().getDeclaredFields()) {
            fields.put(field.getName(), field);
        }

        // Retained from the original: targets are expected to declare at least
        // one field and one method.
        assert !fields.isEmpty();
        assert !methods.isEmpty();
        // Fixed: the original also asserted size/containsAll equality with the
        // declared-member arrays, which fails for any class with overloaded
        // methods (name collisions drop entries); those asserts were removed.

        // Fixed: the constructor previously duplicated these loops inline.
        setAllAccessible();
    }

    /**
     * Looks up a declared field by name.
     *
     * @param fieldname simple name of the field
     * @return the matching {@link Field}
     * @throws NoSuchFieldException if no declared field has that name
     */
    public Field getField(String fieldname) throws NoSuchFieldException {
        // Fixed: the declared NoSuchFieldException is now actually thrown
        // instead of relying on an assert.
        if (!fields.containsKey(fieldname)) {
            throw new NoSuchFieldException(fieldname);
        }
        Field tempfield = fields.get(fieldname);
        // Fixed: name compared with equals() rather than reference identity (==).
        assert tempfield.getName().equals(fieldname);
        return tempfield;
    }

    /**
     * Reads the current value of a declared field by name.
     *
     * @param fieldname simple name of the field
     * @return the field's value on the wrapped object
     * @throws NoSuchFieldException   if no declared field has that name
     * @throws IllegalAccessException if the field cannot be read
     */
    public Object getFieldObject(String fieldname) throws NoSuchFieldException, IllegalAccessException {
        // Fixed: String emptiness checked with isEmpty() instead of != "".
        assert fieldname != null && !fieldname.isEmpty();
        // Fixed: throws the declared NoSuchFieldException rather than a raw
        // AssertionError when the field is missing.
        if (!fields.containsKey(fieldname)) {
            throw new NoSuchFieldException(fieldname + " Field not in");
        }
        Field tempfield = fields.get(fieldname);
        assert tempfield.getName().equals(fieldname);
        return tempfield.get(object);
    }

    /**
     * Returns the object reflection is applied to.
     *
     * @return the wrapped object
     */
    public Object getObject() {
        return object;
    }

    /**
     * Looks up a declared method by name and argument values. Parameter types
     * are taken from {@code params[i].getClass()}, so primitive-typed
     * parameters will not match their boxed arguments — callers must pass
     * arguments whose runtime classes equal the declared parameter types.
     *
     * @param methodname simple name of the method
     * @param paramCount number of parameters
     * @param params     argument values used to derive the parameter classes
     * @return the matching {@link Method}, or null if lookup failed
     */
    public Method getMethod(String methodname, int paramCount, Object... params) {
        assert methodname != null && !methodname.isEmpty();
        assert methods.containsKey(methodname);
        assert paramCount > 0;
        assert params.length > 0;

        Class<?>[] classArray = new Class<?>[paramCount];
        for (int i = 0; i < paramCount; i++) {
            classArray[i] = params[i].getClass();
        }
        assert classArray.length == params.length;

        Method tempmethod = null;
        try {
            tempmethod = object.getClass().getDeclaredMethod(methodname, classArray);
            // Fixed: removed the bogus "assert tempmethod.isVarArgs()" — a
            // method resolved by explicit parameter classes need not be varargs.
        } catch (NoSuchMethodException | IllegalArgumentException e) {
            // Preserved best-effort behavior: log and return null on failure.
            e.printStackTrace();
        }
        return tempmethod;
    }

    /**
     * Looks up a zero-argument declared method by name.
     *
     * @param methodname simple name of the method
     * @return the matching {@link Method}
     * @throws NoSuchMethodException if no declared method has that name
     */
    public Method getMethod(String methodname) throws NoSuchMethodException {
        assert methodname != null && !methodname.isEmpty();
        // Fixed: throws the declared NoSuchMethodException instead of relying
        // on an assert.
        if (!methods.containsKey(methodname)) {
            throw new NoSuchMethodException(methodname);
        }
        Method tempmethod = methods.get(methodname);
        assert tempmethod.getName().equals(methodname);
        return tempmethod;
    }

    /**
     * Marks every indexed field and method accessible so private members can
     * be read and invoked.
     */
    public void setAllAccessible() {
        for (Method method : methods.values()) {
            method.setAccessible(true);
        }
        for (Field field : fields.values()) {
            field.setAccessible(true);
        }
    }

    /**
     * Invokes a method selected by name and argument values (see
     * {@link #getMethod(String, int, Object...)} for the matching caveats).
     *
     * @param methodname simple name of the method
     * @param paramCount number of parameters
     * @param params     arguments to pass
     * @return the invocation result, or null if lookup/invocation failed
     */
    public Object invokeMethodVarArgs(String methodname, int paramCount, Object... params) {
        assert methodname != null && !methodname.isEmpty();
        assert methods.containsKey(methodname);

        Class<?>[] classArray = new Class<?>[paramCount];
        for (int i = 0; i < paramCount; i++) {
            classArray[i] = params[i].getClass();
        }

        Object requiredObj = null;
        try {
            Method method = object.getClass().getDeclaredMethod(methodname, classArray);
            method.setAccessible(true);
            requiredObj = method.invoke(object, params);
        } catch (NoSuchMethodException | IllegalArgumentException | IllegalAccessException
                | InvocationTargetException e) {
            // Preserved best-effort behavior: log and return null on failure.
            e.printStackTrace();
        }
        return requiredObj;
    }

    /**
     * Invokes a zero-argument method by name.
     *
     * @param methodname simple name of the method
     * @return the invocation result, or null if lookup/invocation failed
     */
    public Object invokeMethod(String methodname) {
        assert methodname != null && !methodname.isEmpty();
        assert methods.containsKey(methodname);

        Object requiredObj = null;
        try {
            Method method = object.getClass().getDeclaredMethod(methodname);
            method.setAccessible(true);
            requiredObj = method.invoke(object);
        } catch (NoSuchMethodException | IllegalArgumentException | IllegalAccessException
                | InvocationTargetException e) {
            // Preserved best-effort behavior: log and return null on failure.
            e.printStackTrace();
        }
        return requiredObj;
    }

    /**
     * Human-readable dump of the reflected class: its names plus every indexed
     * field and method name.
     *
     * @return description string
     */
    @Override
    public String toString() {
        assert object != null;
        StringBuilder tostring = new StringBuilder(
                object.getClass().getCanonicalName() + "--" + object.getClass().getSimpleName() + "--");
        tostring.append("\nFields names :");
        assert !fields.isEmpty();
        assert !methods.isEmpty();
        for (Field field : fields.values()) {
            tostring.append("\n").append(field.getName());
        }
        tostring.append("\nMethods names :");
        for (Method method : methods.values()) {
            tostring.append("\n").append(method.getName());
        }
        return tostring.toString();
    }
}
/*
 * Copyright 2011 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.messaging.remote.internal;

import org.gradle.messaging.remote.internal.protocol.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;
import java.util.Set;
import java.util.UUID;

/**
 * Consumer-side protocol layer: announces this consumer on a channel, tracks
 * the set of live producers, forwards Requests up the stack, and coordinates a
 * graceful shutdown by waiting for every known producer to stop before
 * announcing itself unavailable.
 */
public class ReceiveProtocol implements Protocol<Message> {
    private static final Logger LOGGER = LoggerFactory.getLogger(ReceiveProtocol.class);
    // Unique identity of this consumer, echoed in every outgoing message.
    private final UUID id;
    private final String displayName;
    private final String channelKey;
    // Producer ids currently attached to this consumer; shutdown completes
    // only once this set drains.
    private final Set<Object> producers = new HashSet<Object>();
    private ProtocolContext<Message> context;
    // Set once a WorkerStopping has been seen; from then on we are draining.
    private boolean stopping;

    public ReceiveProtocol(UUID id, String displayName, String channelKey) {
        this.id = id;
        this.displayName = displayName;
        this.channelKey = channelKey;
    }

    /** Announces this consumer on the channel as soon as the stack starts. */
    public void start(ProtocolContext<Message> context) {
        this.context = context;
        LOGGER.debug("Starting receiver {}.", id);
        context.dispatchOutgoing(new ConsumerAvailable(id, displayName, channelKey));
    }

    /**
     * Handles messages from producers: handshakes (ProducerReady/Stopped/
     * Unavailable) mutate the producer set; Requests are forwarded up;
     * anything else is a protocol error.
     */
    public void handleIncoming(Message message) {
        if (message instanceof ProducerReady) {
            // Handshake: register the producer and acknowledge with ConsumerReady.
            LOGGER.debug("Producer ready: {}", message);
            ProducerReady producerReady = (ProducerReady) message;
            producers.add(producerReady.getProducerId());
            context.dispatchOutgoing(new ConsumerReady(id, producerReady.getProducerId()));
        } else if (message instanceof ProducerStopped) {
            // Orderly producer shutdown: acknowledge, then drop it from the set.
            LOGGER.debug("Producer stopped: {}", message);
            ProducerStopped producerStopped = (ProducerStopped) message;
            context.dispatchOutgoing(new ConsumerStopped(id, producerStopped.getProducerId()));
            removeProducer(producerStopped.getProducerId());
        } else if (message instanceof ProducerUnavailable) {
            // Producer vanished without the stop handshake — no ack possible.
            LOGGER.debug("Producer unavailable: {}", message);
            ProducerUnavailable producerUnavailable = (ProducerUnavailable) message;
            removeProducer(producerUnavailable.getId());
        } else if (message instanceof ProducerAvailable) {
            // Ignore these broadcasts
            return;
        } else if (message instanceof Request) {
            context.dispatchIncoming(message);
        } else {
            throw new IllegalArgumentException(String.format("Unexpected incoming message received: %s", message));
        }
    }

    // Drops a producer; if we are draining and it was the last one, completes
    // the shutdown sequence.
    private void removeProducer(Object producerId) {
        producers.remove(producerId);
        if (stopping && producers.isEmpty()) {
            LOGGER.debug("All producers finished. Stopping now.");
            allProducersFinished();
        }
    }

    /**
     * Handles messages from the layer above: WorkerStopping starts the drain;
     * MessageCredits are flow-control noise and are discarded.
     */
    public void handleOutgoing(Message message) {
        if (message instanceof WorkerStopping) {
            workerStopped();
        } else if (message instanceof MessageCredits) {
            LOGGER.debug("Discarding {}.", message);
        } else {
            throw new IllegalArgumentException(String.format("Unexpected outgoing message dispatched: %s", message));
        }
    }

    // Starts the drain: stop immediately if no producers are attached,
    // otherwise ask each one to stop and wait for their acknowledgements.
    private void workerStopped() {
        stopping = true;
        if (producers.isEmpty()) {
            LOGGER.debug("No producers. Stopping now.");
            allProducersFinished();
            return;
        }
        LOGGER.debug("Waiting for producers to finish. Stopping later. Producers: {}", producers);
        for (Object producer : producers) {
            context.dispatchOutgoing(new ConsumerStopping(id, producer));
        }
    }

    // Final step of shutdown: broadcast unavailability and signal end-of-stream
    // to the layer above.
    private void allProducersFinished() {
        context.dispatchOutgoing(new ConsumerUnavailable(id));
        context.dispatchIncoming(new EndOfStreamEvent());
    }

    // Only legal after the drain has started (WorkerStopping was seen).
    public void stopRequested() {
        assert stopping;
        context.stopped();
    }
}
package aima.core.environment.eightpuzzle;

import aima.core.agent.Action;
import aima.core.search.framework.problem.BidirectionalProblem;
import aima.core.search.framework.problem.GeneralProblem;
import aima.core.search.framework.problem.Problem;

import java.util.function.Predicate;

/**
 * Eight-puzzle search problem that can be explored in both directions: the
 * forward problem runs from the given initial state to the standard goal
 * state, while the reverse problem runs from the goal state back to the
 * initial state.
 *
 * @author Ruediger Lunde
 */
public class BidirectionalEightPuzzleProblem extends GeneralProblem<EightPuzzleBoard, Action>
        implements BidirectionalProblem<EightPuzzleBoard, Action> {

    private final Problem<EightPuzzleBoard, Action> reverseProblem;

    public BidirectionalEightPuzzleProblem(EightPuzzleBoard initialState) {
        // Ejercicio 7 — original two-argument form kept for reference:
        //super(initialState, EightPuzzleFunctions::getActions, EightPuzzleFunctions::getResult,
        //        Predicate.isEqual(EightPuzzleFunctions.GOAL_STATE));
        super(initialState, EightPuzzleFunctions::getActions, EightPuzzleFunctions::getResult,
                Predicate.isEqual(EightPuzzleFunctions.GOAL_STATE), EightPuzzleFunctions::stepCostFunction);

        // Reverse direction: start from the goal and search back towards the
        // initial state. Note it uses the default step costs, unlike the
        // forward problem above.
        reverseProblem = new GeneralProblem<>(EightPuzzleFunctions.GOAL_STATE,
                EightPuzzleFunctions::getActions, EightPuzzleFunctions::getResult,
                Predicate.isEqual(initialState));
    }

    // Fixed: added @Override on the BidirectionalProblem interface methods.
    @Override
    public Problem<EightPuzzleBoard, Action> getOriginalProblem() {
        return this;
    }

    @Override
    public Problem<EightPuzzleBoard, Action> getReverseProblem() {
        return reverseProblem;
    }
}
package org.zabica.webcontest.common.store;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.hypergraphdb.HGHandle;
import org.hypergraphdb.HGQuery.hg;
import org.hypergraphdb.HyperGraph;
import org.hypergraphdb.indexing.ByPartIndexer;
import org.zabica.webcontest.common.user.User;
import org.zabica.webcontest.common.venue.Conference;

/**
 * HyperGraphDB-backed store for users and conferences. Users are keyed by
 * e-mail (indexed), conferences by their "id" property.
 */
public class PersistentStore {

    private String storeFile;
    private HyperGraph graph = null;

    public PersistentStore() {
    }

    public PersistentStore(String storeFile) {
        this.setStoreFile(storeFile);
        init();
    }

    /**
     * Opens (or creates) the graph database at {@link #storeFile} and
     * registers the e-mail index for users. Idempotent: a second call is a
     * no-op while the graph is open.
     */
    public void init() {
        System.out.println("Store file: " + this.storeFile);
        if (this.graph == null) {
            this.graph = new HyperGraph(this.storeFile);
            HGHandle userTypeH = graph.getTypeSystem().getTypeHandle(User.class);
            graph.getIndexManager().register(
                    new ByPartIndexer<>(userTypeH, "email"));
            HGHandle confTypeH = graph.getTypeSystem().getTypeHandle(Conference.class);
            // Conference indexes disabled for now — kept for reference.
            // graph.getIndexManager().register(
            //         new ByPartIndexer<>(confTypeH, "start"));
            // graph.getIndexManager().register(
            //         new ByPartIndexer<>(confTypeH, "location"));
            // graph.runMaintenance();
        }
    }

    /** Closes the underlying graph database. */
    public void deinit() {
        this.graph.close();
    }

    public String getStoreFile() {
        return storeFile;
    }

    public void setStoreFile(String storeFile) {
        this.storeFile = storeFile;
    }

    /**
     * Looks a user up by e-mail.
     *
     * @return the user, or null when no user has that e-mail
     */
    public User getUser(String email) {
        return this.graph.getOne(hg.and(hg.type(User.class), hg.eq("email", email)));
    }

    /**
     * Adds a user unless one with the same e-mail already exists.
     *
     * @return true if the user was added, false if the e-mail was taken
     */
    public boolean addUser(User newUser) {
        User u = getUser(newUser.getEmail());
        if (u == null) {
            this.graph.add(newUser);
            return true;
        }
        System.out.println("User: " + u.getEmail() + " exists already");
        return false;
    }

    /**
     * Updates the stored user's locale from the given user object.
     *
     * @return true on success, false if no user with that e-mail exists
     */
    public boolean updateUser(User user) {
        User u = getUser(user.getEmail());
        // Fixed: guard against a missing user — previously this NPE'd on
        // u.setLocale(...).
        if (u == null) {
            return false;
        }
        u.setLocale(user.getLocale());
        return this.graph.update(u);
    }

    /**
     * Removes the user with the given e-mail.
     *
     * @return true if a user was removed, false if none existed
     */
    public boolean removeUser(String email) {
        User u = getUser(email);
        // Fixed: the original called graph.getHandle(u) BEFORE the null check,
        // so a missing user caused an NPE instead of returning false.
        if (u == null) {
            return false;
        }
        HGHandle handle = this.graph.getHandle(u);
        return this.graph.remove(handle);
    }

    public List<User> getAllUsers() {
        return this.graph.getAll(hg.typePlus(User.class));
    }

    /**
     * Returns conferences starting after {@code date} (defaulting to now) that
     * carry at least one of the given tags; with no tags, all matching
     * conferences are returned.
     */
    public List<Conference> getConferences(Date date, List<String> tags) {
        List<Conference> selected_confs = new ArrayList<>();
        if (date == null)
            date = new Date();
        List<Conference> confs = this.graph.getAll(hg.and(hg.type(Conference.class), hg.gt("start", date)));
        if (tags == null || tags.isEmpty()) {
            return confs;
        }
        for (Conference c : confs) {
            List<String> ts = c.getTags();
            if (ts == null)
                continue;
            for (String tag : tags) {
                if (ts.contains(tag)) {
                    selected_confs.add(c);
                    break;
                }
            }
        }
        return selected_confs;
    }

    public boolean addConference(Conference conf) {
        this.graph.add(conf);
        return true;
    }

    /**
     * Looks a conference up by its id property.
     *
     * @return the conference, or null when not found
     */
    public Conference getConference(String uuid) {
        Conference conf = this.graph.getOne(hg.and(hg.type(Conference.class), hg.eq("id", uuid)));
        return conf;
    }

    /** Removes the conference with the given id; no-op when absent. */
    public void remConference(String uuid) {
        Conference conf = this.graph.getOne(hg.and(hg.type(Conference.class), hg.eq("id", uuid)));
        // Fixed: guard against a missing conference — previously getHandle(null)
        // / remove(null) could NPE.
        if (conf == null) {
            return;
        }
        HGHandle handle = this.graph.getHandle(conf);
        this.graph.remove(handle);
    }
}
package lia.util.timestamp;

/**
 * Immutable pairing of an enum-typed state with a time-stamped value.
 *
 * @param <T> enum type describing the state
 * @param <V> type of the carried value
 */
public class TimestampableStateValue<T extends Enum<T>, V> extends TimeStampedValue<V>{

    // State captured alongside the value; exposed via state().
    private final T state;

    /** Creates a state/value pair stamped with the current time. */
    public TimestampableStateValue(T state, V value) {
        this(state, value, new Timestamp());
    }

    /**
     * Creates a state/value pair with an explicit timestamp.
     *
     * NOTE(review): the {@code timestamp} parameter is accepted but never
     * used — super(value) is called without it, so the explicit timestamp is
     * silently dropped and the superclass presumably stamps its own time.
     * Looks like this should forward the timestamp to a matching superclass
     * constructor if one exists — confirm against TimeStampedValue.
     */
    public TimestampableStateValue(T state, V value, Timestamp timestamp) {
        super(value);
        this.state = state;
    }

    /** @return the state captured with the value */
    public T state() {
        return state;
    }
}
package uk.org.glendale.yags.core.stats;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

/**
 * Persistent entity describing a skill. A skill is uniquely identified by its
 * URI and carries a human-readable name plus a free-text description.
 */
@Entity
public class Skill {

    /** Primary key: lower-case, hyphen-separated identifier. */
    @Id
    private String uri;

    /** Human-readable name of the skill. */
    private String name;

    /** Free-text description of the skill. */
    private String description;

    /** No-argument constructor required by JPA. */
    public Skill() {
    }

    /**
     * Gets the URI of this skill. This is used to uniquely identify
     * the skill. An uri will use [a-z] and hyphens only.
     *
     * @return URI of this skill.
     */
    public String getUri() {
        return uri;
    }

    /** Sets the unique URI of this skill. */
    public void setUri(String uri) {
        this.uri = uri;
    }

    /** @return human-readable name of this skill */
    public String getName() {
        return name;
    }

    /** Sets the human-readable name of this skill. */
    public void setName(String name) {
        this.name = name;
    }

    /** @return free-text description of this skill */
    public String getDescription() {
        return description;
    }

    /** Sets the free-text description of this skill. */
    public void setDescription(String description) {
        this.description = description;
    }
}
/*
 * Copyright 2015-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package sockslib.utils;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

/**
 * The class <code>StreamUtil</code> is a tool class for stream.
 *
 * @author Youchao Feng
 * @version 1.0
 * @date Oct 20, 2015 2:55 PM
 */
public class StreamUtil {

  /**
   * Maps a single-byte read result to the byte value, treating end-of-stream
   * as an error.
   *
   * @param b result of {@link InputStream#read()}
   * @return {@code b} unchanged when it is a valid byte value
   * @throws IOException if {@code b} is negative (end of stream)
   */
  public static int checkEnd(int b) throws IOException {
    if (b < 0) {
      throw new IOException("End of stream");
    } else {
      return b;
    }
  }

  /**
   * Reads exactly {@code length} bytes from the stream.
   *
   * @param inputStream source stream
   * @param length      number of bytes to read
   * @return array of exactly {@code length} bytes
   * @throws IOException if the stream ends before {@code length} bytes are read
   */
  public static byte[] read(InputStream inputStream, int length) throws IOException {
    byte[] bytes = new byte[length];
    // Fixed: bulk reads instead of the original one-byte-at-a-time loop;
    // end-of-stream before 'length' bytes still raises "End of stream".
    int offset = 0;
    while (offset < length) {
      int n = inputStream.read(bytes, offset, length - offset);
      if (n < 0) {
        throw new IOException("End of stream");
      }
      offset += n;
    }
    return bytes;
  }

  /**
   * Reads exactly {@code length} bytes and decodes them as UTF-8.
   *
   * @param inputStream source stream
   * @param length      number of bytes to read
   * @return the decoded string
   * @throws IOException if the stream ends before {@code length} bytes are read
   */
  public static String readString(InputStream inputStream, int length) throws IOException {
    // Fixed: StandardCharsets.UTF_8 instead of the charset-name lookup, which
    // forced an impossible UnsupportedEncodingException path.
    return new String(read(inputStream, length), StandardCharsets.UTF_8);
  }
}
/*
 * (C) Copyright 2018 Boni Garcia (http://bonigarcia.github.io/)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package io.github.bonigarcia.seljup.test.docker;

import static io.github.bonigarcia.seljup.BrowserType.CHROME;
import static io.github.bonigarcia.seljup.BrowserType.FIREFOX;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;

import java.io.File;

import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.openqa.selenium.remote.RemoteWebDriver;

import io.github.bonigarcia.seljup.DockerBrowser;
import io.github.bonigarcia.seljup.SeleniumJupiter;

/**
 * Integration test driving two dockerized browsers (Chrome and Firefox) at
 * once with VNC recording enabled, verifying both load the project page.
 * PER_CLASS lifecycle lets the non-static @BeforeAll configure the shared
 * extension instance.
 */
@TestInstance(PER_CLASS)
class DockerVncMixedJupiterTest {

    @RegisterExtension
    static SeleniumJupiter seleniumJupiter = new SeleniumJupiter();

    // NOTE(review): declared but never assigned or read in this test class.
    File htmlFile;

    @BeforeAll
    void setup() {
        // Enable VNC so the dockerized browser sessions can be watched, and
        // skip persisted preferences so the run starts from a clean config.
        seleniumJupiter.getConfig().setVnc(true);
        seleniumJupiter.getConfig().setUsePreferences(false);
    }

    /**
     * Opens the same page in a dockerized Chrome and Firefox and checks the
     * title in both.
     */
    @Test
    void testHtmlVnc(
            @DockerBrowser(type = CHROME) RemoteWebDriver driver1,
            @DockerBrowser(type = FIREFOX) RemoteWebDriver driver2)
            throws InterruptedException {
        driver1.get("https://bonigarcia.github.io/selenium-jupiter/");
        driver2.get("https://bonigarcia.github.io/selenium-jupiter/");

        assertThat(driver1.getTitle(),
                containsString("JUnit 5 extension for Selenium"));
        assertThat(driver2.getTitle(),
                containsString("JUnit 5 extension for Selenium"));

        // Left in for manual debugging: pause to inspect the VNC sessions.
        // Thread.sleep(50000);
    }
}
package com.simibubi.create.content.logistics.block.funnel;

import com.simibubi.create.foundation.data.SpecialBlockStateGen;
import com.tterrag.registrate.providers.DataGenContext;
import com.tterrag.registrate.providers.RegistrateBlockstateProvider;

import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.state.properties.BlockStateProperties;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.model.generators.ModelFile;

/**
 * Data-gen blockstate generator for belt funnels. Produces one model per
 * (shape, powered) combination, re-texturing a shared parent model with the
 * material-specific funnel textures for the given type (e.g. brass/andesite).
 */
public class BeltFunnelGenerator extends SpecialBlockStateGen {

    // Material prefix used to build texture paths, e.g. "brass".
    private String type;
    // Particle texture taken from the base material block.
    private ResourceLocation materialBlockTexture;

    public BeltFunnelGenerator(String type, ResourceLocation materialBlockTexture) {
        this.type = type;
        this.materialBlockTexture = materialBlockTexture;
    }

    @Override
    protected int getXRotation(BlockState state) {
        return 0;
    }

    @Override
    protected int getYRotation(BlockState state) {
        // Funnel models face opposite to the blockstate's horizontal facing.
        return horizontalAngle(state.get(BeltFunnelBlock.HORIZONTAL_FACING)) + 180;
    }

    @Override
    public <T extends Block> ModelFile getModel(DataGenContext<Block, T> ctx, RegistrateBlockstateProvider prov,
        BlockState state) {
        // method_28500 is an unmapped intermediary name — presumably the
        // Optional-returning property getter on BlockState; confirm mapping.
        boolean powered = state.method_28500(BlockStateProperties.POWERED).orElse(false);
        String shapeName = state.get(BeltFunnelBlock.SHAPE)
            .getString();
        String poweredSuffix = powered ? "_powered" : "";
        // NOTE(review): this yields "name_" when unpowered and "name__powered"
        // when powered (double underscore). Looks unintended — but generated
        // asset names may already rely on it; confirm before changing.
        String name = ctx.getName() + "_" + poweredSuffix;
        return prov.models()
            .withExistingParent(name + "_" + shapeName, prov.modLoc("block/belt_funnel/block_" + shapeName))
            .texture("particle", materialBlockTexture)
            .texture("2", prov.modLoc("block/" + type + "_funnel_neutral"))
            .texture("2_1", prov.modLoc("block/" + type + "_funnel_push"))
            .texture("2_2", prov.modLoc("block/" + type + "_funnel_pull"))
            .texture("3", prov.modLoc("block/" + type + "_funnel_back"))
            .texture("5", prov.modLoc("block/" + type + "_funnel_tall" + poweredSuffix))
            .texture("6", prov.modLoc("block/" + type + "_funnel" + poweredSuffix))
            .texture("7", prov.modLoc("block/" + type + "_funnel_plating"));
    }
}
package com.team2910.lib.autos;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.team1678.frc2021.DriveMotionPlanner;
import com.team1678.frc2021.subsystems.Swerve;
import com.team2910.lib.control.*;
import com.team2910.lib.math.RigidTransform2;
import com.team2910.lib.math.Rotation2;
import com.team2910.lib.math.Vector2;
import com.team2910.lib.math.spline.CubicHermiteSpline;
import com.team2910.lib.math.spline.Spline;
import com.team2910.lib.util.InterpolatingDouble;
import com.team2910.lib.util.InterpolatingTreeMap;
import com.team2910.lib.util.Side;

import edu.wpi.first.wpilibj.Timer;

/**
 * Builds and holds the autonomous drive trajectories. Each trajectory is
 * sampled at {@link #SAMPLE_DISTANCE} and constrained by the velocity and
 * acceleration limits below.
 */
public class AutonomousTrajectories {

    Swerve mSwerve = Swerve.getInstance();

    // Distance (in path units) between sampled trajectory points.
    private static final double SAMPLE_DISTANCE = 0.1;

    // Kinematic limits applied to every generated trajectory.
    private static final double kMaxVelocity = 50.0;
    private static final double kMaxAccel = 50.0;
    private static final double kMaxDecel = 72.0;
    private static final double kMaxVoltage = 11.0;

    // Points
    private final RigidTransform2 testPoint1 = new RigidTransform2(new Vector2(0.0, 0.0), Rotation2.ZERO);
    private final RigidTransform2 testPoint2 = new RigidTransform2(new Vector2(20.0, 0.0), Rotation2.ZERO);

    // Trajectories
    private Trajectory testPath;

    /**
     * Generates all trajectories up front so autonomous selection is cheap at
     * match time.
     */
    public AutonomousTrajectories() {
        MaxAccelerationConstraint maxAccel = new MaxAccelerationConstraint(kMaxAccel, kMaxDecel);
        MaxVelocityConstraint maxVel = new MaxVelocityConstraint(kMaxVelocity);
        // FeedforwardConstraint maxVolts = new FeedforwardConstraint(kMaxVoltage, kMaxVelocity, kMaxAccel);

        // Fixed: Java-style array declaration instead of C-style "constraints[]".
        TrajectoryConstraint[] constraints = {maxAccel, maxVel};

        // Simple straight-line path from testPoint1 to testPoint2.
        testPath = new Trajectory(
            new SimplePathBuilder(testPoint1.getTranslation(), testPoint1.getRotation())
                .lineTo(testPoint2.getTranslation(), testPoint2.getRotation())
                .build(),
            constraints, SAMPLE_DISTANCE
        );
    }

    // Trajectory Methods

    /** @return the pre-built straight-line test trajectory */
    public Trajectory getTestPath() {
        return testPath;
    }
}
package saiba.bml.feedback;

import hmi.xml.XMLFormatting;
import hmi.xml.XMLNameSpace;
import hmi.xml.XMLStructureAdapter;
import hmi.xml.XMLTokenizer;

import java.util.HashMap;
import java.util.List;
import java.util.Set;

import saiba.bml.core.CustomAttributeHandler;

import com.google.common.collect.ImmutableList;

/**
 * Skeleton class for BMLFeedback
 *
 * Provides the shared custom-attribute machinery (delegated to a
 * {@link CustomAttributeHandler}) and XML serialization helpers used by all
 * concrete feedback types.
 *
 * @author herwinvw
 */
public class AbstractBMLFeedback extends XMLStructureAdapter implements BMLFeedback {
    // All custom-parameter operations below delegate to this handler.
    protected CustomAttributeHandler caHandler = new CustomAttributeHandler();

    /** @return the value of the named custom float parameter */
    public float getCustomFloatParameterValue(String name) {
        return caHandler.getCustomFloatParameterValue(name);
    }

    /** Registers a custom string parameter value under the given name. */
    public void addCustomStringParameterValue(String name, String value) {
        caHandler.addCustomStringParameterValue(name, value);
    }

    /** Registers a custom float parameter value under the given name. */
    public void addCustomFloatParameterValue(String name, float value) {
        caHandler.addCustomFloatParameterValue(name, value);
    }

    /** @return the value of the named custom string parameter */
    public String getCustomStringParameterValue(String name) {
        return caHandler.getCustomStringParameterValue(name);
    }

    /** @return true if a custom string parameter with this name is set */
    public boolean specifiesCustomStringParameter(String name) {
        return caHandler.specifiesCustomStringParameter(name);
    }

    /** @return true if a custom float parameter with this name is set */
    public boolean specifiesCustomFloatParameter(String name) {
        return caHandler.specifiesCustomFloatParameter(name);
    }

    /** @return true if any custom parameter with this name is set */
    public boolean specifiesCustomParameter(String name) {
        return caHandler.specifiesCustomParameter(name);
    }

    /** @return true if the named custom parameter satisfies the given value constraint */
    public boolean satisfiesCustomConstraint(String name, String value) {
        return caHandler.satisfiesCustomConstraint(name, value);
    }

    /**
     * Parses custom attributes out of the given attribute map during XML
     * decoding; delegated to the handler.
     */
    public void decodeCustomAttributes(HashMap<String, String> attrMap, XMLTokenizer tokenizer,
            Set<String> floatAttributes, Set<String> stringAttributes, XMLStructureAdapter beh)
    {
        caHandler.decodeCustomAttributes(attrMap, tokenizer, floatAttributes, stringAttributes, beh);
    }

    /** Appends the custom attributes to an XML serialization buffer. */
    public StringBuilder appendCustomAttributeString(StringBuilder buf, XMLFormatting fmt)
    {
        return caHandler.appendCustomAttributeString(buf, fmt);
    }

    // Sentinel for "time not known"; -Double.MAX_VALUE so it never collides
    // with a real timestamp.
    public static final double UNKNOWN_TIME = -Double.MAX_VALUE;

    /**
     * Serializes this feedback to an XML string using the given namespaces
     * (varargs convenience over the List overload).
     */
    public String toBMLFeedbackString(XMLNameSpace... xmlNamespaces)
    {
        return toBMLFeedbackString(ImmutableList.copyOf(xmlNamespaces));
    }

    /**
     * Serializes this feedback to an XML string using the given namespace
     * list.
     */
    public String toBMLFeedbackString(List<XMLNameSpace> xmlNamespaceList)
    {
        StringBuilder buf = new StringBuilder();
        appendXML(buf, new XMLFormatting(), xmlNamespaceList);
        return buf.toString();
    }
}
package application; public class Cat { private String name; //고양이 이름 public static final String FOOD = "고양이 사료"; //final은 상수(변하지 않는 값) //static 변수는 객체에서 공유 가능함. private static int count = 0; //생성한 고양이의 숫자 public Cat(String name) { //생성자는 return 타입이 없음(일반 메소드랑 다름) this.name = name; count++; //고양이 객체 생성 시 static count가 +1이 된다. } @Override public String toString() { return "Cat [이름 = " + name + "]"; } public static int getCount() { //String n = name; => static 메소드는 인스턴스(객체) 변수를 사용할 수 없다. return count; } }
package com.kh.onefit.train.model.vo;

import java.sql.Date;
import java.text.SimpleDateFormat;

/**
 * Value object describing one PT (personal training) lesson slot: the member,
 * their trainer, the remaining/total session counts, and the schedule's
 * date/time both as {@link java.sql.Date} values and as pre-formatted Strings.
 *
 * NOTE(review): SimpleDateFormat is imported but unused here; the
 * "scEndtString" accessor names carry a typo ("Endt") that is part of the
 * public interface and therefore left unchanged.
 */
public class TrainerLesson {
	private int userNum;          // PT member number
	private String userName;      // PT member name
	private String phone;         // PT member phone number
	private int trNum;            // trainer number
	private int count;            // remaining PT session count
	private int totalCount;       // total number of paid PT sessions
	private int scNum;            // schedule number
	private Date scDate;          // lesson date
	private Date scStart;         // lesson start time
	private Date scEnd;           // lesson end time
	private String chName;
	private String scDateString;
	private String scStartString;
	private String scEndtString;

	/** No-argument constructor. */
	public TrainerLesson() {}

	/** Full constructor initializing every field. */
	public TrainerLesson(int userNum, String userName, String phone, int trNum, int count, int totalCount, int scNum,
			Date scDate, Date scStart, Date scEnd, String chName, String scDateString, String scStartString,
			String scEndtString) {
		super();
		this.userNum = userNum;
		this.userName = userName;
		this.phone = phone;
		this.trNum = trNum;
		this.count = count;
		this.totalCount = totalCount;
		this.scNum = scNum;
		this.scDate = scDate;
		this.scStart = scStart;
		this.scEnd = scEnd;
		this.chName = chName;
		this.scDateString = scDateString;
		this.scStartString = scStartString;
		this.scEndtString = scEndtString;
	}

	public int getUserNum() {
		return userNum;
	}

	public void setUserNum(int userNum) {
		this.userNum = userNum;
	}

	public String getUserName() {
		return userName;
	}

	public void setUserName(String userName) {
		this.userName = userName;
	}

	public String getPhone() {
		return phone;
	}

	public void setPhone(String phone) {
		this.phone = phone;
	}

	public int getTrNum() {
		return trNum;
	}

	public void setTrNum(int trNum) {
		this.trNum = trNum;
	}

	public int getCount() {
		return count;
	}

	public void setCount(int count) {
		this.count = count;
	}

	public int getTotalCount() {
		return totalCount;
	}

	public void setTotalCount(int totalCount) {
		this.totalCount = totalCount;
	}

	public int getScNum() {
		return scNum;
	}

	public void setScNum(int scNum) {
		this.scNum = scNum;
	}

	public Date getScDate() {
		return scDate;
	}

	public void setScDate(Date scDate) {
		this.scDate = scDate;
	}

	public Date getScStart() {
		return scStart;
	}

	public void setScStart(Date scStart) {
		this.scStart = scStart;
	}

	public Date getScEnd() {
		return scEnd;
	}

	public void setScEnd(Date scEnd) {
		this.scEnd = scEnd;
	}

	// String forms kept to carry the formatted date/time values
	public String getScStartString() {
		return scStartString;
	}

	public void setScStartString(String scStartString) {
		this.scStartString = scStartString;
	}

	public String getScEndtString() {
		return scEndtString;
	}

	public void setScEndtString(String scEndtString) {
		this.scEndtString = scEndtString;
	}

	public String getScDateString() {
		return scDateString;
	}

	public void setScDateString(String scDateString) {
		this.scDateString = scDateString;
	}

	public String getChName() {
		return chName;
	}

	public void setChName(String chNum) {
		this.chName = chNum;
	}

	@Override
	public String toString() {
		return "TrainerLesson [userNum=" + userNum + ", userName=" + userName + ", phone=" + phone + ", trNum="
				+ trNum + ", count=" + count + ", totalCount=" + totalCount + ", scNum=" + scNum + ", scDate="
				+ scDate + ", scStart=" + scStart + ", scEnd=" + scEnd +", scStartString=" +scStartString
				+", scEndtString=" +scEndtString +", scDatetString=" +scDateString+", chName=" +chName+"]";
	}
}
/* * MIT License * * Copyright (c) 2022 MASES s.r.l. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ /************************************************************************************** * <auto-generated> * This code was generated from a template using JCOReflector * * Manual changes to this file may cause unexpected behavior in your application. * Manual changes to this file will be overwritten if the code is regenerated. 
* </auto-generated> *************************************************************************************/

package microsoft.entityframeworkcore.sqlserver.query.internal;

import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;

// Import section
import microsoft.entityframeworkcore.query.RelationalCompiledQueryCacheKeyGenerator;
import microsoft.entityframeworkcore.query.CompiledQueryCacheKeyGeneratorDependencies;
import microsoft.entityframeworkcore.query.RelationalCompiledQueryCacheKeyGeneratorDependencies;
import microsoft.entityframeworkcore.sqlserver.storage.internal.ISqlServerConnection;
import microsoft.entityframeworkcore.sqlserver.storage.internal.ISqlServerConnectionImplementation;
import system.linq.expressions.Expression;

/**
 * The base .NET class managing Microsoft.EntityFrameworkCore.SqlServer.Query.Internal.SqlServerCompiledQueryCacheKeyGenerator, Microsoft.EntityFrameworkCore.SqlServer, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60.
 * <p>
 * Auto-generated JCOBridge proxy: every member forwards to the wrapped .NET
 * instance through reflective invocation, so code changes belong in the
 * generator, not in this file.
 *
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/Microsoft.EntityFrameworkCore.SqlServer.Query.Internal.SqlServerCompiledQueryCacheKeyGenerator" target="_top">https://docs.microsoft.com/en-us/dotnet/api/Microsoft.EntityFrameworkCore.SqlServer.Query.Internal.SqlServerCompiledQueryCacheKeyGenerator</a>
 */
public class SqlServerCompiledQueryCacheKeyGenerator extends RelationalCompiledQueryCacheKeyGenerator {
    /**
     * Fully qualified assembly name: Microsoft.EntityFrameworkCore.SqlServer, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60
     */
    public static final String assemblyFullName = "Microsoft.EntityFrameworkCore.SqlServer, Version=6.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60";
    /**
     * Assembly name: Microsoft.EntityFrameworkCore.SqlServer
     */
    public static final String assemblyShortName = "Microsoft.EntityFrameworkCore.SqlServer";
    /**
     * Qualified class name: Microsoft.EntityFrameworkCore.SqlServer.Query.Internal.SqlServerCompiledQueryCacheKeyGenerator
     */
    public static final String className = "Microsoft.EntityFrameworkCore.SqlServer.Query.Internal.SqlServerCompiledQueryCacheKeyGenerator";

    // Bridge bound to the declaring assembly; used for type resolution and AddReference.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // Wrapped .NET instance; null until a constructor or setJCOInstance assigns it.
    JCObject classInstance = null;

    /**
     * Resolves the managed {@link JCType} for {@link #className}.
     * Returns null (after logging the failure) when resolution throws,
     * so callers must tolerate a null {@link #classType}.
     */
    static JCType createType() {
        try {
            String classToCreate = className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s", (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }

    // Adds an assembly reference to the bridge, translating native bridge
    // exceptions into their managed equivalents.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    /**
     * Internal constructor. Use with caution: wraps an existing bridged
     * instance rather than creating a new managed object.
     *
     * @param instance must be a {@link JCObject}; anything else is rejected
     * @throws Throwable if the instance is not a JCObject
     */
    public SqlServerCompiledQueryCacheKeyGenerator(java.lang.Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }

    public String getJCOAssemblyName() {
        return assemblyFullName;
    }

    public String getJCOClassName() {
        return className;
    }

    public String getJCOObjectName() {
        return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }

    public java.lang.Object getJCOInstance() {
        return classInstance;
    }

    // Rebinds this proxy (and its superclass state) to a new bridged instance.
    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }

    public JCType getJCOType() {
        return classType;
    }

    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link SqlServerCompiledQueryCacheKeyGenerator}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link SqlServerCompiledQueryCacheKeyGenerator} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static SqlServerCompiledQueryCacheKeyGenerator cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new SqlServerCompiledQueryCacheKeyGenerator(from.getJCOInstance());
    }

    // Constructors section

    // No-op constructor: leaves classInstance null; used by subclass/bridge plumbing.
    public SqlServerCompiledQueryCacheKeyGenerator() throws Throwable {
    }

    /**
     * Creates the managed SqlServerCompiledQueryCacheKeyGenerator, forwarding
     * each dependency's bridged instance (or null) to the .NET constructor.
     */
    public SqlServerCompiledQueryCacheKeyGenerator(CompiledQueryCacheKeyGeneratorDependencies dependencies, RelationalCompiledQueryCacheKeyGeneratorDependencies relationalDependencies, ISqlServerConnection sqlServerConnection) throws Throwable, system.NotSupportedException, system.ArgumentException, system.ArgumentNullException, system.PlatformNotSupportedException, system.IndexOutOfRangeException, system.InvalidOperationException, system.ArgumentOutOfRangeException, system.OutOfMemoryException, system.FormatException {
        try {
            // add reference to assemblyName.dll file
            addReference(JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            setJCOInstance((JCObject)classType.NewObject(dependencies == null ? null : dependencies.getJCOInstance(), relationalDependencies == null ? null : relationalDependencies.getJCOInstance(), sqlServerConnection == null ? null : sqlServerConnection.getJCOInstance()));
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Methods section

    /**
     * Invokes the managed GenerateCacheKey(Expression, bool) and wraps the
     * resulting managed object in a {@link NetObject}.
     *
     * @param query bridged LINQ expression (may be null; forwarded as null)
     * @param async whether the key is for an async query
     * @throws UnsupportedOperationException when no managed instance is bound
     */
    public NetObject GenerateCacheKey(Expression query, boolean async) throws Throwable, system.NotSupportedException, system.ArgumentException, system.ArgumentNullException, system.PlatformNotSupportedException, system.IndexOutOfRangeException, system.InvalidOperationException, system.ArgumentOutOfRangeException, system.OutOfMemoryException, system.FormatException, system.ObjectDisposedException {
        if (classInstance == null)
            throw new UnsupportedOperationException("classInstance is null.");
        try {
            JCObject objGenerateCacheKey = (JCObject)classInstance.Invoke("GenerateCacheKey", query == null ? null : query.getJCOInstance(), async);
            return new NetObject(objGenerateCacheKey);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }

    // Properties section

    // Instance Events section
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/bigquery/connection/v1/connection.proto

package com.google.cloud.bigquery.connection.v1;

/**
 * <pre>
 * Authentication method for Amazon Web Services (AWS) that uses Google owned
 * AWS IAM user's access key to assume into customer's AWS IAM Role.
 * </pre>
 *
 * Protobuf type {@code google.cloud.bigquery.connection.v1.AwsCrossAccountRole}
 */
public final class AwsCrossAccountRole extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.cloud.bigquery.connection.v1.AwsCrossAccountRole)
    AwsCrossAccountRoleOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AwsCrossAccountRole.newBuilder() to construct.
  private AwsCrossAccountRole(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: all string fields start as "".
  private AwsCrossAccountRole() {
    iamRoleId_ = "";
    iamUserId_ = "";
    externalId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AwsCrossAccountRole();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // NOTE(review): wire-format parsing constructor in the older protoc style;
  // unrecognized tags are preserved in unknownFields rather than dropped.
  private AwsCrossAccountRole(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            iamRoleId_ = s;
            break;
          }
          case 18: {
            java.lang.String s = input.readStringRequireUtf8();

            iamUserId_ = s;
            break;
          }
          case 26: {
            java.lang.String s = input.readStringRequireUtf8();

            externalId_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on parse failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass.internal_static_google_cloud_bigquery_connection_v1_AwsCrossAccountRole_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass.internal_static_google_cloud_bigquery_connection_v1_AwsCrossAccountRole_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.class, com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.Builder.class);
  }

  public static final int IAM_ROLE_ID_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily decoded/encoded by the accessors.
  private volatile java.lang.Object iamRoleId_;
  /**
   * <pre>
   * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
   * Connection.
   * </pre>
   *
   * <code>string iam_role_id = 1;</code>
   * @return The iamRoleId.
   */
  @java.lang.Override
  public java.lang.String getIamRoleId() {
    java.lang.Object ref = iamRoleId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded string for subsequent calls.
      iamRoleId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
   * Connection.
   * </pre>
   *
   * <code>string iam_role_id = 1;</code>
   * @return The bytes for iamRoleId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getIamRoleIdBytes() {
    java.lang.Object ref = iamRoleId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      iamRoleId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int IAM_USER_ID_FIELD_NUMBER = 2;
  private volatile java.lang.Object iamUserId_;
  /**
   * <pre>
   * Output only. Google-owned AWS IAM User for a Connection.
   * </pre>
   *
   * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The iamUserId.
   */
  @java.lang.Override
  public java.lang.String getIamUserId() {
    java.lang.Object ref = iamUserId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      iamUserId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Output only. Google-owned AWS IAM User for a Connection.
   * </pre>
   *
   * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The bytes for iamUserId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getIamUserIdBytes() {
    java.lang.Object ref = iamUserId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      iamUserId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int EXTERNAL_ID_FIELD_NUMBER = 3;
  private volatile java.lang.Object externalId_;
  /**
   * <pre>
   * Output only. A Google-generated id for representing Connection’s identity in AWS.
   * External Id is also used for preventing the Confused Deputy Problem. See
   * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
   * </pre>
   *
   * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The externalId.
   */
  @java.lang.Override
  public java.lang.String getExternalId() {
    java.lang.Object ref = externalId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      externalId_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Output only. A Google-generated id for representing Connection’s identity in AWS.
   * External Id is also used for preventing the Confused Deputy Problem. See
   * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
   * </pre>
   *
   * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The bytes for externalId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getExternalIdBytes() {
    java.lang.Object ref = externalId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      externalId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoization flag: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // proto3 semantics: empty strings are not written to the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iamRoleId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, iamRoleId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iamUserId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, iamUserId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(externalId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, externalId_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iamRoleId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, iamRoleId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(iamUserId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, iamUserId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(externalId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, externalId_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole other = (com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole) obj;

    if (!getIamRoleId()
        .equals(other.getIamRoleId())) return false;
    if (!getIamUserId()
        .equals(other.getIamUserId())) return false;
    if (!getExternalId()
        .equals(other.getExternalId())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + IAM_ROLE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getIamRoleId().hashCode();
    hash = (37 * hash) + IAM_USER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getIamUserId().hashCode();
    hash = (37 * hash) + EXTERNAL_ID_FIELD_NUMBER;
    hash = (53 * hash) + getExternalId().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Authentication method for Amazon Web Services (AWS) that uses Google owned
   * AWS IAM user's access key to assume into customer's AWS IAM Role.
   * </pre>
   *
   * Protobuf type {@code google.cloud.bigquery.connection.v1.AwsCrossAccountRole}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.connection.v1.AwsCrossAccountRole)
      com.google.cloud.bigquery.connection.v1.AwsCrossAccountRoleOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass.internal_static_google_cloud_bigquery_connection_v1_AwsCrossAccountRole_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass.internal_static_google_cloud_bigquery_connection_v1_AwsCrossAccountRole_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.class, com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.Builder.class);
    }

    // Construct using com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      iamRoleId_ = "";

      iamUserId_ = "";

      externalId_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass.internal_static_google_cloud_bigquery_connection_v1_AwsCrossAccountRole_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole getDefaultInstanceForType() {
      return com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole build() {
      com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole buildPartial() {
      com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole result = new com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole(this);
      result.iamRoleId_ = iamRoleId_;
      result.iamUserId_ = iamUserId_;
      result.externalId_ = externalId_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole) {
        return mergeFrom((com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // proto3 merge semantics: non-empty fields of `other` overwrite this builder's.
    public Builder mergeFrom(com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole other) {
      if (other == com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole.getDefaultInstance()) return this;
      if (!other.getIamRoleId().isEmpty()) {
        iamRoleId_ = other.iamRoleId_;
        onChanged();
      }
      if (!other.getIamUserId().isEmpty()) {
        iamUserId_ = other.iamUserId_;
        onChanged();
      }
      if (!other.getExternalId().isEmpty()) {
        externalId_ = other.externalId_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before the failure, per protobuf contract.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object iamRoleId_ = "";
    /**
     * <pre>
     * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
     * Connection.
     * </pre>
     *
     * <code>string iam_role_id = 1;</code>
     * @return The iamRoleId.
     */
    public java.lang.String getIamRoleId() {
      java.lang.Object ref = iamRoleId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        iamRoleId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
     * Connection.
     * </pre>
     *
     * <code>string iam_role_id = 1;</code>
     * @return The bytes for iamRoleId.
     */
    public com.google.protobuf.ByteString
        getIamRoleIdBytes() {
      java.lang.Object ref = iamRoleId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        iamRoleId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
     * Connection.
     * </pre>
     *
     * <code>string iam_role_id = 1;</code>
     * @param value The iamRoleId to set.
     * @return This builder for chaining.
     */
    public Builder setIamRoleId(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      iamRoleId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
     * Connection.
     * </pre>
     *
     * <code>string iam_role_id = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearIamRoleId() {
      
      iamRoleId_ = getDefaultInstance().getIamRoleId();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user
     * Connection.
     * </pre>
     *
     * <code>string iam_role_id = 1;</code>
     * @param value The bytes for iamRoleId to set.
     * @return This builder for chaining.
     */
    public Builder setIamRoleIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      iamRoleId_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object iamUserId_ = "";
    /**
     * <pre>
     * Output only. Google-owned AWS IAM User for a Connection.
     * </pre>
     *
     * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The iamUserId.
     */
    public java.lang.String getIamUserId() {
      java.lang.Object ref = iamUserId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        iamUserId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. Google-owned AWS IAM User for a Connection.
     * </pre>
     *
     * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The bytes for iamUserId.
     */
    public com.google.protobuf.ByteString
        getIamUserIdBytes() {
      java.lang.Object ref = iamUserId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        iamUserId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. Google-owned AWS IAM User for a Connection.
     * </pre>
     *
     * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The iamUserId to set.
     * @return This builder for chaining.
     */
    public Builder setIamUserId(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      iamUserId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Google-owned AWS IAM User for a Connection.
     * </pre>
     *
     * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearIamUserId() {
      
      iamUserId_ = getDefaultInstance().getIamUserId();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. Google-owned AWS IAM User for a Connection.
     * </pre>
     *
     * <code>string iam_user_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The bytes for iamUserId to set.
     * @return This builder for chaining.
     */
    public Builder setIamUserIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      iamUserId_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object externalId_ = "";
    /**
     * <pre>
     * Output only. A Google-generated id for representing Connection’s identity in AWS.
     * External Id is also used for preventing the Confused Deputy Problem. See
     * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
     * </pre>
     *
     * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The externalId.
     */
    public java.lang.String getExternalId() {
      java.lang.Object ref = externalId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        externalId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. A Google-generated id for representing Connection’s identity in AWS.
     * External Id is also used for preventing the Confused Deputy Problem. See
     * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
     * </pre>
     *
     * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The bytes for externalId.
     */
    public com.google.protobuf.ByteString
        getExternalIdBytes() {
      java.lang.Object ref = externalId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        externalId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. A Google-generated id for representing Connection’s identity in AWS.
     * External Id is also used for preventing the Confused Deputy Problem. See
     * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
     * </pre>
     *
     * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The externalId to set.
     * @return This builder for chaining.
     */
    public Builder setExternalId(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      externalId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. A Google-generated id for representing Connection’s identity in AWS.
     * External Id is also used for preventing the Confused Deputy Problem. See
     * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
     * </pre>
     *
     * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearExternalId() {
      
      externalId_ = getDefaultInstance().getExternalId();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. A Google-generated id for representing Connection’s identity in AWS.
     * External Id is also used for preventing the Confused Deputy Problem. See
     * https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html
     * </pre>
     *
     * <code>string external_id = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The bytes for externalId to set.
     * @return This builder for chaining.
     */
    public Builder setExternalIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      externalId_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.connection.v1.AwsCrossAccountRole)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.connection.v1.AwsCrossAccountRole)
  private static final com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole();
  }

  public static com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<AwsCrossAccountRole>
      PARSER = new com.google.protobuf.AbstractParser<AwsCrossAccountRole>() {
    @java.lang.Override
    public AwsCrossAccountRole parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new AwsCrossAccountRole(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<AwsCrossAccountRole> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AwsCrossAccountRole> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.connection.v1.AwsCrossAccountRole getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
// Targeted by JavaCPP version 1.5-SNAPSHOT: DO NOT EDIT THIS FILE
// NOTE(review): this class is machine-generated by JavaCPP from the Caffe C++
// headers; any manual change here will be lost on the next regeneration.

package org.bytedeco.caffe;

import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import org.bytedeco.opencv.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_core.*;
import org.bytedeco.opencv.opencv_imgproc.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
import static org.bytedeco.opencv.global.opencv_imgcodecs.*;
import org.bytedeco.opencv.opencv_videoio.*;
import static org.bytedeco.opencv.global.opencv_videoio.*;
import org.bytedeco.opencv.opencv_highgui.*;
import static org.bytedeco.opencv.global.opencv_highgui.*;
import org.bytedeco.hdf5.*;
import static org.bytedeco.hdf5.global.hdf5.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;

import static org.bytedeco.caffe.global.caffe.*;

/**
 * \brief Computes {@code y = |x| }
 *
 * Java binding for the single-precision ({@code float}) instantiation of
 * Caffe's {@code AbsValLayer}; all computation happens in native code.
 *
 * @param bottom input Blob vector (length 1)
 *   -# {@code (N \times C \times H \times W) }
 *      the inputs {@code x }
 * @param top output Blob vector (length 1)
 *   -# {@code (N \times C \times H \times W) }
 *      the computed outputs {@code y = |x| }
 */
@Name("caffe::AbsValLayer<float>") @Properties(inherit = org.bytedeco.caffe.presets.caffe.class)
public class FloatAbsValLayer extends FloatNeuronLayer {
    // Ensure the native caffe library is loaded before any native call.
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public FloatAbsValLayer(Pointer p) { super(p); }

    /** Constructs the layer from its protobuf {@code LayerParameter} configuration. */
    public FloatAbsValLayer(@Const @ByRef LayerParameter param) { super((Pointer)null); allocate(param); }
    // Allocates the underlying native caffe::AbsValLayer<float> instance.
    private native void allocate(@Const @ByRef LayerParameter param);

    /** One-time layer setup; validates bottom/top blob configuration natively. */
    @Virtual public native void LayerSetUp(@Const @ByRef FloatBlobVector bottom,
        @Const @ByRef FloatBlobVector top);

    /** Returns the layer type string (native, const method). */
    @Virtual public native @Const({false, false, true}) @Cast("const char*") BytePointer type();
    /** Exact number of bottom (input) blobs this layer requires. */
    @Virtual public native @Const({false, false, true}) int ExactNumBottomBlobs();
    /** Exact number of top (output) blobs this layer produces. */
    @Virtual public native @Const({false, false, true}) int ExactNumTopBlobs();
    /** CPU forward pass: top = |bottom|. */
    @Virtual protected native void Forward_cpu(@Const @ByRef FloatBlobVector bottom,
        @Const @ByRef FloatBlobVector top);
    /** GPU forward pass: top = |bottom|. */
    @Virtual protected native void Forward_gpu(@Const @ByRef FloatBlobVector bottom,
        @Const @ByRef FloatBlobVector top);
    /** CPU backward pass; propagate_down gates gradient flow per bottom blob. */
    @Virtual protected native void Backward_cpu(@Const @ByRef FloatBlobVector top,
        @Const @ByRef BoolVector propagate_down, @Const @ByRef FloatBlobVector bottom);
    /** GPU backward pass; propagate_down gates gradient flow per bottom blob. */
    @Virtual protected native void Backward_gpu(@Const @ByRef FloatBlobVector top,
        @Const @ByRef BoolVector propagate_down, @Const @ByRef FloatBlobVector bottom);
}
/**
 * Copyright 2016-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the
 * Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0/
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.voicebase.gateways.awsconnect.forward;

import static com.voicebase.gateways.awsconnect.VoiceBaseAttributeExtractor.getVoicebaseAttributeName;
import static org.junit.Assert.assertFalse;

import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.voicebase.gateways.awsconnect.lambda.Lambda;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for {@code RecordingForwarder}: forwarding gate driven by the
 * "enable" flow variable, and audio-availability retry behavior.
 */
public class RecordingForwarderTest {

  /**
   * Builds a minimal contact-trace-record (CTR) map: an external id plus an
   * initially empty VoiceBase attribute map under {@link Lambda#KEY_ATTRIBUTES}.
   */
  private static Map<String, Object> awsConfigStub() {
    HashMap<String, Object> awsAttr = new HashMap<>();
    awsAttr.put(Lambda.KEY_EXTERNAL_ID, (Object) "externalId");
    HashMap<String, String> vbAttr = new HashMap<>();
    awsAttr.put(Lambda.KEY_ATTRIBUTES, vbAttr);
    return awsAttr;
  }

  /** Extracts the nested VoiceBase attribute map from a stubbed CTR. */
  @SuppressWarnings("unchecked")
  private static Map<String, String> getVbAttributes(Map<String, Object> awsInput) {
    return (Map<String, String>) awsInput.get(Lambda.KEY_ATTRIBUTES);
  }

  /**
   * Forwarding decision: default (flag unset) forwards, "1" forwards, "0" suppresses.
   */
  @Test
  public void testIfVoiceBaseEnableFlowVariableIsHonored() {
    RecordingForwarder forwarder = new RecordingForwarder();
    String flowVariable = getVoicebaseAttributeName(Lambda.VB_ATTR_ENABLE);
    Map<String, Object> awsAttr = awsConfigStub();
    Assert.assertTrue(
        "Should forward request if respective flow variable isn't set",
        forwarder.shouldProcess(awsAttr));
    awsAttr = awsConfigStub();
    Map<String, String> vbAttr = getVbAttributes(awsAttr);
    vbAttr.put(flowVariable, "1");
    Assert.assertTrue(
        "Should forward request if respective flow variable is set to 1",
        forwarder.shouldProcess(awsAttr));
    awsAttr = awsConfigStub();
    vbAttr = getVbAttributes(awsAttr);
    vbAttr.put(flowVariable, "0");
    Assert.assertFalse(
        "Should not forward request if respective flow variable is set to 0",
        forwarder.shouldProcess(awsAttr));
  }

  /**
   * Audio availability check across redelivery attempts. The stub attribute
   * "x-voicebase_timesToFailAudioExists" = 3 makes the first three checks fail.
   * NOTE(review): an Integer value is stored into a map typed Map&lt;String,String&gt;
   * via the raw Map view — intentional heap pollution for the stub, presumably
   * read back as Object by the forwarder; confirm against RecordingForwarder.
   */
  @Test
  public void testVerifyAudioAvailability() {
    Map<String, Object> ctrAsMap = awsConfigStub();
    Map<String, Object> attr = (Map<String, Object>) ctrAsMap.get(Lambda.KEY_ATTRIBUTES);
    attr.put("x-voicebase_timesToFailAudioExists", 3);
    RecordingForwarder forwarder = new RecordingForwarder();
    assertFalse(forwarder.verifyAudioAvailability(ctrAsMap, "alfa", "anything"));
    forwarder.setRedeliveryCount(ctrAsMap, 1);
    assertFalse(forwarder.verifyAudioAvailability(ctrAsMap, "alfa", "anything"));
    forwarder.setRedeliveryCount(ctrAsMap, 2);
    assertFalse(forwarder.verifyAudioAvailability(ctrAsMap, "alfa", "anything"));
    forwarder.setRedeliveryCount(ctrAsMap, 4);
    try {
      forwarder.verifyAudioAvailability(ctrAsMap, "alfa", "anything");
    } catch (AmazonS3Exception se) {
      // Intentionally ignored: past the retry budget an S3 exception may be
      // raised; the test only requires that it is of this type.
      // NOTE(review): the test passes whether or not the exception is thrown —
      // if the exception is mandatory here, an Assert.fail() after the call is
      // missing; confirm intended contract of verifyAudioAvailability.
    }
  }
}
package org.tain.domain.chun;

import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Date;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;

import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * JPA entity for a "tip": a piece of content attached to a {@code Sent} row,
 * with Hibernate-managed creation/update timestamps that are hidden from JSON
 * serialization.
 */
@Entity
@Data
@NoArgsConstructor
// NOTE(review): the listed property names ("", "create_date", ...) are
// snake_case and do not match any field on this class (createdDate,
// updatedDate, jobDate) — they appear to have no effect here unless a naming
// strategy maps them; each timestamp field is already hidden via @JsonIgnore.
// Confirm whether this annotation can be removed.
@JsonIgnoreProperties(value = { ""
        , "create_date"
        , "update_date"
        , "job_date"
        , "work_date"
})
public class Tip {

    // Surrogate primary key, generated by the provider (strategy AUTO).
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    // Free-form tip text.
    private String content;

    // Owning Sent row; lazily loaded and excluded from JSON to avoid
    // serializing an uninitialized proxy / circular reference.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "sent_no")
    @JsonIgnore
    private Sent sent;

    // Set once by Hibernate on insert.
    @JsonIgnore
    //@JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "Asia/Seoul")
    //@DateTimeFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
    @CreationTimestamp
    private LocalDateTime createdDate;

    // Refreshed by Hibernate on every update.
    @JsonIgnore
    //@JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "Asia/Seoul")
    @UpdateTimestamp
    private Timestamp updatedDate;

    // Also refreshed on update; legacy java.util.Date type.
    // NOTE(review): three differently-typed timestamp fields (LocalDateTime,
    // Timestamp, Date) on one entity — presumably intentional for comparison;
    // verify before consolidating.
    @JsonIgnore
    //@Temporal(TemporalType.TIMESTAMP)
    //@JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "Asia/Seoul")
    @UpdateTimestamp
    private Date jobDate;

    /**
     * Builder constructor for the two client-supplied fields; ids and
     * timestamps are provider-managed.
     */
    @Builder
    public Tip(
            String content,
            Sent sent
            ) {
        this.content = content;
        this.sent = sent;
    }
}
package io.imunity.furms.rest.openapi; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.stereotype.Component; import io.swagger.v3.oas.annotations.OpenAPIDefinition; import io.swagger.v3.oas.models.Components; import io.swagger.v3.oas.models.OpenAPI; import io.swagger.v3.oas.models.info.Info; import io.swagger.v3.oas.models.info.License; import io.swagger.v3.oas.models.security.SecurityRequirement; import io.swagger.v3.oas.models.security.SecurityScheme; @OpenAPIDefinition @Component class OpenAPIConfiguration { @Bean OpenAPI customOpenAPI(@Value("${app.version:unknown}") String version) { SecurityScheme cidpSecScheme = new SecurityScheme() .name(APIDocConstants.CIDP_SECURITY_SCHEME) .type(SecurityScheme.Type.HTTP).scheme("BASIC") .description("Pre-shared secret between Fenix central IdP and FURMS shall be used " + "for authentication and authorization of each request with " + "HTTP Basic encoding in HTTP header"); SecurityScheme apiKeyScheme = new SecurityScheme() .name(APIDocConstants.FURMS_SECURITY_SCHEME) .type(SecurityScheme.Type.HTTP).scheme("BASIC") .description("Pre-shared token generated by FURMS shall be used " + "for authentication and authorization of each request, " + "together with owner's user id presented next to the generated token."); return new OpenAPI().info(new Info().title("FURMS REST API").version(version) .license(new License().name("BSD").url( "https://opensource.org/licenses/BSD-2-Clause"))) .addSecurityItem(new SecurityRequirement() .addList(APIDocConstants.CIDP_SECURITY_SCHEME)) .addSecurityItem(new SecurityRequirement() .addList(APIDocConstants.FURMS_SECURITY_SCHEME)) .components(new Components().addSecuritySchemes( APIDocConstants.CIDP_SECURITY_SCHEME, cidpSecScheme).addSecuritySchemes( APIDocConstants.FURMS_SECURITY_SCHEME, apiKeyScheme)); } }
package com.sdl.selenium.extjs3.form;

import com.sdl.selenium.InputData;
import com.sdl.selenium.extjs3.window.Window;
import com.sdl.selenium.TestBase;
import com.sdl.selenium.web.SearchType;
import org.testng.annotations.*;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

/**
 * Selenium/TestNG integration test for the ExtJS 3 {@code ComboBox} wrapper:
 * exact selection plus STARTS_WITH and CONTAINS search-type matching.
 */
public class ComboBoxIntegrationTest extends TestBase {

    // Demo window hosting the combo box under test.
    private Window comboBoxWindow = new Window("ComboBoxWindow");
    private ComboBox comboBox = new ComboBox("comboBox", comboBoxWindow);

    /** Navigates the driver to the ExtJS demo and opens the ComboBox sample. */
    @BeforeClass
    public void startTest() {
        driver.get(InputData.EXTJS_URL);
        showComponent("ComboBox");
    }

    /** Closes the demo window so later test classes start from a clean page. */
    @AfterClass
    public void endTests() {
        comboBoxWindow.close();
    }

    /** Default (exact) selection sets the combo's displayed value. */
    @Test
    public void testEditorType() {
        assertTrue(comboBox.select("Romanian"));
        assertEquals(comboBox.getValue(), "Romanian");
    }

    /** Prefix and substring search types resolve to the full option text. */
    @Test
    public void searchTypeSelect() {
        assertTrue(comboBox.select("Bulgar", SearchType.STARTS_WITH));
        assertEquals(comboBox.getValue(), "Bulgarian");
        assertTrue(comboBox.select("United States", SearchType.CONTAINS));
        assertEquals(comboBox.getValue(), "English(United States)");
    }
}
package com.itemanalysis.psychometrics.irt.estimation;

import static junit.framework.Assert.assertEquals;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.junit.Test;

import util.FileUploadUtil;

import com.itemanalysis.psychometrics.data.VariableName;
import com.itemanalysis.psychometrics.distribution.UserSuppliedDistributionApproximation;
import com.itemanalysis.psychometrics.irt.estimation.IrtExaminee;
import com.itemanalysis.psychometrics.irt.model.Irm3PL;
import com.itemanalysis.psychometrics.irt.model.IrmGPCM;
import com.itemanalysis.psychometrics.irt.model.IrmGPCM2;
import com.itemanalysis.psychometrics.irt.model.ItemResponseModel;

/**
 * Regression tests for {@link IrtExaminee} ability estimation (MLE, EAP, MAP,
 * JMLE/PCF) against reference values produced by external IRT software
 * (the R mirt package, jmetrik, ICL and PARSCALE). Two data sets are used:
 * the dichotomous LSAT7 patterns and a 42-item mixed-format set.
 * NOTE(review): uses the deprecated junit.framework.Assert.assertEquals
 * (JUnit 3 style) alongside org.junit.Test (JUnit 4) — consider migrating
 * to org.junit.Assert.
 */
public class IrtExamineeTest {

    /**
     * Loads the 32 LSAT7 response patterns (5 dichotomous items) into
     * {@link #lsat7}, skipping the header row of the CSV.
     * NOTE(review): the BufferedReader is closed only on the success path;
     * an exception leaks it — try-with-resources would be safer. Also note the
     * relative resource path here vs. the absolute "/testdata/..." path in
     * {@link #readMixedFormat()}.
     */
    public void readLsat7Data() {
        lsat7 = new byte[32][5];
        try {
            File f = FileUtils.toFile(this.getClass().getResource("../../testdata/lsat7.txt"));
            BufferedReader br = new BufferedReader(new FileReader(f));
            String line = "";
            String[] s = null;
            int row = 0;
            br.readLine();// eliminate column names by skipping first row
            while ((line = br.readLine()) != null) {
                s = line.split(",");
                for (int j = 0; j < s.length; j++) {
                    lsat7[row][j] = Byte.parseByte(s[j]);
                }
                row++;
            }
            br.close();
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Loads the 20 mixed-format response vectors (42 items) into
     * {@link #mixedFormatData}; no header row in this file.
     */
    public void readMixedFormat() {
        mixedFormatData = new byte[20][42];
        try {
            File f = FileUtils.toFile(this.getClass().getResource("/testdata/mixed-format.txt"));
            BufferedReader br = new BufferedReader(new FileReader(f));
            String line = "";
            String[] s = null;
            int row = 0;
            while ((line = br.readLine()) != null) {
                s = line.split(",");
                for (int j = 0; j < s.length; j++) {
                    mixedFormatData[row][j] = Byte.parseByte(s[j]);
                }
                row++;
            }
            br.close();
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * MLE ability estimates for every LSAT7 pattern must match the mirt
     * reference values to 1e-4 (2PL models, scaling constant 1.702).
     */
    @Test
    public void maximumLikelihoodTest2PL() {
        // System.out.println();
        System.out.println("MLE test: LSAT7.");
        if (lsat7 == null)
            readLsat7Data();
        int n = aParamLSAT7.length;
        int nPeople = lsat7.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        // create item response models objects
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new Irm3PL(aParamLSAT7[i], bParamLSAT7[i], 1.702);
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        // estimate ability scores for each response pattern
        double mle = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(lsat7[j]);
            mle = iVec.maximumLikelihoodEstimate(minTheta, maxTheta);
            se = iVec.mleStandardErrorAt(mle);
            // System.out.println("MLE" + j + ": " + mle + " SE: " + se);
            assertEquals(" MLE Test" + j, trueMLE_LSAT7[j], mle, 1e-4);
        }
    }

    /**
     * Helper variant of the MLE test that loads data via FileUploadUtil and
     * returns the configured examinee instead of asserting.
     * NOTE(review): not annotated with @Test — presumably used by an external
     * caller/context; confirm before removing.
     */
    public IrtExaminee maximumLikelihoodTest2PLToContext() {
        // System.out.println();
        System.out.println("MLE test: LSAT7.");
        if (lsat7 == null)
            lsat7 = FileUploadUtil.readTestData("lsat7.txt");
        int n = aParamLSAT7.length;
        int nPeople = lsat7.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        // create item response models objects
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new Irm3PL(aParamLSAT7[i], bParamLSAT7[i], 1.702);
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        // estimate ability scores for each response pattern
        double mle = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(lsat7[j]);
            mle = iVec.maximumLikelihoodEstimate(minTheta, maxTheta);
            se = iVec.mleStandardErrorAt(mle);
            // System.out.println("MLE" + j + ": " + mle + " SE: " + se);
        }
        return iVec;
    }

    /**
     * EAP estimates (N(0,1) prior, 25 quadrature points on [-4,4]) must match
     * the mirt reference values to 1e-3.
     */
    @Test
    public void eapTest2PL() {
        System.out.println();
        System.out.println("EAP test: LSAT7.");
        if (lsat7 == null)
            readLsat7Data();
        int n = aParamLSAT7.length;
        int nPeople = lsat7.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new Irm3PL(aParamLSAT7[i], bParamLSAT7[i], 1.702);
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        double eap = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(lsat7[j]);
            eap = iVec.eapEstimate(0.0, 1.0, -4.0, 4.0, 25);
            se = iVec.eapStandardErrorAt(eap);
            System.out.println("EAP" + j + ": " + eap + " SE: " + se);
            assertEquals(" EAP Test" + j, trueEAP_LSAT7[j], eap, 1e-3);
        }
    }

    /**
     * MAP estimates (N(0,1) prior on [-4,4]) must match the mirt reference
     * values to 1e-4.
     */
    @Test
    public void mapTest2PL() {
        // System.out.println();
        System.out.println("MAP test: LSAT7.");
        if (lsat7 == null)
            readLsat7Data();
        int n = aParamLSAT7.length;
        int nPeople = lsat7.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new Irm3PL(aParamLSAT7[i], bParamLSAT7[i], 1.702);
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        double map = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(lsat7[j]);
            map = iVec.mapEstimate(0.0, 1.0, -4.0, 4.0);
            se = iVec.mapStandardErrorAt(map);
            // System.out.println("MAP" + j + ": " + map + " SE: " + se);
            assertEquals(" MAP Test" + j, trueMAP_LSAT7[j], map, 1e-4);
        }
    }

    /**
     * Rasch-style PCF (proportional curve fitting) estimates using jmetrik
     * JMLE item difficulties must match the jmetrik person estimates to 1e-4.
     */
    @Test
    public void jmleTest() {
        // System.out.println();
        System.out.println("JMLE PCF test: LSAT7.");
        if (lsat7 == null)
            readLsat7Data();
        int n = bParam_jmle.length;
        int nPeople = lsat7.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new Irm3PL(bParam_jmle[i], 1.0);
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        double pcf = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(lsat7[j]);
            pcf = iVec.pcfEstimate(50, 0.001, 0.3);
            se = iVec.pcfStandardErrorAt(pcf);
            // System.out.println("PCF" + j + ": " + pcf + "SE: " + se);
            assertEquals(" JMLE Test" + j, trueJMLE_LSAT7[j], pcf, 1e-4);
        }
    }

    /**
     * Mixed-format MLE: 40 3PL multiple-choice items plus 2 GPCM polytomous
     * items; estimates must match ICL reference values to 1e-3.
     */
    @Test
    public void mixedFormatTestMLE() {
        System.out.println();
        System.out.println("MLE test: mixed-format.");
        if (mixedFormatData == null)
            readMixedFormat();
        int n = aParam_mixed.length;
        int nPeople = mixedFormatData.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            if (i < 40) {
                // items 0..39 are multiple choice -> 3PL
                irmArray[i] = new Irm3PL(aParam_mixed[i], bParam_mixed[i], cParam_mixed[i], 1.7);
            } else {
                // items 40..41 are polytomous -> generalized partial credit
                irmArray[i] = new IrmGPCM(aParam_mixed[i], stepParam_mixed[i - 40], 1.0);
            }
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        double mle = 0.0;
        double se = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(mixedFormatData[j]);
            mle = iVec.maximumLikelihoodEstimate(-6.0, 6.0);
            se = iVec.mleStandardErrorAt(mle);
            System.out.println("MLE" + j + ": " + mle + " True MLE: " + trueMLE_mixed[j] + " SE: " + se);
            assertEquals(" MLE Test" + j, trueMLE_mixed[j], mle, 1e-3);
        }
    }

    /**
     * Mixed-format EAP with ICL's default quadrature (40 points on [-6,6]);
     * estimates must match ICL reference values to 1e-4.
     */
    @Test
    public void mixedFormatTestEAP() {
        System.out.println();
        System.out.println("EAP test: mixed-format.");
        if (mixedFormatData == null)
            readMixedFormat();
        int n = aParam_mixed.length;
        int nPeople = mixedFormatData.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            if (i < 40) {
                irmArray[i] = new Irm3PL(aParam_mixed[i], bParam_mixed[i], cParam_mixed[i], 1.7);
            } else {
                irmArray[i] = new IrmGPCM(aParam_mixed[i], stepParam_mixed[i - 40], 1.0);
            }
            irmArray[i].setName(iName);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        double eap = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(mixedFormatData[j]);
            eap = iVec.eapEstimate(0.0, 1.0, -6.0, 6.0, 40);// ICL defaults
            System.out.println("EAP" + j + ": " + eap + " True EAP: " + trueEAP_mixed[j]);
            // Not sure if ICL is using final quadrature or normal distribution
            // values when computing EAP.
            // May be a reason for low accuracy of the results
            assertEquals(" EAP Test" + j, trueEAP_mixed[j], eap, 1e-4);
        }
    }

    /**
     * PARSCALE example1 data fit with a partial credit model per item
     * (IrmGPCM2 parameterization). Reference values differ enough that the
     * assertions are commented out; the estimates are only printed.
     */
    @Test
    public void parscaleTest() {
        // System.out.println();
        System.out.println("EAP test: parscale.");
        int n = aparam_parscale.length;
        int nPeople = parscale_data.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        double[] scoring = { 1, 2, 3, 4 };
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            irmArray[i] = new IrmGPCM2(aparam_parscale[i], bparam_parscale[i], step_Parscale[i], 1.7);
            irmArray[i].setName(iName);
            irmArray[i].setScoreWeights(scoring);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        UserSuppliedDistributionApproximation dist = new UserSuppliedDistributionApproximation(quad_point, quad_weight);
        double eap = 0.0;
        double mle = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(parscale_data[j]);
            // NOTE(review): the first eapEstimate result is immediately
            // overwritten by the distribution-based call below — presumably a
            // leftover comparison; confirm before deleting either call.
            eap = iVec.eapEstimate(0.0, 1.0, parscale_minTheta, parscale_maxTheta, 30);
            eap = iVec.eapEstimate(dist);
            // System.out.println("EAP" + j + ": " + eap + " True EAP: " +
            // trueEAP_parscale[j]);
            // Results not accurate but correlate 0.996 with each other.
            // Difference may be due to optimizer convergence criteria.
            // assertEquals(" EAP Test" + j, trueEAP_parscale[j], eap, 1e-1);
            mle = iVec.maximumLikelihoodEstimate(parscale_minTheta, parscale_maxTheta);
            System.out.println("MLE" + j + ": " + mle + " True MLE: " + trueMLE_parscale[j]);
            // assertEquals(" MLE Test" + j, trueMLE_parscale[j], mle, 1e-1);
        }
    }

    /**
     * Same PARSCALE test but reparameterized for IrmGPCM: step parameters are
     * converted to b - step so both models describe the same curves.
     */
    @Test
    public void parscaleTest2() {
        // same test as above but uses IrmGPCM instead of IrmGPCM2
        System.out.println();
        System.out.println("EAP test: parscale reparameritized.");
        int n = aparam_parscale.length;
        int nPeople = parscale_data.length;
        ItemResponseModel[] irmArray = new ItemResponseModel[n];
        ItemResponseModel irm; // NOTE(review): unused local
        double[] scoring = { 1, 2, 3, 4 };
        double[] tempStepParam = null;
        VariableName iName = null;
        for (int i = 0; i < n; i++) {
            String name = "V" + i;
            iName = new VariableName(name);
            // convert GPCM2 (location + steps) to GPCM thresholds: b - step
            tempStepParam = new double[step_Parscale[i].length];
            for (int j = 0; j < step_Parscale[i].length; j++) {
                tempStepParam[j] = bparam_parscale[i] - step_Parscale[i][j];
            }
            irmArray[i] = new IrmGPCM(aparam_parscale[i], tempStepParam, 1.7);
            irmArray[i].setName(iName);
            irmArray[i].setScoreWeights(scoring);
        }
        IrtExaminee iVec = new IrtExaminee(irmArray);
        UserSuppliedDistributionApproximation dist = new UserSuppliedDistributionApproximation(quad_point, quad_weight);
        double eap = 0.0;
        double mle = 0.0;
        for (int j = 0; j < nPeople; j++) {
            iVec.setResponseVector(parscale_data[j]);
            eap = iVec.eapEstimate(0.0, 1.0, parscale_minTheta, parscale_maxTheta, 30);
            eap = iVec.eapEstimate(dist);
            // System.out.println("EAP" + j + ": " + eap + " True EAP: " +
            // trueEAP_parscale[j]);
            // Results not accurate but correlate 0.996 with each other.
            // Difference may be due to optimizer convergence criteria.
            // assertEquals(" EAP Test" + j, trueEAP_parscale[j], eap, 1e-1);
            mle = iVec.maximumLikelihoodEstimate(parscale_minTheta, parscale_maxTheta);
            System.out.println("MLE" + j + ": " + mle + " True MLE: " + trueMLE_parscale[j]);
            // assertEquals(" MLE Test" + j, trueMLE_parscale[j], mle, 1e-1);
        }
    }

    // ======================================================================================================================
    // TRUE parameter values
    // ======================================================================================================================
    // Response data, lazily loaded by the read* helpers above.
    private static byte[][] lsat7 = null;
    private static byte[][] mixedFormatData = null;
    // Search bounds for MLE optimization.
    private double minTheta = -9.0;// in lieu of negative infinity
    private double maxTheta = 9.0;// in lieu of positive infinity

    // true parameters obtained from mirt package in R
    private double[] aParamLSAT7 = { 0.5760746525, 0.6713629131, 0.9567791258, 0.4266571055, 0.4398755432 };
    private double[] bParamLSAT7 = { -1.8877222528, -0.7154624410, -1.0825764537, -0.6760358283, -2.4772429769 };
    // All-wrong/all-right patterns hit the theta bounds.
    private double[] trueMLE_LSAT7 = { minTheta, -3.10845041, -3.15107153, -2.13728887, -1.98956866, -1.38727274,
            -1.40365915, -0.87089110, -2.50911393, -1.76060374, -1.77924262, -1.21512783, -1.10671814, -0.55780612,
            -0.57560892, 0.13601644, -2.72808025, -1.89875271, -1.91867347, -1.33073839, -1.22120399, -0.68345783,
            -0.70044698, -0.04756461, -1.57652362, -1.03570578, -1.05158493, -0.49482657, -0.36534495, 0.48802400,
            0.45257041, maxTheta };
    private double[] trueEAP_LSAT7 = { -1.86699915002, -1.51833780543, -1.52862017915, -1.19050028062, -1.12167621040,
            -0.78392865464, -0.79420134176, -0.44431659698, -1.33969835385, -1.00375904585, -1.01386610198,
            -0.67338304915, -0.60187663710, -0.23968655305, -0.25097467227, 0.14224215163, -1.41293907667,
            -1.07654120538, -1.08662659805, -0.74812568573, -0.67735619964, -0.32043055589, -0.33151951606,
            0.05371430242, -0.89930927803, -0.55455966449, -0.56516178853, -0.20021595605, -0.12161974292,
            0.28530512338, 0.27243030193, 0.72578380552 };
    private double[] trueMAP_LSAT7 = { -1.82014732325, -1.48950570373, -1.49914838882, -1.18508879236, -1.12160703999,
            -0.81006616362, -0.81957574775, -0.49478440966, -1.32320650445, -1.01284454621, -1.02218279038,
            -0.70781334045, -0.64155893474, -0.30261794957, -0.31330117802, 0.06239891391, -1.39119370820,
            -1.07990412674, -1.08922566785, -0.77692861198, -0.71149909142, -0.37865183861, -0.38909943798,
            -0.02295620020, -0.91654678664, -0.59751769790, -0.60739760260, -0.26523888053, -0.19071994704,
            0.20152618293, 0.18892566808, 0.63800689395 };

    // LSAT7 results from jmetrik using Rasch Models analysis
    private double[] bParam_jmle = { -0.6827150092298472, 0.6679301544552803, -0.18450773468097034,
            1.0331646145652922, -0.8338720251097548 };
    private double[] trueJMLE_LSAT7 = { -2.9681488055844527, -1.5443472409279775, -1.5443472409279775,
            -0.4669235333931464, -1.5443472409279775, -0.4669235333931464, -0.4669235333931464, 0.45293530892580947,
            -1.5443472409279775, -0.4669235333931464, -0.4669235333931464, 0.45293530892580947, -0.4669235333931464,
            0.45293530892580947, 0.45293530892580947, 1.5467384674981617, -1.5443472409279775, -0.4669235333931464,
            -0.4669235333931464, 0.45293530892580947, -0.4669235333931464, 0.45293530892580947, 0.45293530892580947,
            1.5467384674981617, -0.4669235333931464, 0.45293530892580947, 0.45293530892580947, 1.5467384674981617,
            0.45293530892580947, 1.5467384674981617, 1.5467384674981617, 2.987845332665278 };

    // ability estimates from ICL for the response vectors in mixed-format.txt
    private double[] trueEAP_mixed = { 1.203520, 1.237269, -0.839161, -0.253902, 1.901835, 0.427541, 0.235083,
            0.750366, -0.674191, 0.441296, 1.572613, 0.801079, 0.329102, 1.458463, -0.189362, 1.845943, 0.878068,
            0.802608, -0.829593, -0.502849 };
    private double[] trueMLE_mixed = { 1.302364, 1.336559, -0.845830, -0.208761, 2.101531, 0.491479, 0.305466,
            0.817874, -0.685480, 0.508849, 1.690902, 0.868703, 0.390239, 1.577347, -0.157638, 2.016163, 0.961122,
            0.874694, -0.898311, -0.503212 };

    // item parameters for 40 multiple choice and 2 polytomous items from
    // mixed-format.txt
    // (estimates obtained from larger item response file)
    private double[] aParam_mixed = { 0.7573722, 0.84344208, 0.91272306, 0.8476324, 0.75653338, 0.91264614, 1.7310894,
            0.93499608, 1.54515682, 1.24897604, 0.71757726, 1.00732102, 1.03585051, 0.81302008, 0.94757159,
            0.59698281, 0.62028276, 1.00758979, 0.58312897, 0.60219414, 0.66084336, 0.62989076, 0.70693752,
            0.86574777, 0.61312122, 0.56735134, 0.98809429, 0.53907226, 0.64132853, 0.67836688, 0.92011339,
            0.83107679, 0.87903698, 1.43258799, 0.44561383, 1.03101111, 1.6025383, 0.85904148, 1.55120193, 0.8282011,
            0.97862369, 1.30034675 };
    private double[] bParam_mixed = { 1.6230173, 0.67152368, -0.32696466, -0.01087341, -0.12937667, 0.80500481,
            1.29248633, 1.06740232, -0.14137442, 0.59466423, 0.19875489, 2.02049419, 1.14622086, -0.60786305,
            0.52960921, 0.12096465, 0.49659716, 1.13475338, -1.6020013, 1.10518653, -0.10679459, -0.59138503,
            1.88745664, 0.7035784, -0.30082617, -1.492581, 1.95993867, 0.21187485, 0.59263333, 2.09225317,
            0.17273864, 0.82048979, 0.71126868, 0.31175695, 0.82426725, 1.87676213, 2.16970874, 1.0654643,
            1.81675684, 0.17928933 };
    private double[] cParam_mixed = { 0.13656342, 0.29406404, 0.25057165, 0.1797302, 0.20751763, 0.19368337,
            0.26555399, 0.16015504, 0.10072914, 0.1945635, 0.11197594, 0.27011819, 0.22768903, 0.1748153, 0.16277419,
            0.13670017, 0.24440971, 0.16699152, 0.16742496, 0.2379273, 0.27999908, 0.08793936, 0.19558209, 0.1887004,
            0.1045626, 0.12386857, 0.33002528, 0.40741659, 0.13649977, 0.47153802, 0.18915205, 0.33726248,
            0.10903968, 0.23812296, 0.23128587, 0.11829606, 0.15408938, 0.13365926, 0.21462302, 0.15194013 };
    // GPCM step parameters for the two polytomous items (indices 40 and 41).
    private double[][] stepParam_mixed = { { 0.50768878, 1.37864855, 0.13240289 },
            { 0.50140888, 1.65465213, 4.10892819 } };

    // ============================================================================================================================================================================
    // These parameters come from example1 that comes with PARSCALE. However,
    // the analysis uses a partial credit model for each item instead of a
    // single rating scale.
    // ============================================================================================================================================================================
    double parscale_minTheta = -4.0;
    double parscale_maxTheta = 4.0;
    private double[] aparam_parscale = { 1.50508, 1.61609, 1.54948, 1.71541, 1.76130, 1.23014, 1.31342, 1.39885,
            1.30280, 1.25339, 0.96456, 0.98534, 1.07900, 1.02679, 1.05099, 0.74472, 0.68904, 0.75242, 0.71619,
            0.73505 };
    private double[] bparam_parscale = { 0.00750, -0.00775, 0.01932, -0.01465, -0.01038, 0.47858, 0.48654, 0.44832,
            0.47259, 0.50252, -0.53854, -0.50507, -0.50478, -0.44867, -0.46213, 0.02238, -0.07591, 0.04737, 0.04728,
            0.07827 };
    private double[][] step_Parscale = { { 1.02242, 0.01036, -1.03278 }, { 0.95478, 0.02745, -0.98223 },
            { 0.98682, -0.01933, -0.96749 }, { 0.94606, -0.01821, -0.92785 }, { 0.92796, 0.01413, -0.94209 },
            { 0.96578, 0.00920, -0.97499 }, { 0.92201, 0.01823, -0.94025 }, { 0.92441, 0.05682, -0.98123 },
            { 0.94065, 0.04582, -0.98648 }, { 1.05179, -0.00746, -1.04433 }, { 1.08470, -0.02502, -1.05968 },
            { 0.95794, 0.01865, -0.97659 }, { 1.02808, -0.06415, -0.96393 }, { 1.02342, -0.02146, -1.00197 },
            { 0.92162, -0.03804, -0.88358 }, { 0.97320, 0.03400, -1.00720 }, { 1.10923, 0.05042, -1.15964 },
            { 1.01524, -0.01631, -0.99893 }, { 1.00752, 0.00576, -1.01328 }, { 0.91689, 0.00846, -0.92535 } };

    // EAP estimates and standard errors from the first 20 examinees in the
    // example data file
    private double[] trueEAP_parscale = { 0.6071, -0.6969, -0.4161, -0.8136, -0.7741, 1.5050, 0.1959, 1.9895, 1.5136,
            -2.0296, 1.1653, -2.2606, -1.7540, 0.3270, 0.1765, -0.1735, 0.2841, 0.0638, -0.3918, 1.9686 };
    private double[] trueEAP_stdError_parscale = { 0.2056, 0.2028, 0.1967, 0.2085, 0.2088, 0.2445, 0.2092, 0.3191,
            0.2464, 0.3486, 0.2226, 0.4004, 0.2972, 0.2106, 0.2015, 0.1898, 0.1920, 0.2102, 0.2127, 0.3132 };
    private double[] trueMLE_parscale = { 0.6322, -0.7244, -0.4308, -0.8431, -0.8051, 1.5732, 0.2050, 2.19332, 1.5815,
            -2.2649, 1.2113, -2.7058, -1.8775, 0.3413, 0.1809, -0.1790, 0.2946, 0.0674, -0.4082, 2.48472 };

    // response patterns for the first 20 examinees in parscale example1.dat but
    // recoded from 1,2,3,4 to 0,1,2,3
    private byte[][] parscale_data = {
            { 3, 1, 3, 3, 3, 1, 2, 1, 1, 1, 2, 2, 3, 2, 3, 2, 2, 2, 2, 1 },
            { 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 2, 1, 3, 0, 1, 0, 3, 2, 1 },
            { 2, 1, 1, 0, 1, 1, 0, 1, 1, 0, 2, 2, 3, 1, 2, 0, 3, 0, 1, 0 },
            { 0, 2, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 1, 3, 1, 1, 0, 0, 0, 0 },
            { 1, 0, 1, 0, 0, 0, 1, 1, 2, 0, 2, 0, 2, 1, 2, 0, 1, 0, 2, 0 },
            { 2, 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 3 },
            { 1, 2, 2, 3, 2, 1, 0, 2, 3, 0, 3, 2, 0, 1, 2, 2, 2, 2, 2, 0 },
            { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 2 },
            { 3, 3, 3, 3, 3, 1, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 },
            { 3, 2, 2, 3, 3, 3, 3, 3, 1, 2, 3, 3, 2, 3, 3, 1, 3, 3, 1, 1 },
            { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0 },
            { 0, 2, 1, 3, 2, 1, 1, 2, 0, 2, 3, 3, 2, 1, 3, 3, 2, 3, 2, 0 },
            { 2, 2, 1, 1, 1, 1, 2, 0, 2, 2, 1, 3, 1, 3, 3, 1, 2, 2, 2, 3 },
            { 1, 1, 1, 2, 2, 1, 1, 0, 0, 1, 2, 1, 2, 3, 1, 2, 1, 1, 1, 3 },
            { 2, 2, 2, 2, 2, 2, 2, 1, 1, 0, 1, 2, 3, 2, 3, 2, 3, 0, 1, 0 },
            { 2, 2, 0, 2, 1, 1, 2, 0, 2, 1, 1, 2, 3, 0, 0, 3, 2, 3, 2, 3 },
            { 3, 1, 1, 0, 2, 0, 0, 1, 2, 1, 0, 0, 2, 1, 1, 0, 1, 1, 3, 2 },
            { 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3 } };

    // quadrature from parscale output
    double[] quad_point = { -0.4000E+01, -0.3724E+01, -0.3448E+01, -0.3172E+01, -0.2897E+01, -0.2621E+01,
            -0.2345E+01, -0.2069E+01, -0.1793E+01, -0.1517E+01, -0.1241E+01, -0.9655E+00, -0.6897E+00, -0.4138E+00,
            -0.1379E+00, 0.1379E+00, 0.4138E+00, 0.6897E+00, 0.9655E+00, 0.1241E+01, 0.1517E+01, 0.1793E+01,
            0.2069E+01, 0.2345E+01, 0.2621E+01, 0.2897E+01, 0.3172E+01, 0.3448E+01, 0.3724E+01, 0.4000E+01 };
    double[] quad_weight = { 0.3692E-04, 0.1071E-03, 0.2881E-03, 0.7181E-03, 0.1659E-02, 0.3550E-02, 0.7042E-02,
            0.1294E-01, 0.2205E-01, 0.3481E-01, 0.5093E-01, 0.6905E-01, 0.8676E-01, 0.1010E+00, 0.1090E+00,
            0.1090E+00, 0.1010E+00, 0.8676E-01, 0.6905E-01, 0.5093E-01, 0.3481E-01, 0.2205E-01, 0.1294E-01,
            0.7042E-02, 0.3550E-02, 0.1659E-02, 0.7181E-03, 0.2881E-03, 0.1071E-03, 0.3692E-04 };
}
package model;

import java.util.ArrayList;

/**
 * Priority-ordered ready queue of {@code Task}s. A task is inserted before the
 * first queued task whose priority value is strictly greater, so lower
 * priority values sit nearer the head and equal priorities keep FIFO order.
 * Tasks are de-duplicated by task id. Not thread-safe.
 */
public class ReadyQueue {

    private ArrayList<Task> queue;

    public ReadyQueue() {
        queue = new ArrayList<>();
    }

    /**
     * Removes and returns the task at the head of the queue.
     *
     * @return the highest-priority task, or {@code null} if the queue is empty
     */
    public Task dequeue() {
        if (isEmpty()) {
            return null;
        }
        // Index-based remove(0) avoids the equals()-based scan of remove(Object).
        return queue.remove(0);
    }

    /**
     * Inserts {@code task} in priority order. A task whose id is already
     * queued is ignored. Ties with existing priorities are placed after the
     * existing entries (FIFO among equals).
     *
     * @param task the task to add; assumed non-null
     */
    public void enqueue(Task task) {
        if (contain(task)) {
            return; // duplicate id: silently ignore, as before
        }
        for (int i = 0; i < queue.size(); i++) {
            if (queue.get(i).getPriority() > task.getPriority()) {
                queue.add(i, task);
                return;
            }
        }
        // Empty queue, or task has the greatest priority value so far:
        // append at the tail (lowest scheduling priority).
        queue.add(task);
    }

    /**
     * Returns true if a task with the same id is already queued.
     *
     * BUG FIX: the previous version had {@code return false} inside the loop
     * body, so only the FIRST queued task was ever compared and duplicates
     * further down the queue were admitted. The early return now applies only
     * on a match; false is returned after the full scan.
     */
    private boolean contain(Task task) {
        for (Task p : queue) {
            if (p.getTaskID() == task.getTaskID()) {
                return true;
            }
        }
        return false;
    }

    /** @return the number of queued tasks */
    public int size() {
        return queue.size();
    }

    /** @return {@code true} when no tasks are queued (boxed, as in the original API) */
    public Boolean isEmpty() {
        return (queue.size() == 0);
    }
}
package top.itmp.rtbox.example; import android.graphics.Color; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.util.Pair; import android.support.v4.view.PagerTabStrip; import android.support.v4.view.ViewPager; import android.support.v7.app.AppCompatActivity; import android.util.TypedValue; import java.util.ArrayList; import java.util.List; /** * Created by hz on 16/5/11. */ public class MainActivity extends AppCompatActivity { private ViewPager viewPager; private PagerTabStrip tabs; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); viewPager = (ViewPager) findViewById(R.id.viewpager); MainPagerAdapter pagerAdapter = new MainPagerAdapter(getSupportFragmentManager()); pagerAdapter.add(new FragmentNormal(), "normal"); pagerAdapter.add(new FramentNew(), "exec"); pagerAdapter.add(new FragmentBin(), "binTest"); viewPager.setAdapter(pagerAdapter); tabs = (PagerTabStrip) findViewById(R.id.tabs); tabs.setTextSize(TypedValue.COMPLEX_UNIT_SP, 18); tabs.setTextColor(Color.WHITE); } private static class MainPagerAdapter extends FragmentPagerAdapter { private List<Pair<Fragment, String>> fragments = new ArrayList<>(); public void add(Fragment fragment, String title) { fragments.add(new Pair(fragment, title)); } public MainPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { return fragments.get(position).first; } @Override public int getCount() { return fragments.size(); } @Override public CharSequence getPageTitle(int position) { return fragments.get(position).second; } } }
/**
 * Copyright (C) 2015 The Gravitee team (http://gravitee.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gravitee.am.gateway.handler.common.jwe.impl;

import com.nimbusds.jose.EncryptionMethod;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.JWEAlgorithm;
import com.nimbusds.jose.JWEEncrypter;
import com.nimbusds.jose.JWEHeader;
import com.nimbusds.jose.JWEObject;
import com.nimbusds.jose.Payload;
import com.nimbusds.jose.crypto.AESEncrypter;
import com.nimbusds.jose.crypto.DirectEncrypter;
import com.nimbusds.jose.crypto.ECDHEncrypter;
import com.nimbusds.jose.crypto.PasswordBasedEncrypter;
import com.nimbusds.jose.crypto.RSAEncrypter;
import com.nimbusds.jose.crypto.X25519Encrypter;
import com.nimbusds.jose.crypto.impl.AESCryptoProvider;
import com.nimbusds.jose.crypto.impl.DirectCryptoProvider;
import com.nimbusds.jose.crypto.impl.ECDHCryptoProvider;
import com.nimbusds.jose.crypto.impl.PasswordBasedCryptoProvider;
import com.nimbusds.jose.crypto.impl.RSACryptoProvider;
import com.nimbusds.jose.jwk.KeyType;
import com.nimbusds.jose.jwk.OctetSequenceKey;
import io.gravitee.am.common.oauth2.exception.OAuth2Exception;
import io.gravitee.am.common.oauth2.exception.ServerErrorException;
import io.gravitee.am.gateway.handler.common.jwe.JWEService;
import io.gravitee.am.gateway.handler.common.jwk.JWKService;
import io.gravitee.am.gateway.handler.common.jwk.converter.JWKConverter;
import io.gravitee.am.model.Client;
import io.gravitee.am.model.jose.ECKey;
import io.gravitee.am.model.jose.JWK;
import io.gravitee.am.model.jose.OCTKey;
import io.gravitee.am.model.jose.OKPKey;
import io.gravitee.am.model.jose.RSAKey;
import io.gravitee.am.service.exception.InvalidClientMetadataException;
import io.reactivex.Maybe;
import io.reactivex.Single;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.function.Predicate;

import static io.gravitee.am.gateway.handler.common.jwa.utils.JWAlgorithmUtils.getDefaultIdTokenResponseEnc;
import static io.gravitee.am.gateway.handler.common.jwa.utils.JWAlgorithmUtils.getDefaultUserinfoResponseEnc;
import static io.gravitee.am.gateway.handler.common.jwk.JWKFilter.*;

/**
 * JWE encryption service: wraps an already-signed JWT (id_token or userinfo
 * response) in a JWE envelope using the algorithm/encryption-method configured
 * on the {@link Client}, picking a matching key from the client's JWK set.
 *
 * @author Alexandre FARIA (contact at alexandrefaria.net)
 * @author GraviteeSource Team
 */
public class JWEServiceImpl implements JWEService {

    private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(JWEServiceImpl.class);

    // Spring-injected; used to fetch and filter the client's JWK set.
    @Autowired
    JWKService jwkService;

    /**
     * Encrypts a signed id_token for the client, or returns it unchanged when
     * the client has no id_token encryption algorithm configured (or "none").
     * Non-OAuth2 errors are logged and mapped to a generic ServerErrorException.
     */
    @Override
    public Single<String> encryptIdToken(String signedJwt, Client client) {
        //Return input without encryption if client does not require JWE or algorithm is set to none
        // NOTE(review): getIdTokenEncryptedResponseAlg() appears to be a String
        // (it is fed to JWEAlgorithm.parse below); JWEAlgorithm.NONE.equals(String)
        // relies on Nimbus Algorithm.equals accepting non-Algorithm operands —
        // TODO confirm against the nimbus-jose-jwt version in use.
        if(client.getIdTokenEncryptedResponseAlg()==null || JWEAlgorithm.NONE.equals(client.getIdTokenEncryptedResponseAlg())) {
            return Single.just(signedJwt);
        }

        // Build the JWE envelope: configured alg + enc (falling back to the
        // default id_token enc), cty=JWT, payload = the signed JWT compact form.
        JWEObject jwe = new JWEObject(
                new JWEHeader.Builder(
                        JWEAlgorithm.parse(client.getIdTokenEncryptedResponseAlg()),
                        EncryptionMethod.parse(client.getIdTokenEncryptedResponseEnc()!=null?client.getIdTokenEncryptedResponseEnc():getDefaultIdTokenResponseEnc())
                ).contentType("JWT").build(),
                new Payload(signedJwt)
        );

        return encrypt(jwe,client)
                .onErrorResumeNext(throwable -> {
                    // OAuth2 errors are propagated as-is; anything else is
                    // logged and wrapped so internals don't leak to the caller.
                    if(throwable instanceof OAuth2Exception) {
                        return Single.error(throwable);
                    }
                    LOGGER.error(throwable.getMessage(), throwable);
                    return Single.error(new ServerErrorException("Unable to encrypt id_token"));
                });
    }

    /**
     * Encrypts a signed userinfo response for the client, or returns it
     * unchanged when no userinfo encryption algorithm is configured (or "none").
     * Mirrors {@link #encryptIdToken} with the userinfo-specific settings.
     */
    @Override
    public Single<String> encryptUserinfo(String signedJwt, Client client) {
        //Return input without encryption if client does not require JWE or algorithm is set to none
        if(client.getUserinfoEncryptedResponseAlg()==null || JWEAlgorithm.NONE.equals(client.getUserinfoEncryptedResponseAlg())) {
            return Single.just(signedJwt);
        }

        JWEObject jwe = new JWEObject(
                new JWEHeader.Builder(
                        JWEAlgorithm.parse(client.getUserinfoEncryptedResponseAlg()),
                        EncryptionMethod.parse(client.getUserinfoEncryptedResponseEnc()!=null?client.getUserinfoEncryptedResponseEnc():getDefaultUserinfoResponseEnc())
                ).contentType("JWT").build(),
                new Payload(signedJwt)
        );

        return encrypt(jwe,client)
                .onErrorResumeNext(throwable -> {
                    if(throwable instanceof OAuth2Exception) {
                        return Single.error(throwable);
                    }
                    LOGGER.error(throwable.getMessage(), throwable);
                    return Single.error(new ServerErrorException("Unable to encrypt userinfo"));
                });
    }

    /**
     * Dispatches on the JWE "alg" header to the matching Nimbus encrypter
     * family (RSA, EC/OKP curve, AES key-wrap, direct, password-based),
     * pairing each with the appropriate key filter from JWKFilter.
     * Unsupported algorithms yield a ServerErrorException.
     */
    private Single<String> encrypt(JWEObject jwe, Client client) {
        JWEAlgorithm algorithm = jwe.getHeader().getAlgorithm();

        //RSA encryption
        if(RSACryptoProvider.SUPPORTED_ALGORITHMS.contains(algorithm)) {
            return encrypt(jwe, client, RSA_KEY_ENCRYPTION(), jwk ->
                    new RSAEncrypter(JWKConverter.convert((RSAKey) jwk))
            );
        }
        //Curve encryption (Elliptic "EC" & Edward "OKP")
        else if(ECDHCryptoProvider.SUPPORTED_ALGORITHMS.contains(algorithm)) {
            return encrypt(jwe, client, CURVE_KEY_ENCRYPTION(), jwk -> {
                // EC keys get an ECDH encrypter; any other curve key reaching
                // this branch is treated as an Edwards (X25519) key.
                if(KeyType.EC.getValue().equals(jwk.getKty())) {
                    return new ECDHEncrypter(JWKConverter.convert((ECKey) jwk));
                }
                return new X25519Encrypter(JWKConverter.convert((OKPKey) jwk));
            });
        }
        //AES encryption ("OCT" keys)
        else if(AESCryptoProvider.SUPPORTED_ALGORITHMS.contains(algorithm)) {
            // Key filter is parameterized by the key-wrap algorithm (key size).
            return encrypt(jwe, client, OCT_KEY_ENCRYPTION(algorithm), jwk ->
                    new AESEncrypter(JWKConverter.convert((OCTKey) jwk))
            );
        }
        //Direct encryption ("OCT" keys)
        else if(DirectCryptoProvider.SUPPORTED_ALGORITHMS.contains(algorithm)) {
            // For "dir", the key must match the content-encryption method size.
            return encrypt(jwe, client, OCT_KEY_ENCRYPTION(jwe.getHeader().getEncryptionMethod()), jwk ->
                    new DirectEncrypter(JWKConverter.convert((OCTKey) jwk))
            );
        }
        //Password Base Encryption ("OCT" keys)
        else if(PasswordBasedCryptoProvider.SUPPORTED_ALGORITHMS.contains(algorithm)) {
            return encrypt(jwe, client, OCT_KEY_ENCRYPTION(), jwk -> {
                OctetSequenceKey octKey = JWKConverter.convert((OCTKey) jwk);
                // Minimum salt length / iteration count as recommended by Nimbus.
                return new PasswordBasedEncrypter(
                        octKey.getKeyValue().decode(),
                        PasswordBasedEncrypter.MIN_SALT_LENGTH,
                        PasswordBasedEncrypter.MIN_RECOMMENDED_ITERATION_COUNT
                );
            });
        }

        return Single.error(new ServerErrorException("Unable to perform Json Web Encryption, unsupported algorithm"+algorithm.getName()));
    }

    /**
     * Fetches the client's JWK set, keeps keys accepted by {@code filter},
     * builds an encrypter from the first match via {@code function}, then
     * encrypts and serializes the JWE in compact form. Emits
     * InvalidClientMetadataException when no key matches.
     */
    private Single<String> encrypt(JWEObject jwe, Client client, Predicate<JWK> filter, JWEEncrypterFunction<JWK, JWEEncrypter> function) {
        return jwkService.getKeys(client)
                .flatMap(jwkSet -> jwkService.filter(jwkSet, filter))
                .switchIfEmpty(Maybe.error(new InvalidClientMetadataException("no matching key found to encrypt")))
                .flatMapSingle(jwk -> Single.just(function.apply(jwk)))
                .map(encrypter -> {
                    // Mutates the shared JWEObject in place, then serializes it.
                    jwe.encrypt(encrypter);
                    return jwe.serialize();
                });
    }

    /**
     * Throwing function used to defer encrypter construction into the Rx
     * chain (JOSEException surfaces through the stream's error channel).
     * NOTE(review): the type parameter names shadow the imported JWK and
     * JWEEncrypter types — consider renaming to T/R for clarity.
     */
    @FunctionalInterface
    private interface JWEEncrypterFunction<JWK, JWEEncrypter> {
        JWEEncrypter apply(JWK jwk) throws JOSEException;
    }
}
package ms.messageapp.analytics;

import ms.messageapp.analytics.e2e.DecryptionFailureReason;

/**
 *
 * A category to be linked to an {@link im.vector.analytics.TrackingEvent}
 *
 * @param value to log into your analytics console
 */
// NOTE(review): this file is a decompiled/generated Kotlin stub (see the
// kotlin.Metadata annotation and the nulled-out field/method bodies). It is
// not hand-maintained source — do not edit by hand; regenerate from the
// original Kotlin instead. The unused DecryptionFailureReason import is
// presumably a decompiler artifact.
@kotlin.Metadata(mv = {1, 1, 13}, bv = {1, 0, 3}, k = 1, d1 = {"\u0000\u0012\n\u0002\u0018\u0002\n\u0002\u0010\u0010\n\u0000\n\u0002\u0010\u000e\n\u0002\b\u0006\b\u0086\u0001\u0018\u00002\b\u0012\u0004\u0012\u00020\u00000\u0001B\u000f\b\u0002\u0012\u0006\u0010\u0002\u001a\u00020\u0003\u00a2\u0006\u0002\u0010\u0004R\u0011\u0010\u0002\u001a\u00020\u0003\u00a2\u0006\b\n\u0000\u001a\u0004\b\u0005\u0010\u0006j\u0002\b\u0007j\u0002\b\b\u00a8\u0006\t"}, d2 = {"Lms/messageapp/analytics/Category;", "", "value", "", "(Ljava/lang/String;ILjava/lang/String;)V", "getValue", "()Ljava/lang/String;", "METRICS", "E2E", "vector_appDebug"})
public enum Category {
    /*public static final*/ METRICS /* = new METRICS(null) */,
    /*public static final*/ E2E /* = new E2E(null) */;

    // Stub field: the real initializer lives in the compiled Kotlin class.
    @org.jetbrains.annotations.NotNull()
    private final java.lang.String value = null;

    // Stub accessor: returns null only in this decompiled representation.
    @org.jetbrains.annotations.NotNull()
    public final java.lang.String getValue() {
        return null;
    }

    Category(java.lang.String value) {
    }
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.bootstrap.interceptor;

import com.navercorp.pinpoint.bootstrap.config.ProfilerConfig;
import com.navercorp.pinpoint.bootstrap.context.AsyncTraceId;
import com.navercorp.pinpoint.bootstrap.context.MethodDescriptor;
import com.navercorp.pinpoint.bootstrap.context.ParsingResult;
import com.navercorp.pinpoint.bootstrap.context.ServerMetaDataHolder;
import com.navercorp.pinpoint.bootstrap.context.Trace;
import com.navercorp.pinpoint.bootstrap.context.TraceContext;
import com.navercorp.pinpoint.bootstrap.context.TraceId;

/**
 * Test double for {@link TraceContext}: holds a single injectable {@link Trace}
 * and returns it (or null/zero defaults) from the interface methods. Not
 * thread-safe; intended for single-threaded unit tests.
 *
 * @author emeroad
 * @author hyungil.jeong
 * @author Taejin Koo
 */
public class MockTraceContext implements TraceContext {

    // The trace returned by the accessors below; null until setTrace is called.
    private Trace trace;

    /** Injects the trace this mock will hand out. */
    public void setTrace(Trace trace) {
        this.trace = trace;
    }

    @Override
    public Trace currentTraceObject() {
        if (trace == null) {
            return null;
        }
        // NOTE(review): this returns null when canSampled() is TRUE, which
        // looks inverted relative to the usual "return the trace if it can be
        // sampled" contract — possibly deliberate for these tests; confirm
        // against TraceContext's documented semantics before "fixing".
        if (trace.canSampled()) {
            return null;
        }
        return trace;
    }

    // Returns the injected trace unconditionally (no sampling check).
    @Override
    public Trace currentRawTraceObject() {
        return trace;
    }

    // The mock ignores the incoming id and returns the injected trace.
    @Override
    public Trace continueTraceObject(TraceId traceID) {
        return trace;
    }

    @Override
    public Trace newTraceObject() {
        return trace;
    }

    @Override
    public String getAgentId() {
        return null;
    }

    @Override
    public String getApplicationName() {
        return null;
    }

    @Override
    public long getAgentStartTime() {
        return 0;
    }

    @Override
    public short getServerTypeCode() {
        return 0;
    }

    @Override
    public String getServerType() {
        return null;
    }

    @Override
    public int cacheApi(MethodDescriptor methodDescriptor) {
        return 0;
    }

    @Override
    public int cacheString(String value) {
        return 0;
    }

    @Override
    public ParsingResult parseSql(String sql) {
        return null;
    }

    @Override
    public boolean cacheSql(ParsingResult parsingResult) {
        return false;
    }

    @Override
    public TraceId createTraceId(String transactionId, long parentSpanID, long spanID, short flags) {
        return null;
    }

    @Override
    public Trace disableSampling() {
        return null;
    }

    @Override
    public ProfilerConfig getProfilerConfig() {
        return null;
    }

    @Override
    public ServerMetaDataHolder getServerMetaDataHolder() {
        return null;
    }

    @Override
    public Trace continueAsyncTraceObject(AsyncTraceId traceId, int asyncId, long startTime) {
        return null;
    }

    @Override
    public int getAsyncId() {
        return 0;
    }

    @Override
    public Trace continueTraceObject(Trace trace) {
        return null;
    }

    /** Detaches and returns the current trace, leaving the mock empty. */
    @Override
    public Trace removeTraceObject() {
        Trace old = trace;
        trace = null;
        return old;
    }
}
/*
 * Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.registry.extensions.handlers;

import org.apache.axiom.om.OMElement;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xerces.xni.parser.XMLInputSource;
import org.wso2.carbon.registry.core.*;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.Handler;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.utils.AuthorizationUtils;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.extensions.handlers.utils.SchemaProcessor;
import org.wso2.carbon.registry.extensions.handlers.utils.SchemaValidator;
import org.wso2.carbon.registry.extensions.utils.CommonConstants;
import org.wso2.carbon.registry.extensions.utils.CommonUtil;
import org.wso2.carbon.registry.extensions.utils.WSDLValidationInfo;

import javax.xml.namespace.QName;
import java.io.*;
import java.util.*;

/**
 * Registry media-type handler for XML Schema (.xsd) resources: validates the
 * schema, resolves its dependency closure through a temporary file-system
 * staging area, and stores it (with imports) under the configured governance
 * location. Mutating operations are serialized via CommonUtil's update lock.
 */
public class XSDMediaTypeHandler extends Handler {

    private static final Log log = LogFactory.getLog(XSDMediaTypeHandler.class);

    private String location = "/schema/"; // location will always has a leading '/' and trailing '/'
    // Name of the child element of the handler config that carries the location.
    private String locationTag = "location";
    private boolean disableSchemaValidation = false;
    private boolean disableSymlinkCreation = true;
    private String defaultSchemaVersion = CommonConstants.SCHEMA_VERSION_DEFAULT_VALUE;

    public OMElement getLocationConfiguration() {
        return locationConfiguration;
    }

    /**
     * Reads the handler's <location> configuration element and normalizes the
     * stored location to have both a leading and a trailing '/'.
     */
    public void setLocationConfiguration(OMElement locationConfiguration) throws RegistryException {
        Iterator confElements = locationConfiguration.getChildElements();
        while (confElements.hasNext()) {
            OMElement confElement = (OMElement) confElements.next();
            if (confElement.getQName().equals(new QName(locationTag))) {
                location = confElement.getText();
                if (!location.startsWith(RegistryConstants.PATH_SEPARATOR)) {
                    location = RegistryConstants.PATH_SEPARATOR + location;
                }
                if (!location.endsWith(RegistryConstants.PATH_SEPARATOR)) {
                    location = location + RegistryConstants.PATH_SEPARATOR;
                }
            }
        }
        this.locationConfiguration = locationConfiguration;
    }

    public boolean isDisableSymlinkCreation() {
        return disableSymlinkCreation;
    }

    // Config setter: only the literal string "true" enables the flag.
    public void setDisableSymlinkCreation(String disableSymlinkCreation) {
        this.disableSymlinkCreation = Boolean.toString(true).equals(disableSymlinkCreation);
    }

    // NOTE(review): setter is named "ServiceVersion" but sets the schema
    // version — the name is likely dictated by the handler config mapping.
    public void setDefaultServiceVersion(String defaultSchemaVersion) {
        this.defaultSchemaVersion = defaultSchemaVersion;
    }

    private OMElement locationConfiguration;

    /**
     * PUT entry point. Short-circuits remote-mount re-puts, skips processing
     * when content is unchanged, validates the schema (from a source URL or
     * inline content), and delegates to processSchemaImport/processSchemaUpload.
     */
    public void put(RequestContext requestContext) throws RegistryException {
        if (!CommonUtil.isUpdateLockAvailable()) {
            return;
        }
        CommonUtil.acquireUpdateLock();
        try {
            Resource resource = requestContext.getResource();
            String resourcePath = requestContext.getResourcePath().getPath();
            String parentPath = RegistryUtils.getParentPath(resourcePath);
            // String sourceURL = requestContext.getSourceURL();
            Registry registry = requestContext.getRegistry();
            // This is to distinguish operations on xsd and wsdl on remote mounting.
            String remotePut = resource.getProperty(RegistryConstants.REMOTE_MOUNT_OPERATION);
            if (remotePut != null) {
                // NOTE(review): the lock is released here AND again in the
                // finally block below — verify CommonUtil tolerates a double
                // release (e.g. counted/thread-local lock) before relying on it.
                CommonUtil.releaseUpdateLock();
                resource.removeProperty(RegistryConstants.REMOTE_MOUNT_OPERATION);
                registry.put(resourcePath, resource);
                requestContext.setProcessingComplete(true);
                return;
            }
            String oldResourcePath = null;
            if (registry.resourceExists(resourcePath)) {
                // If the resource is already there and the content is not changed, perform the default processing.
                // logic to compare content, and return only if the content didn't change.
                Object newContent = resource.getContent();
                if (newContent instanceof String) {
                    newContent = RegistryUtils.encodeString(((String) newContent));
                }
                Resource oldResource = registry.get(resourcePath);
                Object oldContent = oldResource.getContent();
                String newContentString = null;
                String oldContentString = null;
                if (newContent != null) {
                    if (newContent instanceof String) {
                        newContentString = (String) newContent;
                    } else {
                        newContentString = RegistryUtils.decodeBytes((byte[]) newContent);
                    }
                }
                if (oldContent != null) {
                    if (oldContent instanceof String) {
                        oldContentString = (String) oldContent;
                    } else {
                        oldContentString = RegistryUtils.decodeBytes((byte[]) oldContent);
                    }
                }
                if ((newContent == null && oldContent == null) ||
                        (newContentString != null && newContentString.equals(oldContentString))) {
                    // this will continue adding from the default path.
                    return;
                }
                oldResourcePath = resourcePath; // keep the old resource path.
            }
            WSDLValidationInfo validationInfo = null;
            String savedName;
            requestContext.setSourceURL(
                    requestContext.getResource().getProperty(CommonConstants.SOURCEURL_PARAMETER_NAME));
            String sourceURL = requestContext.getSourceURL();
            if (StringUtils.isNotBlank(sourceURL)) {
                // Import path: fetch from URL; local file: URLs are rejected.
                if (requestContext.getSourceURL().toLowerCase()
                        .startsWith("file:")) {
                    String msg = "The source URL must not be file in the server's local file system";
                    throw new RegistryException(msg);
                }
                try {
                    if (!disableSchemaValidation) {
                        validationInfo = SchemaValidator.validate(new XMLInputSource(null, sourceURL, null));
                    }
                } catch (Exception e) {
                    // Since SchemaValidator.validate method is throwing Exception need to catch it here
                    throw new RegistryException("Exception occurred while validating the schema " +
                            sourceURL, e);
                }
                savedName = processSchemaImport(requestContext, resourcePath, validationInfo);
            } else {
                // Upload path: validate the inline content (string is first
                // normalized to bytes) and stage it through the temp dir.
                Object resourceContent = resource.getContent();
                if (resourceContent instanceof String) {
                    resourceContent = RegistryUtils.encodeString(((String) resourceContent));
                    resource.setContent(resourceContent);
                }
                if (resourceContent instanceof byte[]) {
                    try {
                        InputStream in = new ByteArrayInputStream((byte[]) resourceContent);
                        if (!disableSchemaValidation) {
                            // PublicId, SystemId, BaseSystemId and Encoding set to null.
                            validationInfo = SchemaValidator.
                                    validate(new XMLInputSource(null, null, null, in, null));
                        }
                    } catch (Exception e) {
                        // Since SchemaValidator.validate method is throwing Exception need to catch it here
                        throw new RegistryException("Exception occurred while validating the schema", e);
                    }
                }
                savedName = processSchemaUpload(requestContext, resourcePath, validationInfo);
            }
            if (StringUtils.isNotBlank(savedName)) {
                onPutCompleted(resourcePath, Collections.singletonMap(sourceURL, savedName),
                        Collections.<String>emptyList(), requestContext);
                requestContext.setActualPath(savedName);
            }
            requestContext.setProcessingComplete(true);
        } finally {
            CommonUtil.releaseUpdateLock();
        }
    }

    /**
     * IMPORT entry point: validates the remote schema (file: URLs rejected),
     * imports it via processSchemaImport and rewrites the actual path under
     * the original parent.
     */
    public void importResource(RequestContext requestContext) throws RegistryException {
        if (!CommonUtil.isUpdateLockAvailable()) {
            return;
        }
        CommonUtil.acquireUpdateLock();
        try {
            String parentPath = RegistryUtils.getParentPath(requestContext.getResourcePath().getPath());
            String resourcePath = requestContext.getResourcePath().getCompletePath();
            String sourceURL = requestContext.getSourceURL();
            if (requestContext.getSourceURL() != null &&
                    requestContext.getSourceURL().toLowerCase().startsWith("file:")) {
                String msg = "The source URL must not be file in the server's local file system";
                throw new RegistryException(msg);
            }
            WSDLValidationInfo validationInfo = null;
            try {
                if (!disableSchemaValidation) {
                    validationInfo = SchemaValidator.validate(new XMLInputSource(null, sourceURL, null));
                }
            } catch (Exception e) {
                // (message typo "occured" predates this review; left unchanged)
                throw new RegistryException("Exception occured while validating the schema", e);
            }
            String savedName = processSchemaImport(requestContext, resourcePath, validationInfo);
            if (parentPath.endsWith(RegistryConstants.PATH_SEPARATOR)) {
                requestContext.setActualPath(parentPath + RegistryUtils.getResourceName(savedName));
            } else {
                requestContext.setActualPath(parentPath + RegistryConstants.PATH_SEPARATOR +
                        RegistryUtils.getResourceName(savedName));
            }
            onPutCompleted(resourcePath, Collections.singletonMap(sourceURL, savedName),
                    Collections.<String>emptyList(), requestContext);
            requestContext.setProcessingComplete(true);
        } finally {
            CommonUtil.releaseUpdateLock();
        }
    }

    /**
     * Method that runs the schema upload procedure.
     *
     * @param requestContext the request context for the put operation
     * @param resourcePath   the path of the resource
     * @param validationInfo the validation information
     * @return the path at which the schema was uploaded to
     * @throws RegistryException if the operation failed.
     */
    protected String processSchemaUpload(RequestContext requestContext, String resourcePath,
                                         WSDLValidationInfo validationInfo) throws RegistryException {
        String registryPath = null;
        // Stage the schema and its registry dependencies as real files so the
        // schema processor can resolve relative imports; always cleaned up.
        List<File> tempFiles = makeTempDirStructure(requestContext);
        try {
            SchemaProcessor schemaProcessor = buildSchemaProcessor(requestContext, validationInfo);
            registryPath = schemaProcessor
                    .putSchemaToRegistry(requestContext, resourcePath,
                            getChrootedLocation(requestContext.getRegistryContext()),
                            true,disableSymlinkCreation);
        } finally {
            deleteTempFiles(tempFiles);
        }
        return registryPath;
    }

    /**
     * creates all the tmp dirs/files created in the tmp location in the file system to perform the XML Schema update.
     *
     * @param requestContext
     * @return list of created temp files; the last two entries are the temp
     *         directory and the reference ".ref" file (deleteTempFiles relies
     *         on this ordering)
     * @throws RegistryException
     */
    private List<File> makeTempDirStructure(RequestContext requestContext) throws RegistryException {
        final String resourcePath = requestContext.getResource().getPath();
        final Registry registry = requestContext.getRegistry();
        List<File> tempFiles = new ArrayList<File>();
        if (resourcePath == null) {
            return tempFiles;
        }
        try {
            // creating temp files for the wsdl and all the dependencies.
            Set<String> registryPaths = new LinkedHashSet<String>();
            // the first resourcePath is the current resource resourcePath.
            registryPaths.add(resourcePath);
            // get the associations.
            Association[] dependencies = CommonUtil.getDependenciesRecursively(registry, resourcePath);
            if (dependencies != null) {
                for (Association dependency : dependencies) {
                    String targetPath = dependency.getDestinationPath();
                    if (targetPath.startsWith(RegistryConstants.ROOT_PATH)) {
                        registryPaths.add(targetPath);
                    }
                }
            }
            // The ".ref" file only reserves a unique name; the sibling
            // directory (same name minus ".ref") becomes the staging root.
            File referenceTempFile = File.createTempFile("xsd", ".ref");
            File tempDir = new File(referenceTempFile.getAbsolutePath().substring(0,
                    referenceTempFile.getAbsolutePath().length() - ".ref".length()));
            String tempDirPath = tempDir.getAbsolutePath();
            // now add each of the registry paths to the the tempDir
            for (String registryPath : registryPaths) {
                if (!registryPath.startsWith(RegistryConstants.ROOT_PATH)) {
                    continue;
                }
                String filePath = tempDirPath + registryPath;
                File tempFile = new File(filePath);
                makeDirs(tempFile.getParentFile());
                Object resourceContent;
                if (registryPath.equals(resourcePath)) {
                    // this is the xsd we want to update.
                    resourceContent = requestContext.getResource().getContent();
                } else {
                    if (!registry.resourceExists(registryPath)) {
                        continue;
                    }
                    Resource r = registry.get(registryPath);
                    if (r == null) {
                        continue;
                    }
                    resourceContent = r.getContent();
                }
                byte[] resourceContentBytes;
                if (resourceContent == null) {
                    resourceContentBytes = new byte[0];
                } else if (resourceContent instanceof byte[]) {
                    resourceContentBytes = (byte[]) resourceContent;
                } else if (resourceContent instanceof String) {
                    resourceContentBytes = RegistryUtils.encodeString(((String) resourceContent));
                } else {
                    String msg = "Unknown type for the content resourcePath: " + registryPath +
                            ", content type: " + resourceContent.getClass().getName() + ".";
                    log.error(msg);
                    throw new RegistryException(msg);
                }
                InputStream in = new ByteArrayInputStream(resourceContentBytes);
                BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
                byte[] contentChunk = new byte[1024];
                int byteCount;
                while ((byteCount = in.read(contentChunk)) != -1) {
                    out.write(contentChunk, 0, byteCount);
                }
                out.flush();
                out.close();
                tempFiles.add(tempFile);
            }
            if (tempFiles.size() == 0) {
                // unreachable state, anyway better log and return.
                // NOTE(review): despite the comment, execution falls through to
                // tempFiles.get(0) below, which would throw IndexOutOfBounds if
                // this state were ever reached — confirm intent.
                String msg = "Temporary files count is zero, when updating a xsd. " +
                        "xsd resourcePath: " + resourcePath + ".";
                log.error(msg);
            }
            File tempFile = tempFiles.get(0);
            // Rewrite the request's source URL to point at the staged copy of
            // the schema being updated (normalized to a file:/// URI).
            String uri = tempFile.toURI().toString();
            if (uri != null) {
                if (uri.startsWith("file:")) {
                    uri = uri.substring(5);
                }
                while (uri.startsWith("/")) {
                    uri = uri.substring(1);
                }
                uri = "file:///" + uri;
                requestContext.setSourceURL(uri);
            }
            //adding the tmp dir for delete purposes.
            tempFiles.add(tempDir);
            //adding the root tmp dir created for delete purposes.
            tempFiles.add(referenceTempFile);
        } catch (IOException ioe) {
            String msg = "Error in updating the XML Schema. XML Schema resourcePath: " +
                    resourcePath + ".";
            log.error(msg, ioe);
            throw new RegistryException(msg, ioe);
        }
        return tempFiles;
    }

    /**
     * deletes all the tmp dirs/files created in the tmp location in the file system to perform the XML Schema update.
     *
     * @param tempFiles list produced by makeTempDirStructure: data files
     *                  first, then the temp directory, then the ".ref" file
     * @throws IOException
     */
    private void deleteTempFiles(List<File> tempFiles) throws RegistryException {
        try {
            // now we will delete each temp files, ref file and the temp directory.
            final int fileSize = tempFiles.size();
            if (fileSize >= 2) {
                for (int i = 0; i < (fileSize - 2); i++) {
                    FileUtils.forceDelete(tempFiles.get(i));
                }
                //deleting the root tmp dir
                FileUtils.deleteDirectory(tempFiles.get(fileSize - 2));
                //deleting the tmp file
                FileUtils.forceDelete(tempFiles.get(fileSize - 1));
            }
        } catch (IOException ioe) {
            String msg = "Error in updating the XML Schema. XML Schema resourcePath: " +
                    tempFiles.get(0) + ".";
            log.error(msg, ioe);
            throw new RegistryException(msg, ioe);
        }
    }

    /**
     * creates the parent directory structure for a given resource at a temp location in the file system.
     *
     * @param file
     * @throws IOException
     */
    private void makeDirs(File file) throws IOException {
        if (file != null && !file.exists() && !file.mkdirs()) {
            log.warn("Failed to create directories at path: " + file.getAbsolutePath());
        }
    }

    /**
     * Method to customize the Schema Processor.
     *
     * @param requestContext the request context for the import/put operation.
     * @param validationInfo the WSDL validation information.
     * @return the Schema Processor instance.
     */
    @SuppressWarnings("unused")
    protected SchemaProcessor buildSchemaProcessor(RequestContext requestContext,
                                                   WSDLValidationInfo validationInfo) {
        return new SchemaProcessor(requestContext, validationInfo);
    }

    /**
     * Method that runs the schema import procedure.
     *
     * @param requestContext the request context for the import operation
     * @param resourcePath   the path of the resource
     * @param validationInfo the validation information
     * @return the path at which the schema was uploaded to
     * @throws RegistryException if the operation failed.
     */
    protected String processSchemaImport(RequestContext requestContext, String resourcePath,
                                         WSDLValidationInfo validationInfo) throws RegistryException {
        SchemaProcessor schemaProcessor = buildSchemaProcessor(requestContext, validationInfo);
        return schemaProcessor
                .importSchemaToRegistry(requestContext, resourcePath,
                        getChrootedLocation(requestContext.getRegistryContext()),
                        true,disableSymlinkCreation);
    }

    /**
     * Method that will executed after the put operation has been done.
     * (Extension hook: the default implementation is intentionally empty.)
     *
     * @param path           the path of the resource.
     * @param addedResources the resources that have been added to the registry.
     * @param otherResources the resources that have not been added to the registry.
     * @param requestContext the request context for the put operation.
     * @throws RegistryException if the operation failed.
     */
    @SuppressWarnings("unused")
    protected void onPutCompleted(String path, Map<String, String> addedResources,
                                  List<String> otherResources, RequestContext requestContext)
            throws RegistryException {
    }

    // Prefixes the configured location with the chrooted governance base path.
    private String getChrootedLocation(RegistryContext registryContext) {
        return RegistryUtils.getAbsolutePath(registryContext,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + location);
    }

    // Config setter: only the literal string "true" disables validation.
    public void setDisableSchemaValidation(String disableSchemaValidation) {
        this.disableSchemaValidation = Boolean.toString(true).equals(disableSchemaValidation);
    }

    @Override
    public void delete(RequestContext requestContext) throws RegistryException {
        if (!CommonUtil.isUpdateLockAvailable()) {
            return;
        }
        CommonUtil.acquireUpdateLock();
        try {
            Registry registry = requestContext.getRegistry();
            ResourcePath resourcePath = requestContext.getResourcePath();
            if (resourcePath == null) {
                throw new RegistryException("The resource path is not available.");
            }
            // NOTE(review): the fetched resource is never used and no delete is
            // performed here — registry.get() may be relied on for its side
            // effect (existence/authorization check) before the default delete
            // proceeds; confirm before removing this line.
            Resource resource = registry.get(resourcePath.getPath());
        } finally {
            CommonUtil.releaseUpdateLock();
        }
    }
}
/* * Copyright (c) 1998 - 2014. University Corporation for Atmospheric Research/Unidata * Portions of this software were developed by the Unidata Program at the * University Corporation for Atmospheric Research. * * Access and use of this software shall impose the following obligations * and understandings on the user. The user is granted the right, without * any fee or cost, to use, copy, modify, alter, enhance and distribute * this software, and any derivative works thereof, and its supporting * documentation for any purpose whatsoever, provided that this entire * notice appears in all copies of the software, derivative works and * supporting documentation. Further, UCAR requests that the user credit * UCAR/Unidata in any publications that result from the use of this * software or in any product that includes this software. The names UCAR * and/or Unidata, however, may not be used in any advertising or publicity * to endorse or promote any products or commercial entity unless specific * written permission is obtained from UCAR/Unidata. The user also * understands that UCAR/Unidata is not obligated to provide the user with * any support, consulting, training or assistance of any kind with regard * to the use, operation and performance of this software nor to provide * the user with any updates, revisions, new versions or "bug fixes." * * THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL, * INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING * FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, * NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION * WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ package thredds.featurecollection; import org.jdom2.Element; import org.jdom2.input.SAXBuilder; import thredds.client.catalog.Catalog; import thredds.inventory.CollectionAbstract; import ucar.nc2.util.AliasTranslator; import java.io.File; import java.util.ArrayList; import java.util.Formatter; import java.util.List; /** * Read the <featureCollection> element. create a FeatureCollectionConfig * * @author John * @since 1/17/2015 */ public class FeatureCollectionConfigBuilder { Formatter errlog; public boolean fatalError; public FeatureCollectionConfigBuilder(Formatter errlog) { this.errlog = errlog; } // input is xml file with just the <featureCollection> public FeatureCollectionConfig readConfigFromFile(String filename) { org.jdom2.Document doc; try { SAXBuilder builder = new SAXBuilder(); doc = builder.build(filename); } catch (Exception e) { System.out.printf("Error parsing featureCollection %s err = %s", filename, e.getMessage()); return null; } return readConfig(doc.getRootElement()); } /** * Read a catalog and extract a FeatureCollectionConfig from it * * @param catalogAndPath catalog filename, or catalog#featureName * @return FeatureCollectionConfig or null */ public FeatureCollectionConfig readConfigFromCatalog(String catalogAndPath) { String catFilename; String fcName = null; int pos = catalogAndPath.indexOf("#"); if (pos > 0) { catFilename = catalogAndPath.substring(0, pos); fcName = catalogAndPath.substring(pos + 1); } else { catFilename = catalogAndPath; } File cat = new File(catFilename); org.jdom2.Document doc; try { SAXBuilder builder = new SAXBuilder(); doc = builder.build(cat); } catch (Exception e) { e.printStackTrace(); return null; } try { List<Element> fcElems = new ArrayList<>(); findFeatureCollection(doc.getRootElement(), fcName, fcElems); if (fcElems.size() > 0) return readConfig(fcElems.get(0)); } catch (IllegalStateException e) { e.printStackTrace(); } return null; } private void findFeatureCollection(Element parent, String name, 
List<Element> fcElems) { List<Element> elist = parent.getChildren("featureCollection", Catalog.defNS); if (name == null) fcElems.addAll(elist); else { for (Element elem : elist) { if (name.equals(elem.getAttributeValue("name"))) fcElems.add(elem); } } for (Element child : parent.getChildren("dataset", Catalog.defNS)) findFeatureCollection(child, name, fcElems); } public FeatureCollectionConfig readConfig(Element featureCollectionElement) { String name = featureCollectionElement.getAttributeValue("name"); String path = featureCollectionElement.getAttributeValue("path"); String fcTypeS = featureCollectionElement.getAttributeValue("featureType"); FeatureCollectionType fcType = FeatureCollectionType.valueOf(fcTypeS); if (fcType == null) { errlog.format("featureCollection %s must have a valid FeatureCollectionType attribute, found '%s'%n", name, fcTypeS); fatalError = true; } // collection element required Element collElem = featureCollectionElement.getChild("collection", Catalog.defNS); if (collElem == null) { errlog.format("featureCollection %s must have a <collection> element%n", name); fatalError = true; return null; } String collectionName = collElem.getAttributeValue("name"); collectionName = CollectionAbstract.cleanName(collectionName != null ? 
collectionName : name); String spec = collElem.getAttributeValue("spec"); spec = expandAliasForCollectionSpec(spec); String timePartition = collElem.getAttributeValue("timePartition"); String dateFormatMark = collElem.getAttributeValue("dateFormatMark"); String olderThan = collElem.getAttributeValue("olderThan"); String rootDir = collElem.getAttributeValue("rootDir"); String regExp = collElem.getAttributeValue("regExp"); if (spec == null && rootDir == null) { errlog.format("featureCollection " + name + " must have a spec or rootDir attribute."); fatalError = true; return null; } Element innerNcml = featureCollectionElement.getChild("netcdf", Catalog.ncmlNS); FeatureCollectionConfig config = new FeatureCollectionConfig(name, path, fcType, spec, collectionName, dateFormatMark, olderThan, timePartition, innerNcml); config.setFilter(rootDir, regExp); config.setFilesSort(featureCollectionElement.getChild("filesSort", Catalog.defNS)); // tds and update elements Element tdmElem = featureCollectionElement.getChild("tdm", Catalog.defNS); config.tdmConfig = readUpdateElement(tdmElem); Element updateElem = featureCollectionElement.getChild("update", Catalog.defNS); config.updateConfig = readUpdateElement(updateElem); // protoDataset element Element protoElem = featureCollectionElement.getChild("protoDataset", Catalog.defNS); if (protoElem != null) { String choice = protoElem.getAttributeValue("choice"); String change = protoElem.getAttributeValue("change"); String param = protoElem.getAttributeValue("param"); Element ncmlElem = protoElem.getChild("netcdf", Catalog.ncmlNS); config.protoConfig = new FeatureCollectionConfig.ProtoConfig(choice, change, param, ncmlElem); } // fmrcConfig element Element fmrcElem = featureCollectionElement.getChild("fmrcConfig", Catalog.defNS); if (fmrcElem != null) { String regularize = fmrcElem.getAttributeValue("regularize"); config.fmrcConfig = new FeatureCollectionConfig.FmrcConfig(regularize); String datasetTypes = 
fmrcElem.getAttributeValue("datasetTypes"); if (null != datasetTypes) config.fmrcConfig.addDatasetType(datasetTypes); List<Element> bestElemsOld = fmrcElem.getChildren("dataset", Catalog.defNS); for (Element best : bestElemsOld) { String bestName = best.getAttributeValue("name"); String offs = best.getAttributeValue("offsetsGreaterEqual"); double off = Double.parseDouble(offs); config.fmrcConfig.addBestDataset(bestName, off); } List<Element> bestElems = fmrcElem.getChildren("bestDataset", Catalog.defNS); for (Element best : bestElems) { String bestName = best.getAttributeValue("name"); String offs = best.getAttributeValue("offsetsGreaterEqual"); double off = Double.parseDouble(offs); config.fmrcConfig.addBestDataset(bestName, off); } } // pointConfig element optional Element pointElem = featureCollectionElement.getChild("pointConfig", Catalog.defNS); if (pointElem != null) { String datasetTypes = pointElem.getAttributeValue("datasetTypes"); if (null != datasetTypes) config.pointConfig.addDatasetType(datasetTypes); } // gribConfig element optional Element gribConfig = featureCollectionElement.getChild("gribConfig", Catalog.defNS); if (gribConfig != null) { config.gribConfig.configFromXml(gribConfig, Catalog.defNS); } // done reading - do anything needed config.finish(); return config; } private FeatureCollectionConfig.UpdateConfig readUpdateElement(Element updateElem) { if (updateElem == null) { return new FeatureCollectionConfig.UpdateConfig(); // default } String startup = updateElem.getAttributeValue("startup"); String rewrite = updateElem.getAttributeValue("rewrite"); String recheckAfter = updateElem.getAttributeValue("recheckAfter"); String rescan = updateElem.getAttributeValue("rescan"); String trigger = updateElem.getAttributeValue("trigger"); String deleteAfter = updateElem.getAttributeValue("deleteAfter"); return new FeatureCollectionConfig.UpdateConfig(startup, rewrite, recheckAfter, rescan, trigger, deleteAfter); } private String 
expandAliasForCollectionSpec(String location) { String result = AliasTranslator.translateAlias(location); if (result != null) return result; return location; } }
/*
 * <<
 * Davinci
 * ==
 * Copyright (C) 2016 - 2019 EDP
 * ==
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * >>
 */

package edp.davinci.dao;

import edp.davinci.dto.userDto.UserBaseInfo;
import edp.davinci.model.User;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Set;

/**
 * MyBatis mapper for the `user` table. Methods without inline annotations
 * (insert, getUsersByKeyword, getByIds, selectByEmails) are mapped in the
 * corresponding XML mapper file elsewhere in the project.
 */
@Component
public interface UserMapper {

    /** Inserts a new user row; mapping defined in the XML mapper. */
    int insert(User user);

    /** Loads a user by primary key, or null if not found. */
    @Select({"select * from `user` where id = #{id}"})
    User getById(@Param("id") Long id);

    /**
     * Finds a user whose username, email, OR display name equals the given value.
     * NOTE(review): a non-unique match across these three columns may raise a
     * TooManyResultsException — confirm uniqueness constraints.
     */
    @Select({"select * from `user` where `username` = #{username} or `email` = #{username} or `name` = #{username}"})
    User selectByUsername(@Param("username") String username);

    /** Finds a user by exact email match. */
    @Select({"select * from `user` where `email` = #{email}"})
    User selectByEmail(@Param("email") String email);

    /** Keyword search scoped to an organization; mapping defined in the XML mapper. */
    List<UserBaseInfo> getUsersByKeyword(@Param("keyword") String keyword, @Param("orgId") Long orgId);

    /** Updates name/description/department and the update timestamp. */
    @Update({"update `user` set `name` = #{name}, description = #{description}, department = #{department}, update_time = #{updateTime}", "where id = #{id}"})
    int updateBaseInfo(User user);

    /** Updates the avatar and the update timestamp. */
    @Update({"update user set `avatar` = #{avatar}, update_time = #{updateTime} where id = #{id}"})
    int updateAvatar(User user);

    /** Case-insensitive lookup of a user id by username, email, or display name. */
    @Select({"select id from user where (LOWER(`username`) = LOWER(#{name}) or LOWER(`email`) = LOWER(#{name}) or LOWER(`name`) = LOWER(#{name}))"})
    Long getIdByName(@Param("name") String name);

    /** Sets the active flag (account activation / deactivation). */
    @Update({"update `user` set `active` = #{active}, `update_time` = #{updateTime} where id = #{id}"})
    int activeUser(User user);

    /** Stores a new (already-hashed by the caller) password. */
    @Update({"update `user` set `password` = #{password}, `update_time` = #{updateTime} where id = #{id}"})
    int changePassword(User user);

    /** Bulk load by ids; mapping defined in the XML mapper. */
    List<User> getByIds(@Param("userIds") List<Long> userIds);

    /** True if a user with this email exists (count coerced to boolean by MyBatis). */
    @Select({"select count(id) from `user` where `email` = #{email}"})
    boolean existEmail(@Param("email") String email);

    /** True if a user with this username exists (count coerced to boolean by MyBatis). */
    @Select({"select count(id) from `user` where `username` = #{username}"})
    boolean existUsername(@Param("username") String username);

    /**
     * only for test
     * @param id
     * @return
     */
    @Delete({"delete from `user` where id = #{id}"})
    int deleteById(@Param("id") Long id);

    /** Bulk load by email addresses; mapping defined in the XML mapper. */
    List<User> selectByEmails(@Param("emails") Set<String> emails);
}
/*******************************************************************************
 * Copyright (c) 2000, 2005 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/

package org.eclipse.jdt.internal.corext.refactoring.typeconstraints2;

/**
 * A binary type constraint relating two constraint variables
 * (e.g. "left is a subtype of right") used by the type-constraints
 * refactoring infrastructure.
 */
public interface ITypeConstraint2 {

	/** Returns the left-hand constraint variable of this constraint. */
	public ConstraintVariable2 getLeft();

	/** Returns the right-hand constraint variable of this constraint. */
	public ConstraintVariable2 getRight();
}
/*
 * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glacier.model;

import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Data retrieval policy rule.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DataRetrievalRule implements Serializable, Cloneable, StructuredPojo {

    /**
     * The type of data retrieval policy to set.
     * Valid values: BytesPerHour|FreeTier|None
     */
    private String strategy;

    /**
     * The maximum number of bytes that can be retrieved in an hour. Required only when the Strategy field is
     * <code>BytesPerHour</code>; a PUT is rejected if this is set with any other strategy.
     */
    private Long bytesPerHour;

    /**
     * Sets the retrieval strategy.
     *
     * @param strategy
     *        The type of data retrieval policy to set. Valid values: BytesPerHour|FreeTier|None
     */
    public void setStrategy(String strategy) {
        this.strategy = strategy;
    }

    /**
     * Returns the retrieval strategy.
     *
     * @return The type of data retrieval policy to set. Valid values: BytesPerHour|FreeTier|None
     */
    public String getStrategy() {
        return this.strategy;
    }

    /**
     * Fluent variant of {@link #setStrategy(String)}.
     *
     * @param strategy
     *        The type of data retrieval policy to set. Valid values: BytesPerHour|FreeTier|None
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DataRetrievalRule withStrategy(String strategy) {
        setStrategy(strategy);
        return this;
    }

    /**
     * Sets the hourly retrieval byte cap.
     *
     * @param bytesPerHour
     *        The maximum number of bytes that can be retrieved in an hour. Required only when the Strategy field
     *        is <code>BytesPerHour</code>; a PUT is rejected if this is set with any other strategy.
     */
    public void setBytesPerHour(Long bytesPerHour) {
        this.bytesPerHour = bytesPerHour;
    }

    /**
     * Returns the hourly retrieval byte cap.
     *
     * @return The maximum number of bytes that can be retrieved in an hour. Required only when the Strategy field
     *         is <code>BytesPerHour</code>; a PUT is rejected if this is set with any other strategy.
     */
    public Long getBytesPerHour() {
        return this.bytesPerHour;
    }

    /**
     * Fluent variant of {@link #setBytesPerHour(Long)}.
     *
     * @param bytesPerHour
     *        The maximum number of bytes that can be retrieved in an hour. Required only when the Strategy field
     *        is <code>BytesPerHour</code>; a PUT is rejected if this is set with any other strategy.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DataRetrievalRule withBytesPerHour(Long bytesPerHour) {
        setBytesPerHour(bytesPerHour);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getStrategy() != null) {
            buf.append("Strategy: ").append(getStrategy()).append(",");
        }
        if (getBytesPerHour() != null) {
            buf.append("BytesPerHour: ").append(getBytesPerHour());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the null check as well
        if (!(obj instanceof DataRetrievalRule)) {
            return false;
        }
        DataRetrievalRule that = (DataRetrievalRule) obj;
        return Objects.equals(getStrategy(), that.getStrategy())
            && Objects.equals(getBytesPerHour(), that.getBytesPerHour());
    }

    @Override
    public int hashCode() {
        // Objects.hash yields the same 31-based accumulation as the generated original
        return Objects.hash(getStrategy(), getBytesPerHour());
    }

    @Override
    public DataRetrievalRule clone() {
        try {
            return (DataRetrievalRule) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.glacier.model.transform.DataRetrievalRuleMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package com;

import com.algorithm.SearchAlgorithm;
import com.algorithm.SortAlgorithm;
import com.codility.Solution;
import com.data_structure.BinaryTree;
import com.data_structure.CustomHashMap;
import com.data_structure.LRUCache;
import com.data_structure.SinglyLinkedList;
import com.dynamic_programming.*;
import com.graph_traversal.FindShortestPath;
import com.graph_traversal.NetworkGraph;
import com.math_related.MathRelated;
import com.tree_traversal.TreeTraversal;

import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

/**
 * Manual test drivers for the algorithm and data-structure packages.
 *
 * Bug fixed: every switch on {@code selection} was missing {@code break}
 * statements, so selecting case 0 fell through and executed every case.
 */
public class TestCases {

    /**
     * Selection of test case for sort algorithm related
     * @param selection number of selected test case
     */
    public static void testSortAlgorithms(int selection) {
        // generate random numbers
        int n = 10;
        List<Integer> inputSort = Utils.generateRandomIntegers(n, n);
        System.out.println("inputSort: " + inputSort);

        switch (selection) {
            case 0:
                // test Bubble Sort algorithm
                SortAlgorithm.bubbleSort(inputSort);
                System.out.println("inputSort: " + inputSort);
                break;
            case 1:
                // test Selection Sort algorithm
                SortAlgorithm.selectionSort(inputSort);
                System.out.println("inputSort: " + inputSort);
                break;
            case 2:
                // test Insertion Sort algorithm
                SortAlgorithm.insertionSort(inputSort);
                System.out.println("inputSort: " + inputSort);
                break;
            case 3:
                // test Merge Sort algorithm (returns a new sorted list)
                List<Integer> outputSort = SortAlgorithm.mergeSort(inputSort);
                System.out.println("outputSort: " + outputSort);
                break;
            case 4:
                // test Quick Sort algorithm
                SortAlgorithm.quickSort(inputSort);
                System.out.println("inputSort: " + inputSort);
                break;
        }
    }

    /** Runs binary search on a random list and prints the found index. */
    public static void testSearchAlgorithms() {
        // generate random numbers
        int n = 10;
        List<Integer> inputSearch = Utils.generateRandomIntegers(n, n);
        int searchNumber = 5;

        // test Binary Search algorithm
        int indexInput = SearchAlgorithm.binarySearch(inputSearch, searchNumber);
        System.out.println("indexInput: " + indexInput);
    }

    /**
     * Selection of test case for data structure related
     * @param selection number of selected test case
     */
    public static void testDataStructures(int selection) {
        switch (selection) {
            case 0:
                CustomHashMap customHashMap = new CustomHashMap();
                customHashMap.put("test", 1);
                System.out.println("test get customHashMap: " + customHashMap.get("test"));
                System.out.println("test get customHashMap: " + customHashMap.get("test2"));
                break;
            case 1:
                SinglyLinkedList list = new SinglyLinkedList();
                list.add(1);
                list.add(2);
                list.add(3);
                list.add(4);
                list.printList();
                list.reverseList();
                list.printList();
                break;
            case 2:
                LRUCache cache = new LRUCache(4);
                for (int i = 1; i <= 10; i++) {
                    cache.put(i, i);
                }
                cache.printContent();
                cache.get(7);
                cache.printContent();
                cache.put(10, 2);
                cache.printContent();
                cache.remove(7);
                cache.printContent();
                cache.evict();
                cache.printContent();
                break;
        }
    }

    /** Finds and prints a path through a small hard-coded boolean maze. */
    public static void testFindPath() {
        //////////////////////// Test find path in a maze //////////////////////
        // create a two-dimensional boolean maze
        boolean[][] maze = new boolean[][]{
                {true, true, false},
                {false, false, true},
                {false, false, true},
        };

        // test find path in maze problem
        List<FindPath.Point> path = FindPath.getPathFromBooleanMaze(maze);
        System.out.println("path: " + path.toString());
    }

    /**
     * Selection of test case for finding max value related
     * @param selection number of selected test case
     */
    public static void testFindMaxValue(int selection) {
        switch (selection) {
            case 0:
                ///////////////////// Test find max profit stock option //////////////////////
                int numberPoints = 10;
                // generate random floats
                List<Float> floats = Utils.generateRandomFloats(numberPoints, 100);
                // generate list of DateTimes
                List<LocalDateTime> times = Utils.generateDateTimes(numberPoints);
                // generate list of stock option prices
                List<FindMaxValue.StockOption> stockOptions = new LinkedList<>();
                for (int i = 0; i < numberPoints; i++) {
                    FindMaxValue.StockOption stockOption =
                            new FindMaxValue.StockOption(floats.get(i), times.get(i));
                    stockOptions.add(stockOption);
                }
                float maxProfit = FindMaxValue.maxProfitStockOptions(stockOptions);
                // print the result for consistency with the other cases
                System.out.println("maxProfit: " + maxProfit);
                break;
            case 1:
                ///////////////////// Test find max profit with two transactions //////////////////////
                int[] prices = {2, 30, 15, 10, 8, 25, 80};
                float profit = FindMaxValue.maxProfitWithMaxTwoTransactions(prices);
                System.out.println("Maximum Profit = " + profit);
                break;
            case 2:
                ///////////////////// Test find max product of three integers //////////////////////
                int[] inputInts = new int[]{-100, 100, 200, 9, 2};
                int maxProducts = FindMaxValue.findMaxProductsThreeInts(inputInts);
                System.out.println("maxProducts: " + maxProducts);
                break;
            case 3:
                int sellProfit;
                // Find max profit by selling flower bouquets
                sellProfit = FindMaxValue.findMaxProfitSellingBouquets(2, 3, "0001000");
                System.out.println("profit: " + sellProfit);
                sellProfit = FindMaxValue.findMaxProfitSellingBouquets(2, 3, "01");
                System.out.println("profit: " + sellProfit);
                sellProfit = FindMaxValue.findMaxProfitSellingBouquets(10, 7, "0");
                System.out.println("profit: " + sellProfit);
                sellProfit = FindMaxValue.findMaxProfitSellingBouquets(10, 7, "0011001100");
                System.out.println("profit: " + sellProfit);
                break;
        }
    }

    /**
     * Selection of test case for String construct related
     * @param selection number of selected test case
     */
    public static void testStringConstruct(int selection) {
        String[] substrings = {"B", "T", "A", "C", "MA", "N", "M", "I", "ATMA", "D"};
        String stringToBeConstructed = "BATMANC";

        switch (selection) {
            case 0:
                boolean canStringBeConstructed = StringConstruct.isStringConstructableFromSubstrings(
                        stringToBeConstructed, Arrays.asList(substrings)
                );
                System.out.println("canStringBeConstructed: " + canStringBeConstructed);
                break;
            case 1:
                boolean isConstructableRecursive = StringConstructDp.isConstructableByDictRecursive(
                        stringToBeConstructed, new ArrayList<>(Arrays.asList(substrings)));
                System.out.println("isConstructable: " + isConstructableRecursive);
                break;
            case 2:
                boolean isConstructableIterative = StringConstructDp.isConstructableByDictIterative(
                        stringToBeConstructed, new ArrayList<>(Arrays.asList(substrings)));
                System.out.println("isConstructable: " + isConstructableIterative);
                break;
        }
    }

    /** Finds and prints all anagram groups in a small hard-coded word list. */
    public static void testFindCombination() {
        // test find anagrams in dict
        String[] words = {"cat", "bac", "test", "act", "demon", "tset", "act", "tac"};
        List<String> wordList = new ArrayList<>(Arrays.asList(words));
        List<String> anagrams = FindCombination.findAnagramInDict(wordList);
        System.out.println("anagrams: " + anagrams);
    }

    /**
     * Selection of test case for Graph related
     * @param selection number of selected test case
     */
    public static void testGraph(int selection) {
        switch (selection) {
            case 0:
                // find network cluster sizes
                String fileName = "task2-test-input.txt";
                String inputPath = Paths.get(".", "src", "com", "graph_traversal", fileName)
                        .toString();
                NetworkGraph.findSubclustersSize(inputPath, "output.txt");
                break;
            case 1:
                // find shortest path using dijkstra
                // adjacency matrix holding the weight of the edges (0 = no edge)
                int[][] adjacencyMatrix = new int[][]{
                        {0, 4, 0, 0, 0, 0, 0, 8, 0},
                        {4, 0, 8, 0, 0, 0, 0, 11, 0},
                        {0, 8, 0, 7, 0, 4, 0, 0, 2},
                        {0, 0, 7, 0, 9, 14, 0, 0, 0},
                        {0, 0, 0, 9, 0, 10, 0, 0, 0},
                        {0, 0, 4, 14, 10, 0, 2, 0, 0},
                        {0, 0, 0, 0, 0, 2, 0, 1, 6},
                        {8, 11, 0, 0, 0, 0, 1, 0, 7},
                        {0, 0, 2, 0, 0, 0, 6, 7, 0}
                };
                // source and destination vertices
                int src = 8;
                int dest = 4;
                FindShortestPath.Path shortestPath = FindShortestPath.dijkstra(adjacencyMatrix, src, dest);
                System.out.println("shortestPath: " + shortestPath.getPath());
                System.out.println("shortestPath distance: " + shortestPath.getDistance());
                break;
            case 2:
                // find minimum distance in a 2D maze from start point to end point using BFS
                int[][] mat = {
                        {1, 0, 1, 1, 1, 1, 0, 1, 1, 1},
                        {1, 0, 1, 0, 1, 1, 1, 0, 1, 1},
                        {1, 1, 1, 0, 1, 1, 0, 1, 0, 1},
                        {0, 0, 0, 0, 1, 0, 0, 0, 0, 1},
                        {1, 1, 1, 0, 1, 1, 1, 0, 1, 0},
                        {1, 0, 1, 1, 1, 1, 0, 1, 0, 0},
                        {1, 0, 0, 0, 0, 0, 0, 0, 0, 1},
                        {1, 0, 1, 1, 1, 1, 0, 1, 1, 1},
                        {1, 1, 0, 0, 0, 0, 1, 0, 0, 1}
                };
                FindShortestPath.Point srcPoint = new FindShortestPath.Point(0, 0);
                FindShortestPath.Point destPoint = new FindShortestPath.Point(8, 9);
                int minDistance = FindShortestPath.bfs(mat, srcPoint, destPoint);
                System.out.println("minDistance: " + minDistance);
                break;
        }
    }

    /** Placeholder for ad-hoc experiments. */
    public static void testOther() {
    }

    /**
     * Selection of test case for Tree related
     * @param selection number of selected test case
     */
    public static void testTree(int selection) {
        switch (selection) {
            case 0:
                BinaryTree<Integer> tree = new BinaryTree<>();
                tree.setRoot(1);
                tree.getRoot().setLeft(2);
                tree.getRoot().setRight(3);
                tree.getRoot().getLeft().setLeft(4);
                tree.getRoot().getLeft().setRight(1);
                tree.getRoot().getLeft().getLeft().setLeft(8);
                tree.getRoot().getLeft().getLeft().setRight(7);
                tree.getRoot().getLeft().getRight().setLeft(10);
                tree.getRoot().getLeft().getRight().setRight(12);

                System.out.println("Preorder traversal of binary tree is ");
                tree.printPreorder();
                System.out.println("\nInorder traversal of binary tree is ");
                tree.printInorder();
                System.out.println("\nPostorder traversal of binary tree is ");
                tree.printPostorder();

                Integer maxElement = (Integer) TreeTraversal
                        .getMaxElementInLevel(tree.getRoot(), 3);
                System.out.println("maxElement: " + maxElement);
                break;
        }
    }

    /**
     * Selection of test case for Math related
     * @param selection number of selected test case
     */
    public static void testMathRelated(int selection) {
        switch (selection) {
            case 0:
                // check if prime number
                for (int i = 1; i <= 1000; i++) {
                    if (MathRelated.isPrimeNumber(i))
                        System.out.println("Prime number: " + i);
                }
                break;
            case 1:
                // print Fibonacci series
                MathRelated.printFibonacciSeries(100);
                break;
        }
    }

    /**
     * Selection of test case for Codility problems
     * @param selection number of selected test case
     */
    public static void testCodility(int selection) {
        Solution solution = new Solution();
        int result;
        switch (selection) {
            case 0:
                int[] A1 = {1, 3, 6, 4, 1, 2};
                // alternative inputs kept for manual experimentation
                int[] A2 = {-1, -3};
                int[] A3 = {1, 2, 3};
                result = solution.smallestPositiveIntegerNotOccurInArray(A1);
                // print the result for consistency with the other cases
                System.out.println("result: " + result);
                break;
            case 1:
                int[] input = {102, 955, 1651};
                for (int i : input) {
                    result = solution.findBinaryPeriodOfAnInt(i);
                    System.out.println("result: " + result);
                }
                break;
            case 2:
                int[] inputA = {10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 11};
                // alternative inputs kept for manual experimentation
                int[] inputA1 = {1, 0, -1, -2, -3};
                int[] inputA2 = {1, 1, 1, 1, 1, 1};
                int[] inputA3 = {};
                int[] inputA4 = {1};
                int[] inputA5 = {1, 1};
                int[] inputA6 = {1, 2, 3, 4, 5, 6, 7};
                result = solution.findMaxDistanceMonotonic(inputA6);
                System.out.println("result: " + result);
                break;
        }
    }
}
/* Calldrive Android Library is available under MIT license * * @author Mario Danic * Copyright (C) 2017 Mario Danic * Copyright (C) 2017 Calldrive GmbH * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 *
 */
package com.owncloud.android.lib.common;

import com.google.gson.annotations.SerializedName;

import org.parceler.Parcel;

import java.util.ArrayList;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * User information data model.
 *
 * Deserialized from the server's user-info endpoint via Gson; the alternate
 * {@code @SerializedName} names accept both server spellings of the same field.
 * NOTE(review): the fields are public AND carry Lombok @Getter/@Setter, so the
 * generated accessors are redundant with direct field access — confirm intent.
 */
@Parcel
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class UserInfo {
    // unique account identifier on the server
    @SerializedName("id")
    public String id;

    // whether the account is enabled; Boolean (not boolean) so "absent" stays null
    @SerializedName("enabled")
    public Boolean enabled;

    // display name; servers may send either "display-name" or "displayname"
    @SerializedName(value = "display-name", alternate = {"displayname"})
    public String displayName;

    @SerializedName("email")
    public String email;

    @SerializedName("phone")
    public String phone;

    @SerializedName("address")
    public String address;

    // personal website; servers may send either "website" or "webpage"
    @SerializedName(value = "website", alternate = {"webpage"})
    public String website;

    @SerializedName("twitter")
    public String twitter;

    // storage quota details (nested object)
    @SerializedName("quota")
    public Quota quota;

    // names of the groups the user belongs to
    @SerializedName("groups")
    public ArrayList<String> groups;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed;

import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.cluster.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.cache.distributed.dht.*;
import org.apache.ignite.internal.processors.cache.distributed.near.*;
import org.apache.ignite.internal.processors.cache.transactions.*;
import org.apache.ignite.internal.processors.cache.version.*;
import org.apache.ignite.internal.processors.datastreamer.*;
import org.apache.ignite.internal.processors.task.*;
import org.apache.ignite.internal.util.future.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.resources.*;
import org.apache.ignite.transactions.*;
import org.jetbrains.annotations.*;

import java.io.*;
import java.util.*;
import java.util.concurrent.*;

import static org.apache.ignite.internal.GridClosureCallMode.*;

/**
 * Distributed cache implementation.
 */
public abstract class GridDistributedCacheAdapter<K, V> extends GridCacheAdapter<K, V> {
    /** */
    private static final long serialVersionUID = 0L;

    /**
     * Empty constructor required by {@link Externalizable}.
     */
    protected GridDistributedCacheAdapter() {
        // No-op.
    }

    /**
     * @param ctx Cache registry.
     * @param startSize Start size.
     */
    protected GridDistributedCacheAdapter(GridCacheContext<K, V> ctx, int startSize) {
        super(ctx, startSize);
    }

    /**
     * @param ctx Cache context.
     * @param map Cache map.
     */
    protected GridDistributedCacheAdapter(GridCacheContext<K, V> ctx, GridCacheConcurrentMap map) {
        super(ctx, map);
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<Boolean> txLockAsync(
        Collection<KeyCacheObject> keys,
        long timeout,
        IgniteTxLocalEx tx,
        boolean isRead,
        boolean retval,
        TransactionIsolation isolation,
        boolean isInvalidate,
        long accessTtl,
        CacheEntryPredicate[] filter
    ) {
        assert tx != null;

        // Delegates to the subclass-specific lock implementation.
        return lockAllAsync(keys, timeout, tx, isInvalidate, isRead, retval, isolation, accessTtl, filter);
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<Boolean> lockAllAsync(Collection<? extends K> keys,
        long timeout,
        CacheEntryPredicate... filter) {
        // Explicit (user-initiated) lock: pick up the current user transaction, if any.
        IgniteTxLocalEx tx = ctx.tm().userTxx();

        // Return value flag is true because we choose to bring values for explicit locks.
        return lockAllAsync(ctx.cacheKeysView(keys),
            timeout,
            tx,
            false,
            false,
            /*retval*/true,
            null,
            -1L,
            filter);
    }

    /**
     * @param keys Keys to lock.
     * @param timeout Timeout.
     * @param tx Transaction
     * @param isInvalidate Invalidation flag.
     * @param isRead Indicates whether value is read or written.
     * @param retval Flag to return value.
     * @param isolation Transaction isolation.
     * @param accessTtl TTL for read operation.
     * @param filter Optional filter.
     * @return Future for locks.
     */
    protected abstract IgniteInternalFuture<Boolean> lockAllAsync(Collection<KeyCacheObject> keys,
        long timeout,
        @Nullable IgniteTxLocalEx tx,
        boolean isInvalidate,
        boolean isRead,
        boolean retval,
        @Nullable TransactionIsolation isolation,
        long accessTtl,
        CacheEntryPredicate[] filter);

    /**
     * Removes the cache entry only if it still carries the given version
     * (a compare-and-remove on entry version).
     *
     * @param key Key to remove.
     * @param ver Version to remove.
     */
    public void removeVersionedEntry(KeyCacheObject key, GridCacheVersion ver) {
        GridCacheEntryEx entry = peekEx(key);

        if (entry == null)
            return;

        // Only remove when the version still matches; otherwise a newer update wins.
        if (entry.markObsoleteVersion(ver))
            removeEntry(entry);
    }

    /** {@inheritDoc} */
    @Override public void removeAll() throws IgniteCheckedException {
        try {
            long topVer;

            // Retry until the topology version is stable across the broadcast:
            // if nodes joined/left mid-operation, rerun so new primaries are covered.
            do {
                topVer = ctx.affinity().affinityTopologyVersion();

                // Send job to all data nodes.
                Collection<ClusterNode> nodes = ctx.grid().cluster().forDataNodes(name()).nodes();

                if (!nodes.isEmpty()) {
                    ctx.closures().callAsyncNoFailover(BROADCAST,
                        new GlobalRemoveAllCallable<>(name(), topVer), nodes, true).get();
                }
            }
            while (ctx.affinity().affinityTopologyVersion() > topVer);
        }
        catch (ClusterGroupEmptyCheckedException ignore) {
            // No data nodes left: nothing to remove, treat as success.
            if (log.isDebugEnabled())
                log.debug("All remote nodes left while cache remove [cacheName=" + name() + "]");
        }
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> removeAllAsync() {
        GridFutureAdapter<Void> opFut = new GridFutureAdapter<>();

        long topVer = ctx.affinity().affinityTopologyVersion();

        removeAllAsync(opFut, topVer);

        return opFut;
    }

    /**
     * Asynchronous counterpart of the removeAll() retry loop: re-invokes itself
     * from the completion listener whenever the topology version changed.
     *
     * @param opFut Future.
     * @param topVer Topology version.
     */
    private void removeAllAsync(final GridFutureAdapter<Void> opFut, final long topVer) {
        Collection<ClusterNode> nodes = ctx.grid().cluster().forDataNodes(name()).nodes();

        if (!nodes.isEmpty()) {
            IgniteInternalFuture<?> rmvFut = ctx.closures().callAsyncNoFailover(BROADCAST,
                new GlobalRemoveAllCallable<>(name(), topVer), nodes, true);

            rmvFut.listen(new IgniteInClosure<IgniteInternalFuture<?>>() {
                @Override public void apply(IgniteInternalFuture<?> fut) {
                    try {
                        fut.get();

                        long topVer0 = ctx.affinity().affinityTopologyVersion();

                        // Topology unchanged: done. Otherwise retry on the new version.
                        if (topVer0 == topVer)
                            opFut.onDone();
                        else
                            removeAllAsync(opFut, topVer0);
                    }
                    catch (ClusterGroupEmptyCheckedException ignore) {
                        // No data nodes left: nothing to remove, complete successfully.
                        if (log.isDebugEnabled())
                            log.debug("All remote nodes left while cache remove [cacheName=" + name() + "]");

                        opFut.onDone();
                    }
                    catch (IgniteCheckedException e) {
                        opFut.onDone(e);
                    }
                    catch (Error e) {
                        // Propagate fatal errors after failing the future.
                        opFut.onDone(e);

                        throw e;
                    }
                }
            });
        }
        else
            opFut.onDone();
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDistributedCacheAdapter.class, this, "super", super.toString());
    }

    /**
     * Internal callable which performs remove all primary key mappings
     * operation on a cache with the given name.
     */
    @GridInternal
    private static class GlobalRemoveAllCallable<K,V> implements Callable<Object>, Externalizable {
        /** */
        private static final long serialVersionUID = 0L;

        /** Cache name. */
        private String cacheName;

        /** Topology version. */
        private long topVer;

        /** Injected grid instance. */
        @IgniteInstanceResource
        private Ignite ignite;

        /**
         * Empty constructor for serialization.
         */
        public GlobalRemoveAllCallable() {
            // No-op.
        }

        /**
         * @param cacheName Cache name.
         * @param topVer Topology version.
         */
        private GlobalRemoveAllCallable(String cacheName, long topVer) {
            this.cacheName = cacheName;
            this.topVer = topVer;
        }

        /**
         * {@inheritDoc}
         */
        @Override public Object call() throws Exception {
            GridCacheAdapter<K, V> cacheAdapter = ((IgniteKernal)ignite).context().cache().internalCache(cacheName);

            final GridCacheContext<K, V> ctx = cacheAdapter.context();

            // Wait until this node has caught up to the requested topology version.
            ctx.affinity().affinityReadyFuture(topVer).get();

            ctx.gate().enter();

            try {
                if (ctx.affinity().affinityTopologyVersion() != topVer)
                    return null; // Ignore this remove request because remove request will be sent again.

                GridDhtCacheAdapter<K, V> dht;

                // For near caches operate on the underlying DHT cache.
                if (cacheAdapter instanceof GridNearCacheAdapter)
                    dht = ((GridNearCacheAdapter<K, V>)cacheAdapter).dht();
                else
                    dht = (GridDhtCacheAdapter<K, V>)cacheAdapter;

                try (DataStreamerImpl<KeyCacheObject, Object> dataLdr =
                         (DataStreamerImpl)ignite.dataStreamer(cacheName)) {
                    // Fail instead of remapping: a topology change triggers a full retry anyway.
                    ((DataStreamerImpl)dataLdr).maxRemapCount(0);

                    dataLdr.updater(DataStreamerCacheUpdaters.<KeyCacheObject, Object>batched());

                    // Remove every non-obsolete entry from primary partitions on this node.
                    for (GridDhtLocalPartition locPart : dht.topology().currentLocalPartitions()) {
                        if (!locPart.isEmpty() && locPart.primary(topVer)) {
                            for (GridDhtCacheEntry o : locPart.entries()) {
                                if (!o.obsoleteOrDeleted())
                                    dataLdr.removeDataInternal(o.key());
                            }
                        }
                    }

                    // Also remove keys that live only in off-heap / swap storage.
                    Iterator<KeyCacheObject> it = dht.context().swap().offHeapKeyIterator(true, false, topVer);

                    while (it.hasNext())
                        dataLdr.removeDataInternal(it.next());

                    it = dht.context().swap().swapKeyIterator(true, false, topVer);

                    while (it.hasNext())
                        dataLdr.removeDataInternal(it.next());
                }
            }
            finally {
                ctx.gate().leave();
            }

            return null;
        }

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            U.writeString(out, cacheName);
            out.writeLong(topVer);
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            cacheName = U.readString(in);
            topVer = in.readLong();
        }
    }
}
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Bob Jervis * Google Inc. * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. 
 *
 * ***** END LICENSE BLOCK ***** */

package com.google.javascript.rhino.jstype;

import static com.google.javascript.rhino.jstype.TernaryValue.UNKNOWN;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.javascript.rhino.ErrorReporter;

import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * The {@code UnionType} implements a common JavaScript idiom in which the
 * code is specifically designed to work with multiple input types.  Because
 * JavaScript always knows the runtime type of an object value, this is safer
 * than a C union.<p>
 *
 * For instance, values of the union type {@code (String,boolean)} can be of
 * type {@code String} or of type {@code boolean}. The commutativity of the
 * statement is captured by making {@code (String,boolean)} and
 * {@code (boolean,String)} equal.<p>
 *
 * The implementation of this class prevents the creation of nested
 * unions.<p>
 */
public class UnionType extends JSType {
  private static final long serialVersionUID = 1L;

  // Alternates of the union. Not final: resolveInternal() may swap in a
  // resolved collection (see the note there).
  Collection<JSType> alternates;

  // Hash of the alternates, computed eagerly and cached; hashCode() returns
  // this value, so the alternates are expected not to change observably.
  private final int hashcode;

  /**
   * Creates a union type.
   *
   * @param alternates the alternates of the union
   */
  UnionType(JSTypeRegistry registry, Collection<JSType> alternates) {
    super(registry);
    this.alternates = alternates;
    this.hashcode = this.alternates.hashCode();
  }

  /**
   * Gets the alternate types of this union type.
   * @return The alternate types of this union type. The returned set is
   *     immutable.
   */
  public Iterable<JSType> getAlternates() {
    return alternates;
  }

  /**
   * This predicate is used to test whether a given type can appear in a
   * numeric context, such as an operand of a multiply operator.
   *
   * @return true if the type can appear in a numeric context.
   */
  @Override
  public boolean matchesNumberContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    // True if ANY alternate matches (generous, not sound).
    for (JSType t : alternates) {
      if (t.matchesNumberContext()) {
        return true;
      }
    }
    return false;
  }

  /**
   * This predicate is used to test whether a given type can appear in a
   * {@code String} context, such as an operand of a string concat ({@code +})
   * operator.<p>
   *
   * All types have at least the potential for converting to {@code String}.
   * When we add externally defined types, such as a browser OM, we may choose
   * to add types that do not automatically convert to {@code String}.
   *
   * @return {@code true} if not {@link VoidType}
   */
  @Override
  public boolean matchesStringContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    for (JSType t : alternates) {
      if (t.matchesStringContext()) {
        return true;
      }
    }
    return false;
  }

  /**
   * This predicate is used to test whether a given type can appear in an
   * {@code Object} context, such as the expression in a {@code with}
   * statement.<p>
   *
   * Most types we will encounter, except notably {@code null}, have at least
   * the potential for converting to {@code Object}.  Host defined objects can
   * get peculiar.<p>
   *
   * VOID type is included here because while it is not part of the JavaScript
   * language, functions returning 'void' type can't be used as operands of
   * any operator or statement.<p>
   *
   * @return {@code true} if the type is not {@link NullType} or
   *         {@link VoidType}
   */
  @Override
  public boolean matchesObjectContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    for (JSType t : alternates) {
      if (t.matchesObjectContext()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Finds the property's type over all non-null/void alternates, joining
   * differing types with their least supertype. Returns null when no
   * alternate declares the property.
   */
  @Override
  public JSType findPropertyType(String propertyName) {
    JSType propertyType = null;

    for (JSType alternate : getAlternates()) {
      // Filter out the null/undefined type.
      if (alternate.isNullType() || alternate.isVoidType()) {
        continue;
      }

      JSType altPropertyType = alternate.findPropertyType(propertyName);
      if (altPropertyType == null) {
        continue;
      }

      if (propertyType == null) {
        propertyType = altPropertyType;
      } else {
        // Join: the property may come from any alternate at runtime.
        propertyType = propertyType.getLeastSupertype(altPropertyType);
      }
    }

    return propertyType;
  }

  /**
   * Assignable iff every alternate is assignable; an unknown alternate makes
   * the whole union assignable.
   */
  @Override
  public boolean canAssignTo(JSType that) {
    boolean canAssign = true;
    for (JSType t : alternates) {
      if (t.isUnknownType()) {
        return true;
      }
      canAssign &= t.canAssignTo(that);
    }
    return canAssign;
  }

  /** Callable only if every alternate is callable. */
  @Override
  public boolean canBeCalled() {
    for (JSType t : alternates) {
      if (!t.canBeCalled()) {
        return false;
      }
    }
    return true;
  }

  /** Autoboxes each alternate and rebuilds the union. */
  @Override
  public JSType autobox() {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      restricted.addAlternate(t.autobox());
    }
    return restricted.build();
  }

  /** Restricts each alternate by not-null/undefined and rebuilds the union. */
  @Override
  public JSType restrictByNotNullOrUndefined() {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      restricted.addAlternate(t.restrictByNotNullOrUndefined());
    }
    return restricted.build();
  }

  /**
   * Equality test result is the alternates' common answer, or UNKNOWN when
   * the alternates disagree.
   */
  @Override
  public TernaryValue testForEquality(JSType that) {
    TernaryValue result = null;
    for (JSType t : alternates) {
      TernaryValue test = t.testForEquality(that);
      if (result == null) {
        result = test;
      } else if (!result.equals(test)) {
        return UNKNOWN;
      }
    }
    return result;
  }

  /**
   * This predicate determines whether objects of this type can have the
   * {@code null} value, and therefore can appear in contexts where
   * {@code null} is expected.
   *
   * @return {@code true} for everything but {@code Number} and
   *         {@code Boolean} types.
   */
  @Override
  public boolean isNullable() {
    for (JSType t : alternates) {
      if (t.isNullable()) {
        return true;
      }
    }
    return false;
  }

  /** True if any alternate is unknown. */
  @Override
  public boolean isUnknownType() {
    for (JSType t : alternates) {
      if (t.isUnknownType()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Short-circuit: if {@code that} is a subtype of one of our alternates,
   * this union already is the least supertype; otherwise defer to the
   * generic implementation.
   */
  @Override
  public JSType getLeastSupertype(JSType that) {
    if (!that.isUnknownType() && !that.isUnionType()) {
      for (JSType alternate : alternates) {
        if (!alternate.isUnknownType() && that.isSubtype(alternate)) {
          return this;
        }
      }
    }

    return getLeastSupertype(this, that);
  }

  /**
   * Greatest lower bound: keeps alternates that are subtypes of {@code that}
   * (and vice versa for a union argument). Falls back to NO_OBJECT_TYPE /
   * NO_TYPE when the meet is empty.
   */
  JSType meet(JSType that) {
    UnionTypeBuilder builder = new UnionTypeBuilder(registry);
    for (JSType alternate : alternates) {
      if (alternate.isSubtype(that)) {
        builder.addAlternate(alternate);
      }
    }

    if (that.isUnionType()) {
      for (JSType otherAlternate : that.toMaybeUnionType().alternates) {
        if (otherAlternate.isSubtype(this)) {
          builder.addAlternate(otherAlternate);
        }
      }
    } else if (that.isSubtype(this)) {
      builder.addAlternate(that);
    }
    JSType result = builder.build();
    if (!result.isNoType()) {
      return result;
    } else if (this.isObject() && that.isObject()) {
      return getNativeType(JSTypeNative.NO_OBJECT_TYPE);
    } else {
      return getNativeType(JSTypeNative.NO_TYPE);
    }
  }

  /**
   * Two union types are equal if they have the same number of alternates
   * and all alternates are equal.
   */
  @Override
  public boolean isEquivalentTo(JSType object) {
    if (object == null) {
      return false;
    }
    if (object.isUnionType()) {
      UnionType that = object.toMaybeUnionType();
      if (alternates.size() != that.alternates.size()) {
        return false;
      }
      // Same size plus containment in one direction implies set equality.
      for (JSType alternate : that.alternates) {
        if (!hasAlternate(alternate)) {
          return false;
        }
      }
      return true;
    } else {
      return false;
    }
  }

  /** True if some alternate is equivalent to {@code type}. */
  private boolean hasAlternate(JSType type) {
    for (JSType alternate : alternates) {
      if (alternate.isEquivalentTo(type)) {
        return true;
      }
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Cached at construction time (alternates must stay stable).
    return this.hashcode;
  }

  @Override
  public UnionType toMaybeUnionType() {
    return this;
  }

  /** An object only if every alternate is an object. */
  @Override
  public boolean isObject() {
    for (JSType alternate : alternates) {
      if (!alternate.isObject()) {
        return false;
      }
    }
    return true;
  }

  /**
   * A {@link UnionType} contains a given type (alternate) iff the member
   * vector contains it.
   *
   * @param type The alternate which might be in this union.
   *
   * @return {@code true} if the alternate is in the union
   */
  public boolean contains(JSType type) {
    for (JSType alt : alternates) {
      if (alt.isEquivalentTo(type)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns a more restricted union type than {@code this} one, in which all
   * subtypes of {@code type} have been removed.<p>
   *
   * Examples:
   * <ul>
   * <li>{@code (number,string)} restricted by {@code number} is
   *     {@code string}</li>
   * <li>{@code (null, EvalError, URIError)} restricted by
   *     {@code Error} is {@code null}</li>
   * </ul>
   *
   * @param type the supertype of the types to remove from this union type
   */
  public JSType getRestrictedUnion(JSType type) {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      // Unknown alternates are always kept.
      if (t.isUnknownType() || !t.isSubtype(type)) {
        restricted.addAlternate(t);
      }
    }
    return restricted.build();
  }

  /**
   * Renders as {@code (a|b|...)} with alternates in alphabetical order for a
   * deterministic string.
   */
  @Override
  public String toString() {
    StringBuilder result = new StringBuilder();
    boolean firstAlternate = true;

    result.append("(");
    SortedSet<JSType> sorted = new TreeSet<JSType>(ALPHA);
    sorted.addAll(alternates);
    for (JSType t : sorted) {
      if (!firstAlternate) {
        result.append("|");
      }
      result.append(t.toString());
      firstAlternate = false;
    }
    result.append(")");
    return result.toString();
  }

  /** A union is a subtype iff every alternate is a subtype. */
  @Override
  public boolean isSubtype(JSType that) {
    // unknown
    if (that.isUnknownType()) {
      return true;
    }
    // all type
    if (that.isAllType()) {
      return true;
    }
    for (JSType element : alternates) {
      if (!element.isSubtype(that)) {
        return false;
      }
    }
    return true;
  }

  @Override
  public JSType getRestrictedTypeGivenToBooleanOutcome(boolean outcome) {
    // gather elements after restriction
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType element : alternates) {
      restricted.addAlternate(
          element.getRestrictedTypeGivenToBooleanOutcome(outcome));
    }
    return restricted.build();
  }

  @Override
  public BooleanLiteralSet getPossibleToBooleanOutcomes() {
    BooleanLiteralSet literals = BooleanLiteralSet.EMPTY;
    for (JSType element : alternates) {
      literals = literals.union(element.getPossibleToBooleanOutcomes());
      if (literals == BooleanLiteralSet.BOTH) {
        // Saturated - no alternate can widen the set further.
        break;
      }
    }
    return literals;
  }

  @Override
  public TypePair getTypesUnderEquality(JSType that) {
    UnionTypeBuilder thisRestricted = new UnionTypeBuilder(registry);
    UnionTypeBuilder thatRestricted = new UnionTypeBuilder(registry);
    for (JSType element : alternates) {
      TypePair p = element.getTypesUnderEquality(that);
      if (p.typeA != null) {
        thisRestricted.addAlternate(p.typeA);
      }
      if (p.typeB != null) {
        thatRestricted.addAlternate(p.typeB);
      }
    }
    return new TypePair(
        thisRestricted.build(),
        thatRestricted.build());
  }

  @Override
  public TypePair getTypesUnderInequality(JSType that) {
    UnionTypeBuilder thisRestricted = new UnionTypeBuilder(registry);
    UnionTypeBuilder thatRestricted = new UnionTypeBuilder(registry);
    for (JSType element : alternates) {
      TypePair p = element.getTypesUnderInequality(that);
      if (p.typeA != null) {
        thisRestricted.addAlternate(p.typeA);
      }
      if (p.typeB != null) {
        thatRestricted.addAlternate(p.typeB);
      }
    }
    return new TypePair(
        thisRestricted.build(),
        thatRestricted.build());
  }

  @Override
  public TypePair getTypesUnderShallowInequality(JSType that) {
    UnionTypeBuilder thisRestricted = new UnionTypeBuilder(registry);
    UnionTypeBuilder thatRestricted = new UnionTypeBuilder(registry);
    for (JSType element : alternates) {
      TypePair p = element.getTypesUnderShallowInequality(that);
      if (p.typeA != null) {
        thisRestricted.addAlternate(p.typeA);
      }
      if (p.typeB != null) {
        thatRestricted.addAlternate(p.typeB);
      }
    }
    return new TypePair(
        thisRestricted.build(),
        thatRestricted.build());
  }

  @Override
  public <T> T visit(Visitor<T> visitor) {
    return visitor.caseUnionType(this);
  }

  /** Resolves each alternate against the given scope. */
  @Override
  JSType resolveInternal(ErrorReporter t, StaticScope<JSType> scope) {
    setResolvedTypeInternal(this); // for circularly defined types.

    boolean changed = false;
    ImmutableList.Builder<JSType> resolvedTypes = ImmutableList.builder();
    for (JSType alternate : alternates) {
      JSType newAlternate = alternate.resolve(t, scope);
      changed |= (alternate != newAlternate);
      // NOTE(review): adds the pre-resolution 'alternate', not 'newAlternate'.
      // This appears to rely on resolve() mutating types in place (the
      // checkState below requires the alternates' hash to be unchanged) --
      // confirm this is intentional and newAlternate is not meant to be added.
      resolvedTypes.add(alternate);
    }
    if (changed) {
      Collection<JSType> newAlternates = resolvedTypes.build();
      Preconditions.checkState(
          newAlternates.hashCode() == this.hashcode);
      alternates = newAlternates;
    }
    return this;
  }

  @Override
  public String toDebugHashCodeString() {
    List<String> hashCodes = Lists.newArrayList();
    for (JSType a : alternates) {
      hashCodes.add(a.toDebugHashCodeString());
    }
    return "{(" + Joiner.on(",").join(hashCodes) + ")}";
  }

  @Override
  public boolean setValidator(Predicate<JSType> validator) {
    for (JSType a : alternates) {
      a.setValidator(validator);
    }
    return true;
  }
}
// This file is part of CPAchecker, // a tool for configurable software verification: // https://cpachecker.sosy-lab.org // // SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> // // SPDX-License-Identifier: Apache-2.0 package pack; public class SimpleExample_false_assert { public static void main(String[] args) { int n1 = 3 + 3 * 4; // n1 = 15 int n2 = 2 + 2 * 6; // n2 = 14 boolean b1 = n1 == n2; // b1 = false assert b1; // always false } }
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.spin.python.json.tree; import org.camunda.spin.impl.test.ScriptEngine; import org.camunda.spin.json.tree.JsonTreeMapJsonToJavaScriptTest; /** * @author Sebastian Menski */ @ScriptEngine("python") public class JsonTreeMapJsonToJavaPythonTest extends JsonTreeMapJsonToJavaScriptTest { }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.atlas.gremlin.optimizer; import org.apache.atlas.gremlin.GremlinExpressionFactory; import org.apache.atlas.groovy.AbstractFunctionExpression; import org.apache.atlas.groovy.GroovyExpression; /** * Finds order expression in the call hierarchy. * */ public class OrderFinder implements CallHierarchyVisitor { private boolean hasOrderExpression; private GremlinExpressionFactory gremlinFactory; public OrderFinder(GremlinExpressionFactory gremlinFactory) { this.gremlinFactory = gremlinFactory; } @Override public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) { return true; } @Override public void visitNonFunctionCaller(GroovyExpression expr) { } @Override public void visitNullCaller() { } @Override public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) { if (gremlinFactory.isOrderExpression(functionCall)) { hasOrderExpression = true; return false; } return true; } public boolean hasOrderExpression() { return hasOrderExpression; } }
package im.zhaojun.zfile.service.impl;

import cn.hutool.core.convert.Convert;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import im.zhaojun.zfile.model.constant.StorageConfigConstant;
import im.zhaojun.zfile.model.entity.StorageConfig;
import im.zhaojun.zfile.model.enums.StorageTypeEnum;
import im.zhaojun.zfile.service.base.AbstractS3BaseFileService;
import im.zhaojun.zfile.service.base.BaseFileService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Amazon S3 (and S3-compatible) storage strategy.
 *
 * @author zhaojun
 */
@Service
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class S3ServiceImpl extends AbstractS3BaseFileService implements BaseFileService {

    private static final Logger log = LoggerFactory.getLogger(S3ServiceImpl.class);

    /**
     * Initializes the S3 client from the persisted configuration of the given
     * drive. Missing mandatory parameters mark the drive as not initialized;
     * any other failure is logged (with its cause) and skipped.
     *
     * @param driveId id of the drive whose configuration to load
     */
    @Override
    public void init(Integer driveId) {
        try {
            this.driveId = driveId;
            Map<String, StorageConfig> stringStorageConfigMap =
                    storageConfigService.selectStorageConfigMapByDriveId(driveId);

            // Null-safe lookups: a missing map entry must reach the
            // "incomplete parameters" branch below instead of throwing an NPE
            // that the broad catch would silently swallow.
            String accessKey = getConfigValue(stringStorageConfigMap, StorageConfigConstant.ACCESS_KEY);
            String secretKey = getConfigValue(stringStorageConfigMap, StorageConfigConstant.SECRET_KEY);
            String endPoint = getConfigValue(stringStorageConfigMap, StorageConfigConstant.ENDPOINT_KEY);
            super.domain = getConfigValue(stringStorageConfigMap, StorageConfigConstant.DOMAIN_KEY);
            super.basePath = getConfigValue(stringStorageConfigMap, StorageConfigConstant.BASE_PATH);
            super.bucketName = getConfigValue(stringStorageConfigMap, StorageConfigConstant.BUCKET_NAME_KEY);
            // Default to a private bucket when the flag is absent or unparsable.
            super.isPrivate = Convert.toBool(
                    getConfigValue(stringStorageConfigMap, StorageConfigConstant.IS_PRIVATE), true);

            String pathStyle = getConfigValue(stringStorageConfigMap, StorageConfigConstant.PATH_STYLE);
            boolean isPathStyle = "path-style".equals(pathStyle);

            if (Objects.isNull(accessKey) || Objects.isNull(secretKey)
                    || Objects.isNull(endPoint) || Objects.isNull(bucketName)) {
                log.debug("初始化存储策略 [{}] 失败: 参数不完整", getStorageTypeEnum().getDescription());
                isInitialized = false;
            } else {
                BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
                s3Client = AmazonS3ClientBuilder.standard()
                        .withPathStyleAccessEnabled(isPathStyle)
                        .withCredentials(new AWSStaticCredentialsProvider(credentials))
                        .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, ""))
                        .build();
                isInitialized = testConnection();
            }
        } catch (Exception e) {
            // Include the throwable instead of discarding the cause entirely.
            log.debug(getStorageTypeEnum().getDescription() + " 初始化异常, 已跳过", e);
        }
    }

    /**
     * Returns the value of the given config entry, or {@code null} when the
     * entry itself is absent from the map.
     */
    private static String getConfigValue(Map<String, StorageConfig> configMap, String key) {
        StorageConfig config = configMap.get(key);
        return config == null ? null : config.getValue();
    }

    @Override
    public StorageTypeEnum getStorageTypeEnum() {
        return StorageTypeEnum.S3;
    }

    /**
     * Lists the config fields this strategy needs, in display order.
     */
    @Override
    public List<StorageConfig> storageStrategyConfigList() {
        // Plain mutable list instead of double-brace initialization (which
        // creates an anonymous subclass holding a reference to this bean).
        return new ArrayList<>(Arrays.asList(
                new StorageConfig("accessKey", "AccessKey"),
                new StorageConfig("secretKey", "SecretKey"),
                new StorageConfig("endPoint", "服务地址(EndPoint)"),
                new StorageConfig("bucketName", "存储空间名称"),
                new StorageConfig("basePath", "基路径"),
                new StorageConfig("domain", "加速域名"),
                new StorageConfig("pathStyle", "域名风格"),
                new StorageConfig("isPrivate", "是否是私有空间")));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.example.sharding.raw.jdbc.config;

import com.google.common.collect.Lists;
import org.apache.shardingsphere.example.algorithm.PreciseModuloShardingDatabaseAlgorithm;
import org.apache.shardingsphere.example.algorithm.PreciseModuloShardingTableAlgorithm;
import org.apache.shardingsphere.example.common.DataSourceUtil;
import org.apache.shardingsphere.example.config.ExampleConfiguration;
import org.apache.shardingsphere.api.config.masterslave.MasterSlaveRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.KeyGeneratorConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.StandardShardingStrategyConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * Example configuration combining precise modulo sharding (database by
 * {@code user_id}, table by {@code order_id}) with two master-slave groups.
 */
public final class ShardingMasterSlaveConfigurationPrecise implements ExampleConfiguration {
    
    /**
     * Builds the sharded, master-slave backed data source.
     */
    @Override
    public DataSource getDataSource() throws SQLException {
        ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration();
        ruleConfig.getTableRuleConfigs().add(getOrderTableRuleConfiguration());
        ruleConfig.getTableRuleConfigs().add(getOrderItemTableRuleConfiguration());
        // t_order and t_order_item are routed together.
        ruleConfig.getBindingTableGroups().add("t_order, t_order_item");
        ruleConfig.setDefaultDatabaseShardingStrategyConfig(
            new StandardShardingStrategyConfiguration("user_id", new PreciseModuloShardingDatabaseAlgorithm()));
        ruleConfig.setDefaultTableShardingStrategyConfig(
            new StandardShardingStrategyConfiguration("order_id", new PreciseModuloShardingTableAlgorithm()));
        ruleConfig.setMasterSlaveRuleConfigs(getMasterSlaveRuleConfigurations());
        return ShardingDataSourceFactory.createDataSource(createDataSourceMap(), ruleConfig, new Properties());
    }
    
    /** Table rule for t_order with a snowflake key generator on order_id. */
    private static TableRuleConfiguration getOrderTableRuleConfiguration() {
        TableRuleConfiguration tableRuleConfig = new TableRuleConfiguration("t_order", "ds_${0..1}.t_order_${[0, 1]}");
        tableRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "order_id", getProperties()));
        return tableRuleConfig;
    }
    
    /** Table rule for t_order_item with a snowflake key generator on order_item_id. */
    private static TableRuleConfiguration getOrderItemTableRuleConfiguration() {
        TableRuleConfiguration tableRuleConfig = new TableRuleConfiguration("t_order_item", "ds_${0..1}.t_order_item_${[0, 1]}");
        tableRuleConfig.setKeyGeneratorConfig(new KeyGeneratorConfiguration("SNOWFLAKE", "order_item_id", getProperties()));
        return tableRuleConfig;
    }
    
    /** One master-slave group per logical data source, each with two slaves. */
    private static List<MasterSlaveRuleConfiguration> getMasterSlaveRuleConfigurations() {
        MasterSlaveRuleConfiguration ds0Rule = new MasterSlaveRuleConfiguration(
            "ds_0", "demo_ds_master_0", Arrays.asList("demo_ds_master_0_slave_0", "demo_ds_master_0_slave_1"));
        MasterSlaveRuleConfiguration ds1Rule = new MasterSlaveRuleConfiguration(
            "ds_1", "demo_ds_master_1", Arrays.asList("demo_ds_master_1_slave_0", "demo_ds_master_1_slave_1"));
        return Lists.newArrayList(ds0Rule, ds1Rule);
    }
    
    /** Physical data sources: two masters, each with two slaves. */
    private static Map<String, DataSource> createDataSourceMap() {
        String[] dataSourceNames = {
            "demo_ds_master_0", "demo_ds_master_0_slave_0", "demo_ds_master_0_slave_1",
            "demo_ds_master_1", "demo_ds_master_1_slave_0", "demo_ds_master_1_slave_1",
        };
        Map<String, DataSource> dataSourceMap = new HashMap<>();
        for (String each : dataSourceNames) {
            dataSourceMap.put(each, DataSourceUtil.createDataSource(each));
        }
        return dataSourceMap;
    }
    
    /** Snowflake generator properties (fixed worker id for the example). */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.setProperty("worker.id", "123");
        return props;
    }
}
package com.github.tusharepro.core.entity;

import lombok.Data;
import com.github.tusharepro.core.bean.IndexMonthly;

import javax.persistence.*;
import java.io.Serializable;
import java.time.LocalDate;

/**
 * Index monthly quotes (one row per index per monthly trading date).
 * Mirrors the TuShare "index_monthly" API.
 * https://tushare.pro/document/2?doc_id=172
 */
@Data
@Entity
@Table(name = "index_monthly")
@IdClass(IndexMonthlyEntity.PrimaryKey.class)
public class IndexMonthlyEntity implements IndexMonthly {

    /** Composite primary key: (tsCode, tradeDate). */
    @Data
    public static class PrimaryKey implements Serializable {
        private String tsCode;
        private LocalDate tradeDate;
    }

    @Id
    @Column(name = "ts_code")
    protected String tsCode;            // TuShare index code

    @Id
    @Column(name = "trade_date")
    protected LocalDate tradeDate;      // trading date

    @Column(name = "close")
    protected Double close;             // closing level

    @Column(name = "open")
    protected Double open;              // opening level

    @Column(name = "high")
    protected Double high;              // highest level

    @Column(name = "low")
    protected Double low;               // lowest level

    @Column(name = "pre_close")
    protected Double preClose;          // previous close level

    @Column(name = "change")
    protected Double change;            // change in points

    @Column(name = "pct_chg")
    protected Double pctChg;            // percent change

    @Column(name = "vol")
    protected Double vol;               // trading volume

    @Column(name = "amount")
    protected Double amount;            // trading amount (turnover)
}
/* * SPDX-License-Identifier: Apache-2.0 * * The OpenSearch Contributors require contributions made to * this file be licensed under the Apache-2.0 license or a * compatible open source license. */ /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * Modifications Copyright OpenSearch Contributors. See * GitHub history for details. 
 */

package org.opensearch.search.fetch.subphase;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
import org.opensearch.index.mapper.Uid;
import org.opensearch.search.SearchHit;
import org.opensearch.search.internal.SearchContext;
import org.opensearch.search.internal.SubSearchContext;
import org.opensearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Context used for inner hits retrieval.
 *
 * Holds the named {@link InnerHitSubContext} definitions registered for a search request.
 */
public final class InnerHitsContext {
    // Registered inner-hit sub-contexts, keyed by their unique name.
    private final Map<String, InnerHitSubContext> innerHits;

    public InnerHitsContext() {
        this.innerHits = new HashMap<>();
    }

    InnerHitsContext(Map<String, InnerHitSubContext> innerHits) {
        this.innerHits = Objects.requireNonNull(innerHits);
    }

    public Map<String, InnerHitSubContext> getInnerHits() {
        return innerHits;
    }

    /**
     * Registers an inner-hit definition under its name.
     *
     * @throws IllegalArgumentException if a definition with the same name was already added
     */
    public void addInnerHitDefinition(InnerHitSubContext innerHit) {
        if (innerHits.containsKey(innerHit.getName())) {
            throw new IllegalArgumentException(
                "inner_hit definition with the name [" + innerHit.getName() + "] already exists. Use a different inner_hit name or define one explicitly"
            );
        }

        innerHits.put(innerHit.getName(), innerHit);
    }

    /**
     * A {@link SubSearchContext} that associates {@link TopDocs} to each {@link SearchHit}
     * in the parent search context
     */
    public abstract static class InnerHitSubContext extends SubSearchContext {

        private final String name;
        protected final SearchContext context;
        private InnerHitsContext childInnerHits;
        // Lazily created in getInnerHitQueryWeight() and cached for the request.
        private Weight innerHitQueryWeight;

        // TODO: when types are complete removed just use String instead for the id:
        private Uid rootId;
        private SourceLookup rootLookup;

        protected InnerHitSubContext(String name, SearchContext context) {
            super(context);
            this.name = name;
            this.context = context;
        }

        /** Computes the top matching inner documents for the given parent hit. */
        public abstract TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException;

        public String getName() {
            return name;
        }

        @Override
        public InnerHitsContext innerHits() {
            return childInnerHits;
        }

        public void setChildInnerHits(Map<String, InnerHitSubContext> childInnerHits) {
            this.childInnerHits = new InnerHitsContext(childInnerHits);
        }

        /**
         * Returns the (cached) weight of the inner-hit query.
         * Scoring is only requested when results are wanted (size != 0) and either no sort
         * is set or the sort itself needs scores.
         */
        protected Weight getInnerHitQueryWeight() throws IOException {
            if (innerHitQueryWeight == null) {
                final boolean needsScores = size() != 0 && (sort() == null || sort().sort.needsScores());
                innerHitQueryWeight = context.searcher()
                    .createWeight(context.searcher().rewrite(query()), needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES, 1f);
            }
            return innerHitQueryWeight;
        }

        public SearchContext parentSearchContext() {
            return context;
        }

        /**
         * The _id of the root document.
         *
         * Since this ID is available on the context, inner hits can avoid re-loading the root _id.
         */
        public Uid getRootId() {
            return rootId;
        }

        public void setRootId(Uid rootId) {
            this.rootId = rootId;
        }

        /**
         * A source lookup for the root document.
         *
         * This shared lookup allows inner hits to avoid re-loading the root _source.
         */
        public SourceLookup getRootLookup() {
            return rootLookup;
        }

        public void setRootLookup(SourceLookup rootLookup) {
            this.rootLookup = rootLookup;
        }
    }

    /**
     * Feeds the collector every live document that matches BOTH weights on the given leaf:
     * the docs are the conjunction of the two scorers' iterators, while scores come from
     * the inner-hit query scorer only.
     */
    public static void intersect(Weight weight, Weight innerHitQueryWeight, Collector collector, LeafReaderContext ctx) throws IOException {
        ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx);
        if (scorerSupplier == null) {
            // no matches for this leaf at all
            return;
        }
        // use low leadCost since this scorer will be consumed on a minority of documents
        Scorer scorer = scorerSupplier.get(0);

        ScorerSupplier innerHitQueryScorerSupplier = innerHitQueryWeight.scorerSupplier(ctx);
        if (innerHitQueryScorerSupplier == null) {
            return;
        }
        // use low leadCost since this scorer will be consumed on a minority of documents
        Scorer innerHitQueryScorer = innerHitQueryScorerSupplier.get(0);

        final LeafCollector leafCollector;
        try {
            leafCollector = collector.getLeafCollector(ctx);
            // Just setting the innerHitQueryScorer is ok, because that is the actual scoring part of the query
            leafCollector.setScorer(innerHitQueryScorer);
        } catch (CollectionTerminatedException e) {
            // collector wants nothing from this leaf
            return;
        }
        try {
            Bits acceptDocs = ctx.reader().getLiveDocs();
            DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(
                Arrays.asList(innerHitQueryScorer.iterator(), scorer.iterator())
            );
            for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
                // skip deleted docs (acceptDocs == null means no deletions in this leaf)
                if (acceptDocs == null || acceptDocs.get(docId)) {
                    leafCollector.collect(docId);
                }
            }
        } catch (CollectionTerminatedException e) {
            // ignore and continue
        }
    }
}
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Nick Santos * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. 
* * ***** END LICENSE BLOCK ***** */ package com.google.javascript.rhino.jstype; import static com.google.common.truth.Truth.assertWithMessage; import static com.google.javascript.rhino.jstype.JSTypeNative.ALL_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.BOOLEAN_OBJECT_FUNCTION_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.BOOLEAN_OBJECT_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.BOOLEAN_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.GENERATOR_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.ITERABLE_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.ITERATOR_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.I_TEMPLATE_ARRAY_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.NULL_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.NULL_VOID; import static com.google.javascript.rhino.jstype.JSTypeNative.NUMBER_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.STRING_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.STRING_VALUE_OR_OBJECT_TYPE; import static com.google.javascript.rhino.testing.TypeSubject.assertType; import static com.google.javascript.rhino.testing.TypeSubject.types; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import junit.framework.TestCase; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests {@link JSTypeRegistry}. 
* */ @RunWith(JUnit4.class) public class JSTypeRegistryTest extends TestCase { // TODO(user): extend this class with more tests, as JSTypeRegistry is // now much larger @Test public void testGetBuiltInType_boolean() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); assertType(typeRegistry.getType(null, "boolean")) .isStructurallyEqualTo(typeRegistry.getNativeType(JSTypeNative.BOOLEAN_TYPE)); } @Test public void testGetBuiltInType_iterable() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); assertType(typeRegistry.getGlobalType("Iterable")) .isStructurallyEqualTo(typeRegistry.getNativeType(ITERABLE_TYPE)); } @Test public void testGetBuiltInType_iterator() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); assertType(typeRegistry.getGlobalType("Iterator")) .isStructurallyEqualTo(typeRegistry.getNativeType(ITERATOR_TYPE)); } @Test public void testGetBuiltInType_generator() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); assertType(typeRegistry.getGlobalType("Generator")) .isStructurallyEqualTo(typeRegistry.getNativeType(GENERATOR_TYPE)); } @Test public void testGetBuildInType_iTemplateArray() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); assertType(typeRegistry.getGlobalType("ITemplateArray")) .isStructurallyEqualTo(typeRegistry.getNativeType(I_TEMPLATE_ARRAY_TYPE)); } @Test public void testGetBuiltInType_Promise() { JSTypeRegistry registry = new JSTypeRegistry(null); ObjectType promiseType = registry.getNativeObjectType(JSTypeNative.PROMISE_TYPE); assertType(registry.getGlobalType("Promise")).isStructurallyEqualTo(promiseType); // Test that it takes one parameter of type // function(function((IThenable<TYPE>|TYPE|null|{then: ?})=): ?, function(*=): ?): ? 
FunctionType promiseCtor = promiseType.getConstructor(); Node paramList = promiseCtor.getParametersNode(); Node firstParameter = paramList.getFirstChild(); assertNotNull(firstParameter); FunctionType paramType = paramList.getFirstChild().getJSType().toMaybeFunctionType(); assertEquals( "function(function((IThenable<TYPE>|TYPE|null|{then: ?})=): ?, function(*=): ?): ?", paramType.toString()); } @Test public void testGetDeclaredType() { JSTypeRegistry typeRegistry = new JSTypeRegistry(null); JSType type = typeRegistry.createAnonymousObjectType(null); String name = "Foo"; typeRegistry.declareType(null, name, type); assertType(typeRegistry.getType(null, name)).isStructurallyEqualTo(type); // Ensure different instances are independent. JSTypeRegistry typeRegistry2 = new JSTypeRegistry(null); assertEquals(null, typeRegistry2.getType(null, name)); assertType(typeRegistry.getType(null, name)).isStructurallyEqualTo(type); } @Test public void testPropertyOnManyTypes() { // Given JSTypeRegistry typeRegistry = new JSTypeRegistry(null); // By default the UnionTypeBuilder will treat a union of more than 30 // types as an unknown type. We don't want that for property checking // so test that the limit is higher. 
for (int i = 0; i < 100; i++) { JSType type = typeRegistry.createObjectType("type: " + i, null); // When typeRegistry.registerPropertyOnType("foo", type); // Then assertWithMessage("Registered property `foo` on <%s> types.", i + 1) .about(types()) .that(typeRegistry.getGreatestSubtypeWithProperty(type, "foo")) .isNotUnknown(); } } @Test public void testReadableTypeName() { JSTypeRegistry registry = new JSTypeRegistry(null); assertEquals("*", getReadableTypeNameHelper(registry, ALL_TYPE)); assertEquals("boolean", getReadableTypeNameHelper(registry, BOOLEAN_TYPE)); assertEquals("Boolean", getReadableTypeNameHelper(registry, BOOLEAN_OBJECT_TYPE)); assertEquals("function", getReadableTypeNameHelper(registry, BOOLEAN_OBJECT_FUNCTION_TYPE)); assertEquals( "(String|string)", getReadableTypeNameHelper(registry, STRING_VALUE_OR_OBJECT_TYPE)); assertEquals("(null|undefined)", getReadableTypeNameHelper(registry, NULL_VOID)); assertEquals("(null|undefined)", getReadableTypeNameHelper(registry, NULL_VOID, true)); assertEquals( "(number|string|null)", getReadableTypeNameHelper(registry, union(registry, NUMBER_TYPE, STRING_TYPE, NULL_TYPE))); assertEquals( "(Number|String)", getReadableTypeNameHelper( registry, union(registry, NUMBER_TYPE, STRING_TYPE, NULL_TYPE), true)); } private JSType union(JSTypeRegistry registry, JSTypeNative... 
types) { return registry.createUnionType(types); } private String getReadableTypeNameHelper(JSTypeRegistry registry, JSTypeNative type) { return getReadableTypeNameHelper(registry, registry.getNativeType(type), false); } private String getReadableTypeNameHelper( JSTypeRegistry registry, JSTypeNative type, boolean deref) { return getReadableTypeNameHelper(registry, registry.getNativeType(type), deref); } private String getReadableTypeNameHelper(JSTypeRegistry registry, JSType type) { return getReadableTypeNameHelper(registry, type, false); } private String getReadableTypeNameHelper(JSTypeRegistry registry, JSType type, boolean deref) { Node n = new Node(Token.ADD); n.setJSType(type); return registry.getReadableJSTypeName(n, deref); } }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.hypervisor.kvm.storage; import java.io.File; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import org.apache.commons.codec.binary.Base64; import org.apache.log4j.Logger; import org.libvirt.Connect; import org.libvirt.LibvirtException; import org.libvirt.Secret; import org.libvirt.StoragePool; import org.libvirt.StoragePoolInfo.StoragePoolState; import org.libvirt.StorageVol; import com.ceph.rados.IoCTX; import com.ceph.rados.Rados; import com.ceph.rados.exceptions.ErrorCode; import com.ceph.rados.exceptions.RadosException; import com.ceph.rbd.Rbd; import com.ceph.rbd.RbdException; import com.ceph.rbd.RbdImage; import com.ceph.rbd.jna.RbdImageInfo; import com.ceph.rbd.jna.RbdSnapInfo; import org.apache.cloudstack.utils.qemu.QemuImg; import org.apache.cloudstack.utils.qemu.QemuImg.PhysicalDiskFormat; import org.apache.cloudstack.utils.qemu.QemuImgException; import org.apache.cloudstack.utils.qemu.QemuImgFile; import com.cloud.exception.InternalErrorException; import com.cloud.hypervisor.kvm.resource.LibvirtConnection; import 
com.cloud.hypervisor.kvm.resource.LibvirtSecretDef;
import com.cloud.hypervisor.kvm.resource.LibvirtSecretDef.Usage;
import com.cloud.hypervisor.kvm.resource.LibvirtStoragePoolDef;
import com.cloud.hypervisor.kvm.resource.LibvirtStoragePoolDef.AuthenticationType;
import com.cloud.hypervisor.kvm.resource.LibvirtStoragePoolDef.PoolType;
import com.cloud.hypervisor.kvm.resource.LibvirtStoragePoolXMLParser;
import com.cloud.hypervisor.kvm.resource.LibvirtStorageVolumeDef;
import com.cloud.hypervisor.kvm.resource.LibvirtStorageVolumeDef.VolumeFormat;
import com.cloud.hypervisor.kvm.resource.LibvirtStorageVolumeXMLParser;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.StorageLayer;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.script.Script;

/**
 * Storage adaptor that manages storage pools and volumes through libvirt
 * (NFS/Gluster mounts, local dirs, CLVM volume groups and Ceph/RBD pools).
 */
public class LibvirtStorageAdaptor implements StorageAdaptor {
    private static final Logger s_logger = Logger.getLogger(LibvirtStorageAdaptor.class);
    private StorageLayer _storageLayer;
    private String _mountPoint = "/mnt";
    private String _manageSnapshotPath;

    private String rbdTemplateSnapName = "cloudstack-base-snap";
    private int rbdFeatures = (1 << 0); /* Feature 1<<0 means layering in RBD format 2 */
    private int rbdOrder = 0; /* Order 0 means 4MB blocks (the default) */

    public LibvirtStorageAdaptor(StorageLayer storage) {
        _storageLayer = storage;
        _manageSnapshotPath = Script.findScript("scripts/storage/qcow2/", "managesnapshot.sh");
    }

    @Override
    public boolean createFolder(String uuid, String path) {
        // Creates <mountPoint>/<uuid>/<path> if missing; always reports success.
        String mountPoint = _mountPoint + File.separator + uuid;
        File f = new File(mountPoint + File.separator + path);
        if (!f.exists()) {
            f.mkdirs();
        }
        return true;
    }

    /**
     * Looks up a volume by name, refreshing the pool once and retrying if the
     * first lookup fails (the volume may have been created on another host).
     *
     * @throws CloudRuntimeException if the volume is still missing after the refresh
     */
    public StorageVol getVolume(StoragePool pool, String volName) {
        StorageVol vol = null;

        try {
            vol = pool.storageVolLookupByName(volName);
        } catch (LibvirtException e) {
            s_logger.debug("Could not find volume " + volName + ": " + e.getMessage());
        }

        /**
         * The volume was not found in the storage pool
         * This can happen when a volume has just been created on a different host and
         * since then the libvirt storage pool has not been refreshed.
         */
        if (vol == null) {
            try {
                s_logger.debug("Refreshing storage pool " + pool.getName());
                refreshPool(pool);
            } catch (LibvirtException e) {
                s_logger.debug("Failed to refresh storage pool: " + e.getMessage());
            }

            try {
                vol = pool.storageVolLookupByName(volName);
                s_logger.debug("Found volume " + volName + " in storage pool " + pool.getName() + " after refreshing the pool");
            } catch (LibvirtException e) {
                throw new CloudRuntimeException("Could not find volume " + volName + ": " + e.getMessage());
            }
        }

        return vol;
    }

    /**
     * Creates a volume in the pool with a fresh random UUID as its name.
     * NOTE(review): the {@code uuid} parameter is ignored — a random UUID is used instead;
     * presumably intentional, but worth confirming with callers.
     */
    public StorageVol createVolume(Connect conn, StoragePool pool, String uuid, long size, VolumeFormat format) throws LibvirtException {
        LibvirtStorageVolumeDef volDef = new LibvirtStorageVolumeDef(UUID.randomUUID().toString(), size, format, null, null);
        s_logger.debug(volDef.toString());

        return pool.storageVolCreateXML(volDef.toString(), 0);
    }

    /** Refreshes the pool, serialized on the locally-cached pool object for this UUID. */
    public void storagePoolRefresh(StoragePool pool) {
        try {
            synchronized (getStoragePool(pool.getUUIDString())) {
                refreshPool(pool);
            }
        } catch (LibvirtException e) {
            s_logger.debug("refresh storage pool failed: " + e.toString());
        }
    }

    /**
     * Defines and starts a network-filesystem (NFS/GlusterFS) pool mounted under
     * {@code _mountPoint/uuid}. If libvirt complains the target is already mounted,
     * attempts a lazy unmount and one retry. Returns null on failure it could handle,
     * rethrows on other libvirt errors.
     */
    private StoragePool createNetfsStoragePool(PoolType fsType, Connect conn, String uuid, String host, String path) throws LibvirtException {
        String targetPath = _mountPoint + File.separator + uuid;
        LibvirtStoragePoolDef spd = new LibvirtStoragePoolDef(fsType, uuid, uuid, host, path, targetPath);
        _storageLayer.mkdir(targetPath);
        StoragePool sp = null;
        try {
            s_logger.debug(spd.toString());
            sp = conn.storagePoolCreateXML(spd.toString(), 0);
            return sp;
        } catch (LibvirtException e) {
            s_logger.error(e.toString());
            // if error is that pool is mounted, try to handle it
            if (e.toString().contains("already mounted")) {
                s_logger.error("Attempting to unmount old mount libvirt is unaware of at " + targetPath);
                String result = Script.runSimpleBashScript("umount -l " + targetPath);
                if (result == null) {
                    s_logger.error("Succeeded in unmounting " + targetPath);
                    try {
                        sp = conn.storagePoolCreateXML(spd.toString(), 0);
                        s_logger.error("Succeeded in redefining storage");
                        return sp;
                    } catch (LibvirtException l) {
                        s_logger.error("Target was already mounted, unmounted it but failed to redefine storage:" + l);
                    }
                } else {
                    s_logger.error("Failed in unmounting and redefining storage");
                }
            } else {
                s_logger.error("Internal error occurred when attempting to mount: specified path may be invalid");
                throw e;
            }
            // best-effort cleanup of a half-created pool
            if (sp != null) {
                try {
                    if (sp.isPersistent() == 1) {
                        sp.destroy();
                        sp.undefine();
                    } else {
                        sp.destroy();
                    }
                    sp.free();
                } catch (LibvirtException l) {
                    s_logger.debug("Failed to undefine " + fsType.toString() + " storage pool with: " + l.toString());
                }
            }
            return null;
        }
    }

    /** Defines and starts a DIR-type pool over an existing local mount point; null on failure. */
    private StoragePool createSharedStoragePool(Connect conn, String uuid, String host, String path) {
        String mountPoint = path;
        if (!_storageLayer.exists(mountPoint)) {
            s_logger.error(mountPoint + " does not exists. Check local.storage.path in agent.properties.");
            return null;
        }
        LibvirtStoragePoolDef spd = new LibvirtStoragePoolDef(PoolType.DIR, uuid, uuid, host, path, path);
        StoragePool sp = null;
        try {
            s_logger.debug(spd.toString());
            sp = conn.storagePoolCreateXML(spd.toString(), 0);
            return sp;
        } catch (LibvirtException e) {
            s_logger.error(e.toString());
            if (sp != null) {
                try {
                    if (sp.isPersistent() == 1) {
                        sp.destroy();
                        sp.undefine();
                    } else {
                        sp.destroy();
                    }
                    sp.free();
                } catch (LibvirtException l) {
                    s_logger.debug("Failed to define shared mount point storage pool with: " + l.toString());
                }
            }
            return null;
        }
    }

    /** Defines and starts a LOGICAL (clustered LVM) pool for volume group {@code path}. */
    private StoragePool createCLVMStoragePool(Connect conn, String uuid, String host, String path) {

        String volgroupPath = "/dev/" + path;
        String volgroupName = path;
        // strip the first slash so "/vg0" becomes the volume group name "vg0"
        volgroupName = volgroupName.replaceFirst("/", "");

        LibvirtStoragePoolDef spd = new LibvirtStoragePoolDef(PoolType.LOGICAL, volgroupName, uuid, host, volgroupPath, volgroupPath);
        StoragePool sp = null;
        try {
            s_logger.debug(spd.toString());
            sp = conn.storagePoolCreateXML(spd.toString(), 0);
            return sp;
        } catch (LibvirtException e) {
            s_logger.error(e.toString());
            if (sp != null) {
                try {
                    if (sp.isPersistent() == 1) {
                        sp.destroy();
                        sp.undefine();
                    } else {
                        sp.destroy();
                    }
                    sp.free();
                } catch (LibvirtException l) {
                    s_logger.debug("Failed to define clvm storage pool with: " + l.toString());
                }
            }
            return null;
        }
    }

    /**
     * Defines and starts a Ceph RBD pool. When {@code userInfo} is "user:key",
     * a libvirt CEPH secret is defined first and the pool authenticates with it;
     * otherwise the pool is created without authentication. On failure both the
     * pool and the secret are cleaned up best-effort and null is returned.
     */
    private StoragePool createRBDStoragePool(Connect conn, String uuid, String host, int port, String userInfo, String path) {

        LibvirtStoragePoolDef spd;
        StoragePool sp = null;
        Secret s = null;

        String[] userInfoTemp = userInfo.split(":");
        if (userInfoTemp.length == 2) {
            LibvirtSecretDef sd = new LibvirtSecretDef(Usage.CEPH, uuid);

            sd.setCephName(userInfoTemp[0] + "@" + host + ":" + port + "/" + path);

            try {
                s_logger.debug(sd.toString());
                s = conn.secretDefineXML(sd.toString());
                s.setValue(Base64.decodeBase64(userInfoTemp[1]));
            } catch (LibvirtException e) {
                s_logger.error("Failed to define the libvirt secret: " + e.toString());
                if (s != null) {
                    try {
                        s.undefine();
                        s.free();
                    } catch (LibvirtException l) {
                        s_logger.error("Failed to undefine the libvirt secret: " + l.toString());
                    }
                }
                return null;
            }
            spd = new LibvirtStoragePoolDef(PoolType.RBD, uuid, uuid, host, port, path, userInfoTemp[0], AuthenticationType.CEPH, uuid);
        } else {
            spd = new LibvirtStoragePoolDef(PoolType.RBD, uuid, uuid, host, port, path, "");
        }

        try {
            s_logger.debug(spd.toString());
            sp = conn.storagePoolCreateXML(spd.toString(), 0);
            return sp;
        } catch (LibvirtException e) {
            s_logger.error("Failed to create RBD storage pool: " + e.toString());
            if (sp != null) {
                try {
                    if (sp.isPersistent() == 1) {
                        sp.destroy();
                        sp.undefine();
                    } else {
                        sp.destroy();
                    }
                    sp.free();
                } catch (LibvirtException l) {
                    s_logger.error("Failed to undefine RBD storage pool: " + l.toString());
                }
            }

            if (s != null) {
                try {
                    s_logger.error("Failed to create the RBD storage pool, cleaning up the libvirt secret");
                    s.undefine();
                    s.free();
                } catch (LibvirtException se) {
                    s_logger.error("Failed to remove the libvirt secret: " + se.toString());
                }
            }

            return null;
        }
    }

    /** Creates the destination volume then copies the source's backing file into it via `cp`. */
    public StorageVol copyVolume(StoragePool destPool, LibvirtStorageVolumeDef destVol, StorageVol srcVol, int timeout) throws LibvirtException {
        StorageVol vol = destPool.storageVolCreateXML(destVol.toString(), 0);
        String srcPath = srcVol.getKey();
        String destPath = vol.getKey();
        Script.runSimpleBashScript("cp " + srcPath + " " + destPath, timeout);
        return vol;
    }

    /**
     * Copies {@code srcPath} to {@code destPath/volumeName} via `cp`.
     * Returns true when the script produced no output (its success convention).
     *
     * @throws InternalErrorException if the source path does not exist
     */
    public boolean copyVolume(String srcPath, String destPath, String volumeName, int timeout) throws InternalErrorException {
        _storageLayer.mkdirs(destPath);
        if (!_storageLayer.exists(srcPath)) {
            throw new InternalErrorException("volume:" + srcPath + " is not exits");
        }
        String result = Script.runSimpleBashScript("cp " + srcPath + " " + destPath + File.separator + volumeName, timeout);
        return result == null;
    }

    /** Parses the pool's libvirt XML description into a {@link LibvirtStoragePoolDef}. */
    public LibvirtStoragePoolDef getStoragePoolDef(Connect conn, StoragePool pool) throws LibvirtException {
        String poolDefXML = pool.getXMLDesc(0);
        LibvirtStoragePoolXMLParser parser = new LibvirtStoragePoolXMLParser();
        return parser.parseStoragePoolXML(poolDefXML);
    }

    /** Parses the volume's libvirt XML description into a {@link LibvirtStorageVolumeDef}. */
    public LibvirtStorageVolumeDef getStorageVolumeDef(Connect conn, StorageVol vol) throws LibvirtException {
        String volDefXML = vol.getXMLDesc(0);
        LibvirtStorageVolumeXMLParser parser = new LibvirtStorageVolumeXMLParser();
        return parser.parseStorageVolumeXML(volDefXML);
    }

    @Override
    public KVMStoragePool getStoragePool(String uuid) {
        return this.getStoragePool(uuid, false);
    }

    /**
     * Fetches the pool from libvirt by UUID, starting it if it is not running,
     * maps its libvirt pool type to the CloudStack {@link StoragePoolType}, fills in
     * source/auth details for RBD and Gluster, optionally refreshes it, and records
     * capacity/usage figures.
     *
     * @throws CloudRuntimeException wrapping any libvirt failure
     */
    @Override
    public KVMStoragePool getStoragePool(String uuid, boolean refreshInfo) {
        s_logger.info("Trying to fetch storage pool " + uuid + " from libvirt");
        StoragePool storage = null;
        try {
            Connect conn = LibvirtConnection.getConnection();
            storage = conn.storagePoolLookupByUUIDString(uuid);

            if (storage.getInfo().state != StoragePoolState.VIR_STORAGE_POOL_RUNNING) {
                s_logger.warn("Storage pool " + uuid + " is not in running state. Attempting to start it.");
                storage.create(0);
            }
            LibvirtStoragePoolDef spd = getStoragePoolDef(conn, storage);
            if (spd == null) {
                throw new CloudRuntimeException("Unable to parse the storage pool definition for storage pool " + uuid);
            }
            StoragePoolType type = null;
            if (spd.getPoolType() == LibvirtStoragePoolDef.PoolType.NETFS) {
                type = StoragePoolType.NetworkFilesystem;
            } else if (spd.getPoolType() == LibvirtStoragePoolDef.PoolType.DIR) {
                type = StoragePoolType.Filesystem;
            } else if (spd.getPoolType() == LibvirtStoragePoolDef.PoolType.RBD) {
                type = StoragePoolType.RBD;
            } else if (spd.getPoolType() == LibvirtStoragePoolDef.PoolType.LOGICAL) {
                type = StoragePoolType.CLVM;
            } else if (spd.getPoolType() == LibvirtStoragePoolDef.PoolType.GLUSTERFS) {
                type = StoragePoolType.Gluster;
            }

            LibvirtStoragePool pool = new LibvirtStoragePool(uuid, storage.getName(), type, this, storage);

            // RBD pools have no local filesystem path
            if (pool.getType() != StoragePoolType.RBD)
                pool.setLocalPath(spd.getTargetPath());
            else
                pool.setLocalPath("");

            if (pool.getType() == StoragePoolType.RBD || pool.getType() == StoragePoolType.Gluster) {
                pool.setSourceHost(spd.getSourceHost());
                pool.setSourcePort(spd.getSourcePort());
                pool.setSourceDir(spd.getSourceDir());
                String authUsername = spd.getAuthUserName();
                if (authUsername != null) {
                    Secret secret = conn.secretLookupByUUIDString(spd.getSecretUUID());
                    String secretValue = new String(Base64.encodeBase64(secret.getByteValue()), Charset.defaultCharset());
                    pool.setAuthUsername(authUsername);
                    pool.setAuthSecret(secretValue);
                }
            }

            /**
             * On large (RBD) storage pools it can take up to a couple of minutes
             * for libvirt to refresh the pool.
             *
             * Refreshing a storage pool means that libvirt will have to iterate the whole pool
             * and fetch information of each volume in there
             *
             * It is not always required to refresh a pool. So we can control if we want to or not
             *
             * By default only the getStorageStats call in the LibvirtComputingResource will ask to
             * refresh the pool
             */
            if (refreshInfo) {
                s_logger.info("Asking libvirt to refresh storage pool " + uuid);
                pool.refresh();
            }
            pool.setCapacity(storage.getInfo().capacity);
            pool.setUsed(storage.getInfo().allocation);
            pool.setAvailable(storage.getInfo().available);

            s_logger.debug("Succesfully refreshed pool " + uuid +
                           " Capacity: " + storage.getInfo().capacity +
                           " Used: " + storage.getInfo().allocation +
                           " Available: " + storage.getInfo().available);

            return pool;
        } catch (LibvirtException e) {
            s_logger.debug("Could not find storage pool " + uuid + " in libvirt");
            throw new CloudRuntimeException(e.toString(), e);
        }
    }

    /**
     * Builds a {@link KVMPhysicalDisk} for the named volume, inferring the disk
     * format: RBD volumes are forced to RAW (libvirt reports their format as
     * unknown); otherwise the libvirt-reported format wins, falling back to
     * directory/suffix heuristics and finally the pool default.
     */
    @Override
    public KVMPhysicalDisk getPhysicalDisk(String volumeUuid, KVMStoragePool pool) {
        LibvirtStoragePool libvirtPool = (LibvirtStoragePool)pool;

        try {
            StorageVol vol = getVolume(libvirtPool.getPool(), volumeUuid);
            KVMPhysicalDisk disk;
            LibvirtStorageVolumeDef voldef = getStorageVolumeDef(libvirtPool.getPool().getConnect(), vol);
            disk = new KVMPhysicalDisk(vol.getPath(), vol.getName(), pool);
            disk.setSize(vol.getInfo().allocation);
            disk.setVirtualSize(vol.getInfo().capacity);

            /**
             * libvirt returns format = 'unknow', so we have to force
             * the format to RAW for RBD storage volumes
             */
            if (pool.getType() == StoragePoolType.RBD) {
                disk.setFormat(PhysicalDiskFormat.RAW);
            } else if (voldef.getFormat() == null) {
                File diskDir = new File(disk.getPath());
                if (diskDir.exists() && diskDir.isDirectory()) {
                    disk.setFormat(PhysicalDiskFormat.DIR);
                } else if (volumeUuid.endsWith("tar") || volumeUuid.endsWith(("TAR"))) {
                    disk.setFormat(PhysicalDiskFormat.TAR);
                } else if (volumeUuid.endsWith("raw") || volumeUuid.endsWith(("RAW"))) {
                    disk.setFormat(PhysicalDiskFormat.RAW);
                } else {
                    disk.setFormat(pool.getDefaultFormat());
                }
            } else if (voldef.getFormat() == LibvirtStorageVolumeDef.VolumeFormat.QCOW2) {
                disk.setFormat(PhysicalDiskFormat.QCOW2);
            } else if (voldef.getFormat() == LibvirtStorageVolumeDef.VolumeFormat.RAW) {
                disk.setFormat(PhysicalDiskFormat.RAW);
            }
            return disk;
        } catch (LibvirtException e) {
            s_logger.debug("Failed to get physical disk:", e);
            throw new CloudRuntimeException(e.toString());
        }

    }

    @Override
    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type) {
        s_logger.info("Attempting to create storage pool " + name + " (" + type.toString() + ") in libvirt");
        StoragePool sp = null;
        Connect conn = null;
        try {
            conn = LibvirtConnection.getConnection();
        } catch (LibvirtException e) {
            throw new CloudRuntimeException(e.toString());
        }

        try {
            sp = conn.storagePoolLookupByUUIDString(name);
            if (sp != null && sp.isActive() == 0) {
                sp.undefine();
                sp = null;
                s_logger.info("Found existing defined storage pool " + name + ". It wasn't running, so we undefined it.");
            }
            if (sp != null) {
                s_logger.info("Found existing defined storage pool " + name + ", using it.");
            }
        } catch (LibvirtException e) {
            sp = null;
            s_logger.warn("Storage pool " + name + " was not found running in libvirt. Need to create it.");
        }

        // libvirt strips trailing slashes off of path, we will too in order to match
        // existing paths
        if (path.endsWith("/")) {
            path = path.substring(0, path.length() - 1);
        }

        if (sp == null) {
            // see if any existing pool by another name is using our storage path.
            // if anyone is, undefine the pool so we can define it as requested.
            // This should be safe since a pool in use can't be removed, and no
            // volumes are affected by unregistering the pool with libvirt.
            s_logger.info("Didn't find an existing storage pool " + name + " by UUID, checking for pools with duplicate paths");

            try {
                String[] poolnames = conn.listStoragePools();
                for (String poolname : poolnames) {
                    s_logger.debug("Checking path of existing pool " + poolname + " against pool we want to create");
                    StoragePool p = conn.storagePoolLookupByName(poolname);
                    LibvirtStoragePoolDef pdef = getStoragePoolDef(conn, p);

                    String targetPath = pdef.getTargetPath();
                    if (targetPath != null && targetPath.equals(path)) {
                        s_logger.debug("Storage pool utilizing path '" + path + "' already exists as pool " + poolname +
                                       ", undefining so we can re-define with correct name " + name);
                        if (p.isPersistent() == 1) {
                            p.destroy();
                            p.undefine();
                        } else {
                            p.destroy();
                        }
                    }
                }
            } catch (LibvirtException e) {
                s_logger.error("Failure in attempting to see if an existing storage pool might be using the path of the pool to be created:" + e);
            }

            s_logger.debug("Attempting to create storage pool " + name);

            if (type == StoragePoolType.NetworkFilesystem) {
                try {
                    sp = createNetfsStoragePool(PoolType.NETFS, conn, name, host, path);
                } catch (LibvirtException e) {
                    s_logger.error("Failed to create netfs mount: " + host + ":" + path , e);
                    s_logger.error(e.getStackTrace());
                    throw new CloudRuntimeException(e.toString());
                }
            } else if (type == StoragePoolType.Gluster) {
                try {
                    sp = createNetfsStoragePool(PoolType.GLUSTERFS, conn, name, host, path);
                } catch (LibvirtException e) {
                    s_logger.error("Failed to create glusterfs mount: " + host + ":" + 
                        path , e);
                s_logger.error(e.getStackTrace());
                throw new CloudRuntimeException(e.toString());
            }
        } else if (type == StoragePoolType.SharedMountPoint || type == StoragePoolType.Filesystem) {
            sp = createSharedStoragePool(conn, name, host, path);
        } else if (type == StoragePoolType.RBD) {
            sp = createRBDStoragePool(conn, name, host, port, userInfo, path);
        } else if (type == StoragePoolType.CLVM) {
            sp = createCLVMStoragePool(conn, name, host, path);
        }
    }

    if (sp == null) {
        throw new CloudRuntimeException("Failed to create storage pool: " + name);
    }

    try {
        if (sp.isActive() == 0) {
            s_logger.debug("Attempting to activate pool " + name);
            sp.create(0);
        }

        return getStoragePool(name);
    } catch (LibvirtException e) {
        String error = e.toString();
        if (error.contains("Storage source conflict")) {
            throw new CloudRuntimeException("A pool matching this location already exists in libvirt, " +
                    " but has a different UUID/Name. Cannot create new pool without first " +
                    " removing it. Check for inactive pools via 'virsh pool-list --all'. " + error);
        } else {
            throw new CloudRuntimeException(error);
        }
    }
}

/**
 * Removes a storage pool (and its associated libvirt secret, if any) from libvirt.
 *
 * A missing pool is treated as already removed (returns true). On an EBUSY-style
 * unmount failure ("exit status 16") a delayed manual umount of the mount point is
 * attempted before giving up.
 *
 * @param uuid UUID of the pool to remove
 * @return true when the pool is gone (or was never there)
 * @throws CloudRuntimeException if libvirt fails to destroy/undefine the pool
 */
@Override
public boolean deleteStoragePool(String uuid) {
    s_logger.info("Attempting to remove storage pool " + uuid + " from libvirt");
    Connect conn = null;
    try {
        conn = LibvirtConnection.getConnection();
    } catch (LibvirtException e) {
        throw new CloudRuntimeException(e.toString());
    }

    StoragePool sp = null;
    Secret s = null;

    try {
        sp = conn.storagePoolLookupByUUIDString(uuid);
    } catch (LibvirtException e) {
        s_logger.warn("Storage pool " + uuid + " doesn't exist in libvirt. Assuming it is already removed");
        return true;
    }

    /*
     * Some storage pools, like RBD, also have 'secret' information stored in libvirt.
     * Destroy them if they exist.
     */
    try {
        s = conn.secretLookupByUUIDString(uuid);
    } catch (LibvirtException e) {
        s_logger.info("Storage pool " + uuid + " has no corresponding secret. Not removing any secret.");
    }

    try {
        if (sp.isPersistent() == 1) {
            sp.destroy();
            sp.undefine();
        } else {
            sp.destroy();
        }
        sp.free();
        if (s != null) {
            s.undefine();
            s.free();
        }

        s_logger.info("Storage pool " + uuid + " was succesfully removed from libvirt.");

        return true;
    } catch (LibvirtException e) {
        // handle ebusy error when pool is quickly destroyed
        if (e.toString().contains("exit status 16")) {
            String targetPath = _mountPoint + File.separator + uuid;
            s_logger.error("deleteStoragePool removed pool from libvirt, but libvirt had trouble unmounting the pool. Trying umount location " + targetPath +
                    "again in a few seconds");
            String result = Script.runSimpleBashScript("sleep 5 && umount " + targetPath);
            if (result == null) {
                // Script returns null on success for this helper.
                s_logger.error("Succeeded in unmounting " + targetPath);
                return true;
            }
            s_logger.error("Failed to unmount " + targetPath);
        }
        throw new CloudRuntimeException(e.toString(), e);
    }
}

/**
 * Creates a volume in the given pool, dispatching on pool type and requested format.
 *
 * NFS/local filesystem pools create QCOW2/RAW volumes via qemu-img (which honors the
 * provisioning type); DIR/TAR volumes and all other pool types (including RBD) go
 * through libvirt volume creation.
 *
 * @param name             volume name
 * @param pool             destination pool
 * @param format           requested on-disk format
 * @param provisioningType thin/sparse/fat provisioning hint
 * @param size             requested size in bytes
 * @return the created disk
 * @throws CloudRuntimeException for an unexpected format on a filesystem pool
 */
@Override
public KVMPhysicalDisk createPhysicalDisk(String name, KVMStoragePool pool,
        PhysicalDiskFormat format, Storage.ProvisioningType provisioningType, long size) {

    s_logger.info("Attempting to create volume " + name + " (" + pool.getType().toString() + ") in pool "
            + pool.getUuid() + " with size " + size);

    switch (pool.getType()) {
        case RBD:
            return createPhysicalDiskByLibVirt(name, pool, format, provisioningType, size);
        case NetworkFilesystem:
        case Filesystem:
            switch (format) {
                case QCOW2:
                    return createPhysicalDiskByQemuImg(name, pool, format, provisioningType, size);
                case RAW:
                    return createPhysicalDiskByQemuImg(name, pool, format, provisioningType, size);
                case DIR:
                    return createPhysicalDiskByLibVirt(name, pool, format, provisioningType, size);
                case TAR:
                    return createPhysicalDiskByLibVirt(name, pool, format, provisioningType, size);
                default:
                    throw new CloudRuntimeException("Unexpected disk format is specified.");
            }
        default:
            return createPhysicalDiskByLibVirt(name, pool, format, provisioningType, size);
    }
}

private KVMPhysicalDisk createPhysicalDiskByLibVirt(String
        name, KVMStoragePool pool,
        PhysicalDiskFormat format, Storage.ProvisioningType provisioningType, long size) {
    // Creates the volume through libvirt's storageVolCreateXML; the provisioningType
    // parameter is accepted for interface symmetry but not used on this path.
    LibvirtStoragePool libvirtPool = (LibvirtStoragePool) pool;
    StoragePool virtPool = libvirtPool.getPool();
    LibvirtStorageVolumeDef.VolumeFormat libvirtformat = LibvirtStorageVolumeDef.VolumeFormat.getFormat(format);

    String volPath = null;
    String volName = null;
    long volAllocation = 0;
    long volCapacity = 0;

    LibvirtStorageVolumeDef volDef = new LibvirtStorageVolumeDef(name, size, libvirtformat, null, null);
    s_logger.debug(volDef.toString());
    try {
        StorageVol vol = virtPool.storageVolCreateXML(volDef.toString(), 0);
        volPath = vol.getPath();
        volName = vol.getName();
        volAllocation = vol.getInfo().allocation;
        volCapacity = vol.getInfo().capacity;
    } catch (LibvirtException e) {
        throw new CloudRuntimeException(e.toString());
    }

    KVMPhysicalDisk disk = new KVMPhysicalDisk(volPath, volName, pool);
    disk.setFormat(format);
    disk.setSize(volAllocation);
    disk.setVirtualSize(volCapacity);
    return disk;
}

// Creates a volume on a filesystem-backed pool with qemu-img so the provisioning
// type (preallocation) can be honored for NFS pools.
private KVMPhysicalDisk createPhysicalDiskByQemuImg(String name, KVMStoragePool pool,
        PhysicalDiskFormat format, Storage.ProvisioningType provisioningType, long size) {
    String volPath = pool.getLocalPath() + "/" + name;
    String volName = name;
    long virtualSize = 0;
    long actualSize = 0;

    final int timeout = 0;

    QemuImgFile destFile = new QemuImgFile(volPath);
    destFile.setFormat(format);
    destFile.setSize(size);
    QemuImg qemu = new QemuImg(timeout);
    Map<String, String> options = new HashMap<String, String>();
    if (pool.getType() == StoragePoolType.NetworkFilesystem){
        options.put("preallocation", QemuImg.PreallocationType.getPreallocationType(provisioningType).toString());
    }

    try{
        qemu.create(destFile, options);
        Map<String, String> info = qemu.info(destFile);
        virtualSize = Long.parseLong(info.get(new String("virtual_size")));
        actualSize = new File(destFile.getFileName()).length();
    } catch (QemuImgException e) {
        // NOTE(review): the failure is only logged; the method still returns a disk
        // object with size/virtualSize 0. Callers cannot tell creation failed —
        // consider rethrowing as CloudRuntimeException.
        s_logger.error("Failed to create " + volPath + " due to a failed executing of qemu-img: " + e.getMessage());
    }

    KVMPhysicalDisk disk = new KVMPhysicalDisk(volPath, volName, pool);
    disk.setFormat(format);
    disk.setSize(actualSize);
    disk.setVirtualSize(virtualSize);
    return disk;
}

@Override
public boolean connectPhysicalDisk(String name, KVMStoragePool pool, Map<String, String> details) {
    // this is for managed storage that needs to prep disks prior to use
    return true;
}

@Override
public boolean disconnectPhysicalDisk(String uuid, KVMStoragePool pool) {
    // this is for managed storage that needs to cleanup disks after use
    return true;
}

@Override
public boolean disconnectPhysicalDisk(Map<String, String> volumeToDisconnect) {
    // this is for managed storage that needs to cleanup disks after use
    return false;
}

/**
 * Cleans up an NFS-mounted ISO given its local path.
 *
 * The pool UUID is parsed out of the mount path (third path component) and the
 * whole pool is deleted. Returns false for paths that are not ISOs under the
 * configured mount point, or on any libvirt/cleanup failure.
 */
@Override
public boolean disconnectPhysicalDiskByPath(String localPath) {
    // we've only ever cleaned up ISOs that are NFS mounted
    String poolUuid = null;
    if (localPath != null && localPath.startsWith(_mountPoint) && localPath.endsWith(".iso")) {
        String[] token = localPath.split("/");

        if (token.length > 3) {
            poolUuid = token[2];
        }
    } else {
        return false;
    }

    if (poolUuid == null) {
        return false;
    }

    try {
        Connect conn = LibvirtConnection.getConnection();

        conn.storagePoolLookupByUUIDString(poolUuid);

        deleteStoragePool(poolUuid);

        return true;
    } catch (LibvirtException ex) {
        return false;
    } catch (CloudRuntimeException ex) {
        return false;
    }
}

@Override
public boolean deletePhysicalDisk(String uuid, KVMStoragePool pool, Storage.ImageFormat format) {

    s_logger.info("Attempting to remove volume " + uuid + " from pool " + pool.getUuid());

    /**
     * RBD volume can have snapshots and while they exist libvirt
     * can't remove the RBD volume
     *
     * We have to remove those snapshots first
     */
    if (pool.getType() == StoragePoolType.RBD) {
        try {
            s_logger.info("Unprotecting and Removing RBD snapshots of image " + pool.getSourceDir() + "/" + uuid + " prior to removing the image");

            Rados r = new Rados(pool.getAuthUserName());
            r.confSet("mon_host", pool.getSourceHost() + ":" + pool.getSourcePort());
            r.confSet("key", pool.getAuthSecret());
            r.confSet("client_mount_timeout", "30");
            r.connect();
            s_logger.debug("Succesfully connected to Ceph cluster at " + r.confGet("mon_host"));

            IoCTX io = r.ioCtxCreate(pool.getSourceDir());
            Rbd rbd = new Rbd(io);
            RbdImage image = rbd.open(uuid);

            s_logger.debug("Fetching list of snapshots of RBD image " + pool.getSourceDir() + "/" + uuid);
            List<RbdSnapInfo> snaps = image.snapList();
            try {
                for (RbdSnapInfo snap : snaps) {
                    if (image.snapIsProtected(snap.name)) {
                        // Protected snapshots (used by clones) must be unprotected before removal.
                        s_logger.debug("Unprotecting snapshot " + pool.getSourceDir() + "/" + uuid + "@" + snap.name);
                        image.snapUnprotect(snap.name);
                    } else {
                        s_logger.debug("Snapshot " + pool.getSourceDir() + "/" + uuid + "@" + snap.name + " is not protected.");
                    }
                    s_logger.debug("Removing snapshot " + pool.getSourceDir() + "/" + uuid + "@" + snap.name);
                    image.snapRemove(snap.name);
                }
                s_logger.info("Succesfully unprotected and removed any remaining snapshots (" + snaps.size() + ") of "
                    + pool.getSourceDir() + "/" + uuid + " Continuing to remove the RBD image");
            } catch (RbdException e) {
                s_logger.error("Failed to remove snapshot with exception: " + e.toString() +
                    ", RBD error: " + ErrorCode.getErrorMessage(e.getReturnValue()));
                throw new CloudRuntimeException(e.toString() + " - " + ErrorCode.getErrorMessage(e.getReturnValue()));
            } finally {
                // Always release the image handle and the RADOS I/O context.
                s_logger.debug("Closing image and destroying context");
                rbd.close(image);
                r.ioCtxDestroy(io);
            }
        } catch (RadosException e) {
            s_logger.error("Failed to remove snapshot with exception: " + e.toString() +
                ", RBD error: " + ErrorCode.getErrorMessage(e.getReturnValue()));
            throw new CloudRuntimeException(e.toString() + " - " + ErrorCode.getErrorMessage(e.getReturnValue()));
        } catch (RbdException e) {
            s_logger.error("Failed to remove snapshot with exception: " + e.toString() +
                ", RBD error: " + ErrorCode.getErrorMessage(e.getReturnValue()));
            throw new CloudRuntimeException(e.toString() + " - " + ErrorCode.getErrorMessage(e.getReturnValue()));
        }
    }

    // Common path (all pool types): remove the volume through libvirt.
    LibvirtStoragePool libvirtPool = (LibvirtStoragePool)pool;
    try {
        StorageVol vol = getVolume(libvirtPool.getPool(), uuid);
        s_logger.debug("Instructing libvirt to remove volume " + uuid + " from pool " + pool.getUuid());
        if(Storage.ImageFormat.DIR.equals(format)){
            deleteDirVol(libvirtPool, vol);
        } else {
            deleteVol(libvirtPool, vol);
        }
        vol.free();
        return true;
    } catch (LibvirtException e) {
        throw new CloudRuntimeException(e.toString());
    }
}

/**
 * This function copies a physical disk from Secondary Storage to Primary Storage
 * or from Primary to Primary Storage
 *
 * The first time a template is deployed in Primary Storage it will be copied from
 * Secondary to Primary.
 *
 * If it has been created on Primary Storage, it will be copied on the Primary Storage
 */
@Override
public KVMPhysicalDisk createDiskFromTemplate(KVMPhysicalDisk template, String name, PhysicalDiskFormat format,
        Storage.ProvisioningType provisioningType, long size, KVMStoragePool destPool, int timeout) {

    s_logger.info("Creating volume " + name + " from template " + template.getName() + " in pool " + destPool.getUuid() +
            " (" + destPool.getType().toString() + ") with size " + size);

    KVMPhysicalDisk disk = null;

    if (destPool.getType() == StoragePoolType.RBD) {
        disk = createDiskFromTemplateOnRBD(template, name, format, provisioningType, size, destPool, timeout);
    } else {
        try {
            String newUuid = name;

            disk = destPool.createPhysicalDisk(newUuid, format, provisioningType, template.getVirtualSize());
            if (disk == null) {
                throw new CloudRuntimeException("Failed to create disk from template " + template.getName());
            }

            if (template.getFormat() == PhysicalDiskFormat.TAR) {
                // TO BE FIXED to aware provisioningType
                Script.runSimpleBashScript("tar -x -f " + template.getPath() + " -C " + disk.getPath(), timeout);
            } else if (template.getFormat() == PhysicalDiskFormat.DIR) {
                Script.runSimpleBashScript("mkdir -p " + disk.getPath());
                Script.runSimpleBashScript("chmod 755 " + disk.getPath());
                Script.runSimpleBashScript("tar -x -f " + template.getPath() + "/*.tar -C " + disk.getPath(), timeout);
            } else if (format == PhysicalDiskFormat.QCOW2) {
                QemuImg qemu = new QemuImg(timeout);
                QemuImgFile destFile = new QemuImgFile(disk.getPath(), format);
                // Never shrink below the template's virtual size.
                if (size > template.getVirtualSize()) {
                    destFile.setSize(size);
                } else {
                    destFile.setSize(template.getVirtualSize());
                }
                Map<String, String> options = new HashMap<String, String>();
                options.put("preallocation", QemuImg.PreallocationType.getPreallocationType(provisioningType).toString());
                switch(provisioningType){
                case THIN:
                    // Thin: create a qcow2 backed by the template (copy-on-write).
                    QemuImgFile backingFile = new QemuImgFile(template.getPath(), template.getFormat());
                    qemu.create(destFile, backingFile, options);
                    break;
                case SPARSE:
                case FAT:
                    // Sparse/fat: full convert of the template into the new image.
                    QemuImgFile srcFile = new QemuImgFile(template.getPath(), template.getFormat());
                    qemu.convert(srcFile, destFile, options);
                    break;
                }
            } else if (format == PhysicalDiskFormat.RAW) {
                QemuImgFile sourceFile = new QemuImgFile(template.getPath(), template.getFormat());
                QemuImgFile destFile = new QemuImgFile(disk.getPath(), PhysicalDiskFormat.RAW);
                if (size > template.getVirtualSize()) {
                    destFile.setSize(size);
                } else {
                    destFile.setSize(template.getVirtualSize());
                }
                QemuImg qemu = new QemuImg(timeout);
                Map<String, String> options = new HashMap<String, String>();
                qemu.convert(sourceFile, destFile, options);
            }
        } catch (QemuImgException e) {
            // NOTE(review): failure is only logged; the partially-created disk object
            // is still returned to the caller.
            s_logger.error("Failed to create " + disk.getPath() +
                    " due to a failed executing of qemu-img: " + e.getMessage());
        }
    }

    return disk;
}

// Deploys a template onto an RBD pool. Same-cluster format-2 sources are cloned via
// snapshot layering; format-1 or cross-cluster sources are fully copied.
private KVMPhysicalDisk createDiskFromTemplateOnRBD(KVMPhysicalDisk template,
        String name, PhysicalDiskFormat format, Storage.ProvisioningType provisioningType, long size, KVMStoragePool destPool, int timeout){

    /*
        With RBD you can't run qemu-img convert with an existing RBD image as destination
        qemu-img will exit with the error that the destination already exists.
        So for RBD we don't create the image, but let qemu-img do that for us.

        We then create a KVMPhysicalDisk object that we can return
    */

    KVMStoragePool srcPool = template.getPool();
    KVMPhysicalDisk disk = null;
    String newUuid = name;

    format = PhysicalDiskFormat.RAW;
    disk = new KVMPhysicalDisk(destPool.getSourceDir() + "/" + newUuid, newUuid, destPool);
    disk.setFormat(format);
    if (size > template.getVirtualSize()) {
        disk.setSize(size);
        disk.setVirtualSize(size);
    } else {
        // leave these as they were if size isn't applicable
        disk.setSize(template.getVirtualSize());
        disk.setVirtualSize(disk.getSize());
    }

    QemuImg qemu = new QemuImg(timeout);
    QemuImgFile srcFile;
    QemuImgFile destFile = new QemuImgFile(KVMPhysicalDisk.RBDStringBuilder(destPool.getSourceHost(),
            destPool.getSourcePort(),
            destPool.getAuthUserName(),
            destPool.getAuthSecret(),
            disk.getPath()));
    destFile.setFormat(format);

    if (srcPool.getType() != StoragePoolType.RBD) {
        // Non-RBD source: plain qemu-img convert into the RBD destination.
        srcFile = new QemuImgFile(template.getPath(), template.getFormat());
        try{
            qemu.convert(srcFile, destFile);
        } catch (QemuImgException e) {
            s_logger.error("Failed to create " + disk.getPath() +
                    " due to a failed executing of qemu-img: " + e.getMessage());
        }
    } else {

        /**
         * We have to find out if the source file is in the same RBD pool and has
         * RBD format 2 before we can do a layering/clone operation on the RBD image
         *
         * This will be the case when the template is already on Primary Storage and
         * we want to copy it
         */

        try {
            if ((srcPool.getSourceHost().equals(destPool.getSourceHost())) && (srcPool.getSourceDir().equals(destPool.getSourceDir()))) {
                /* We are on the same Ceph cluster, but we require RBD format 2 on the source image */
                s_logger.debug("Trying to perform a RBD clone (layering) since we are operating in the same storage pool");

                Rados r = new Rados(srcPool.getAuthUserName());
                r.confSet("mon_host", srcPool.getSourceHost() + ":" + srcPool.getSourcePort());
                r.confSet("key", srcPool.getAuthSecret());
                r.confSet("client_mount_timeout", "30");
                r.connect();
                s_logger.debug("Succesfully connected to Ceph cluster at " +
                        r.confGet("mon_host"));

                IoCTX io = r.ioCtxCreate(srcPool.getSourceDir());
                Rbd rbd = new Rbd(io);

                RbdImage srcImage = rbd.open(template.getName());

                if (srcImage.isOldFormat()) {
                    /* The source image is RBD format 1, we have to do a regular copy */
                    s_logger.debug("The source image " + srcPool.getSourceDir() + "/" + template.getName() +
                            " is RBD format 1. We have to perform a regular copy (" + disk.getVirtualSize() + " bytes)");

                    rbd.create(disk.getName(), disk.getVirtualSize(), rbdFeatures, rbdOrder);
                    RbdImage destImage = rbd.open(disk.getName());

                    s_logger.debug("Starting to copy " + srcImage.getName() + " to " + destImage.getName() + " in Ceph pool " + srcPool.getSourceDir());
                    rbd.copy(srcImage, destImage);

                    s_logger.debug("Finished copying " + srcImage.getName() + " to " + destImage.getName() + " in Ceph pool " + srcPool.getSourceDir());
                    rbd.close(destImage);
                } else {
                    s_logger.debug("The source image " + srcPool.getSourceDir() + "/" + template.getName() +
                            " is RBD format 2. We will perform a RBD clone using snapshot " + rbdTemplateSnapName);
                    /* The source image is format 2, we can do a RBD snapshot+clone (layering) */

                    s_logger.debug("Checking if RBD snapshot " + srcPool.getSourceDir() + "/" + template.getName() +
                            "@" + rbdTemplateSnapName + " exists prior to attempting a clone operation.");

                    List<RbdSnapInfo> snaps = srcImage.snapList();
                    s_logger.debug("Found " + snaps.size() + " snapshots on RBD image " + srcPool.getSourceDir() + "/" + template.getName());
                    boolean snapFound = false;
                    for (RbdSnapInfo snap : snaps) {
                        if (rbdTemplateSnapName.equals(snap.name)) {
                            s_logger.debug("RBD snapshot " + srcPool.getSourceDir() + "/" + template.getName() +
                                    "@" + rbdTemplateSnapName + " already exists.");
                            snapFound = true;
                            break;
                        }
                    }

                    if (!snapFound) {
                        // Clone parents must be snapshotted and protected before cloning.
                        s_logger.debug("Creating RBD snapshot " + rbdTemplateSnapName + " on image " + name);
                        srcImage.snapCreate(rbdTemplateSnapName);
                        s_logger.debug("Protecting RBD snapshot " + rbdTemplateSnapName + " on image " + name);
                        srcImage.snapProtect(rbdTemplateSnapName);
                    }

                    rbd.clone(template.getName(), rbdTemplateSnapName, io, disk.getName(), rbdFeatures, rbdOrder);
                    s_logger.debug("Succesfully cloned " + template.getName() + "@" + rbdTemplateSnapName + " to " + disk.getName());
                    /* We also need to resize the image if the VM was deployed with a larger root disk size */
                    if (disk.getVirtualSize() > template.getVirtualSize()) {
                        RbdImage diskImage = rbd.open(disk.getName());
                        diskImage.resize(disk.getVirtualSize());
                        rbd.close(diskImage);
                        s_logger.debug("Resized " + disk.getName() + " to " + disk.getVirtualSize());
                    }

                }

                rbd.close(srcImage);
                r.ioCtxDestroy(io);
            } else {
                /* The source pool or host is not the same Ceph cluster, we do a simple copy with Qemu-Img */
                s_logger.debug("Both the source and destination are RBD, but not the same Ceph cluster. Performing a copy");

                Rados rSrc = new Rados(srcPool.getAuthUserName());
                rSrc.confSet("mon_host", srcPool.getSourceHost() + ":" + srcPool.getSourcePort());
                rSrc.confSet("key", srcPool.getAuthSecret());
                rSrc.confSet("client_mount_timeout", "30");
                rSrc.connect();
                s_logger.debug("Succesfully connected to source Ceph cluster at " + rSrc.confGet("mon_host"));

                Rados rDest = new Rados(destPool.getAuthUserName());
                rDest.confSet("mon_host", destPool.getSourceHost() + ":" + destPool.getSourcePort());
                rDest.confSet("key", destPool.getAuthSecret());
                rDest.confSet("client_mount_timeout", "30");
                rDest.connect();
                // NOTE(review): this message says "source" but rDest is the destination
                // cluster — log-message copy/paste slip (message text left unchanged here).
                s_logger.debug("Succesfully connected to source Ceph cluster at " + rDest.confGet("mon_host"));

                IoCTX sIO = rSrc.ioCtxCreate(srcPool.getSourceDir());
                Rbd sRbd = new Rbd(sIO);

                IoCTX dIO = rDest.ioCtxCreate(destPool.getSourceDir());
                Rbd dRbd = new Rbd(dIO);

                s_logger.debug("Creating " + disk.getName() + " on the destination cluster " + rDest.confGet("mon_host") +
                        " in pool " + destPool.getSourceDir());
                dRbd.create(disk.getName(), disk.getVirtualSize(), rbdFeatures, rbdOrder);

                RbdImage srcImage = sRbd.open(template.getName());
                RbdImage destImage =
                        dRbd.open(disk.getName());

                s_logger.debug("Copying " + template.getName() + " from Ceph cluster " + rSrc.confGet("mon_host") + " to " + disk.getName() +
                        " on cluster " + rDest.confGet("mon_host"));
                sRbd.copy(srcImage, destImage);

                sRbd.close(srcImage);
                dRbd.close(destImage);

                rSrc.ioCtxDestroy(sIO);
                rDest.ioCtxDestroy(dIO);
            }
        } catch (RadosException e) {
            // Any Ceph failure yields a null disk so the caller sees the deploy failed.
            s_logger.error("Failed to perform a RADOS action on the Ceph cluster, the error was: " + e.getMessage());
            disk = null;
        } catch (RbdException e) {
            s_logger.error("Failed to perform a RBD action on the Ceph cluster, the error was: " + e.getMessage());
            disk = null;
        }
    }
    return disk;
}

// Not supported by this adaptor; template creation from a disk is handled elsewhere.
@Override
public KVMPhysicalDisk createTemplateFromDisk(KVMPhysicalDisk disk, String name, PhysicalDiskFormat format, long size, KVMStoragePool destPool) {
    return null;
}

/**
 * Lists every volume libvirt reports for the pool, wrapped as KVMPhysicalDisks.
 *
 * @param storagePoolUuid unused; the pool object itself is authoritative
 * @param pool            the pool to enumerate (must be a LibvirtStoragePool)
 * @throws CloudRuntimeException on libvirt failure
 */
@Override
public List<KVMPhysicalDisk> listPhysicalDisks(String storagePoolUuid, KVMStoragePool pool) {
    LibvirtStoragePool libvirtPool = (LibvirtStoragePool)pool;
    StoragePool virtPool = libvirtPool.getPool();
    List<KVMPhysicalDisk> disks = new ArrayList<KVMPhysicalDisk>();
    try {
        String[] vols = virtPool.listVolumes();
        for (String volName : vols) {
            KVMPhysicalDisk disk = getPhysicalDisk(volName, pool);
            disks.add(disk);
        }
        return disks;
    } catch (LibvirtException e) {
        throw new CloudRuntimeException(e.toString());
    }
}

/**
 * This copies a volume from Primary Storage to Secondary Storage
 *
 * In theory it could also do it the other way around, but the current implementation
 * in ManagementServerImpl shows that the destPool is always a Secondary Storage Pool
 */
@Override
public KVMPhysicalDisk copyPhysicalDisk(KVMPhysicalDisk disk, String name, KVMStoragePool destPool, int timeout) {

    /*
        With RBD you can't run qemu-img convert with an existing RBD image as destination
        qemu-img will exit with the error that the destination already exists.
        So for RBD we don't create the image, but let qemu-img do that for us.

        We then create a KVMPhysicalDisk object that we can return

        It is however very unlikely that the destPool will be RBD, since it isn't supported
        for Secondary Storage
     */

    KVMStoragePool srcPool = disk.getPool();
    PhysicalDiskFormat sourceFormat = disk.getFormat();
    String sourcePath = disk.getPath();

    KVMPhysicalDisk newDisk;
    s_logger.debug("copyPhysicalDisk: disk size:" + disk.getSize() + ", virtualsize:" + disk.getVirtualSize()+" format:"+disk.getFormat());
    if (destPool.getType() != StoragePoolType.RBD) {
        if (disk.getFormat() == PhysicalDiskFormat.TAR) {
            newDisk = destPool.createPhysicalDisk(name, PhysicalDiskFormat.DIR, Storage.ProvisioningType.THIN, disk.getVirtualSize());
        } else {
            newDisk = destPool.createPhysicalDisk(name, Storage.ProvisioningType.THIN, disk.getVirtualSize());
        }
    } else {
        newDisk = new KVMPhysicalDisk(destPool.getSourceDir() + "/" + name, name, destPool);
        newDisk.setFormat(PhysicalDiskFormat.RAW);
        // NOTE(review): size is set from getVirtualSize() and virtual size from
        // getSize() — these look swapped; confirm against callers before changing.
        newDisk.setSize(disk.getVirtualSize());
        newDisk.setVirtualSize(disk.getSize());
    }

    String destPath = newDisk.getPath();
    PhysicalDiskFormat destFormat = newDisk.getFormat();

    QemuImg qemu = new QemuImg(timeout);
    QemuImgFile srcFile = null;
    QemuImgFile destFile = null;

    if ((srcPool.getType() != StoragePoolType.RBD) && (destPool.getType() != StoragePoolType.RBD)) {
        if(sourceFormat == PhysicalDiskFormat.TAR && destFormat == PhysicalDiskFormat.DIR) { //LXC template
            Script.runSimpleBashScript("cp "+ sourcePath + " " + destPath);
        } else if (sourceFormat == PhysicalDiskFormat.TAR) {
            Script.runSimpleBashScript("tar -x -f " + sourcePath + " -C " + destPath, timeout);
        } else if (sourceFormat == PhysicalDiskFormat.DIR) {
            Script.runSimpleBashScript("mkdir -p " + destPath);
            Script.runSimpleBashScript("chmod 755 " + destPath);
            Script.runSimpleBashScript("cp -p -r " + sourcePath + "/* " + destPath, timeout);
        } else {
            srcFile = new QemuImgFile(sourcePath, sourceFormat);
            try {
                Map<String, String> info = qemu.info(srcFile);
                String backingFile = info.get(new
                        String("backing_file"));
                // qcow2 templates can just be copied into place
                if (sourceFormat.equals(destFormat) && backingFile == null && sourcePath.endsWith(".qcow2")) {
                    String result = Script.runSimpleBashScript("cp -f " + sourcePath + " " + destPath, timeout);
                    if (result != null) {
                        throw new CloudRuntimeException("Failed to create disk: " + result);
                    }
                } else {
                    destFile = new QemuImgFile(destPath, destFormat);
                    try {
                        qemu.convert(srcFile, destFile);
                        // Re-read the converted image so the returned disk carries real sizes.
                        Map<String, String> destInfo = qemu.info(destFile);
                        Long virtualSize = Long.parseLong(destInfo.get(new String("virtual_size")));
                        newDisk.setVirtualSize(virtualSize);
                        newDisk.setSize(virtualSize);
                    } catch (QemuImgException e) {
                        s_logger.error("Failed to convert " + srcFile.getFileName() + " to " + destFile.getFileName() + " the error was: " + e.getMessage());
                        newDisk = null;
                    }
                }
            } catch (QemuImgException e) {
                s_logger.error("Failed to fetch the information of file " + srcFile.getFileName() + " the error was: " + e.getMessage());
                newDisk = null;
            }
        }
    } else if ((srcPool.getType() != StoragePoolType.RBD) && (destPool.getType() == StoragePoolType.RBD)) {
        /**
         * Using qemu-img we copy the QCOW2 disk to RAW (on RBD) directly.
         * To do so it's mandatory that librbd on the system is at least 0.67.7 (Ceph Dumpling)
         */
        try {
            srcFile = new QemuImgFile(sourcePath, sourceFormat);
            String rbdDestPath = destPool.getSourceDir() + "/" + name;
            String rbdDestFile = KVMPhysicalDisk.RBDStringBuilder(destPool.getSourceHost(),
                    destPool.getSourcePort(),
                    destPool.getAuthUserName(),
                    destPool.getAuthSecret(),
                    rbdDestPath);
            destFile = new QemuImgFile(rbdDestFile, destFormat);

            s_logger.debug("Starting copy from source image " + srcFile.getFileName() + " to RBD image " + rbdDestPath);
            qemu.convert(srcFile, destFile);
            s_logger.debug("Succesfully converted source image " + srcFile.getFileName() + " to RBD image " + rbdDestPath);

            /* We have to stat the RBD image to see how big it became afterwards */
            Rados r = new Rados(destPool.getAuthUserName());
            r.confSet("mon_host", destPool.getSourceHost() + ":" + destPool.getSourcePort());
            r.confSet("key", destPool.getAuthSecret());
            r.confSet("client_mount_timeout", "30");
            r.connect();
            s_logger.debug("Succesfully connected to Ceph cluster at " + r.confGet("mon_host"));

            IoCTX io = r.ioCtxCreate(destPool.getSourceDir());
            Rbd rbd = new Rbd(io);

            RbdImage image = rbd.open(name);
            RbdImageInfo rbdInfo = image.stat();
            newDisk.setSize(rbdInfo.size);
            newDisk.setVirtualSize(rbdInfo.size);
            s_logger.debug("After copy the resulting RBD image " + rbdDestPath + " is " + rbdInfo.size + " bytes long");
            rbd.close(image);
            r.ioCtxDestroy(io);
        } catch (QemuImgException e) {
            s_logger.error("Failed to convert from " + srcFile.getFileName() + " to " + destFile.getFileName() + " the error was: " + e.getMessage());
            newDisk = null;
        } catch (RadosException e) {
            s_logger.error("A Ceph RADOS operation failed (" + e.getReturnValue() + "). The error was: " + e.getMessage());
            newDisk = null;
        } catch (RbdException e) {
            s_logger.error("A Ceph RBD operation failed (" + e.getReturnValue() + "). The error was: " + e.getMessage());
            newDisk = null;
        }
    } else {
        /**
            We let Qemu-Img do the work here. Although we could work with librbd and have that do the cloning
            it doesn't benefit us. It's better to keep the current code in place which works
         */
        srcFile = new QemuImgFile(KVMPhysicalDisk.RBDStringBuilder(srcPool.getSourceHost(),
                srcPool.getSourcePort(),
                srcPool.getAuthUserName(),
                srcPool.getAuthSecret(),
                sourcePath));
        srcFile.setFormat(sourceFormat);
        destFile = new QemuImgFile(destPath);
        destFile.setFormat(destFormat);

        try {
            qemu.convert(srcFile, destFile);
        } catch (QemuImgException e) {
            s_logger.error("Failed to convert " + srcFile.getFileName() + " to " + destFile.getFileName() + " the error was: " + e.getMessage());
            newDisk = null;
        }
    }

    if (newDisk == null) {
        throw new CloudRuntimeException("Failed to copy " + disk.getPath() + " to " + name);
    }

    return newDisk;
}

// Not supported by this adaptor.
@Override
public KVMPhysicalDisk createDiskFromSnapshot(KVMPhysicalDisk snapshot, String snapshotName, String name, KVMStoragePool destPool) {
    return null;
}

/**
 * Asks libvirt to refresh the pool's volume list / stats.
 *
 * @return true on success, false if libvirt raised an error
 */
@Override
public boolean refresh(KVMStoragePool pool) {
    LibvirtStoragePool libvirtPool = (LibvirtStoragePool)pool;
    StoragePool virtPool = libvirtPool.getPool();
    try {
        refreshPool(virtPool);
    } catch (LibvirtException e) {
        return false;
    }
    return true;
}

// Convenience overload: delegates to deleteStoragePool(String) using the pool's UUID.
@Override
public boolean deleteStoragePool(KVMStoragePool pool) {
    return deleteStoragePool(pool.getUuid());
}

// Thin wrapper over libvirt's pool refresh (flags value 0 is currently the only one defined).
private void refreshPool(StoragePool pool) throws LibvirtException {
    pool.refresh(0);
    return;
}

// Deletes a regular volume through libvirt.
private void deleteVol(LibvirtStoragePool pool, StorageVol vol) throws LibvirtException {
    vol.delete(0);
}

// DIR-format volumes are directories; libvirt's vol.delete can't remove those,
// so fall back to a recursive rm on the volume's path.
private void deleteDirVol(LibvirtStoragePool pool, StorageVol vol) throws LibvirtException {
    Script.runSimpleBashScript("rm -r --interactive=never " + vol.getPath());
}
}
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership.  The ASF licenses this file        *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance            *
// * with the License.  You may obtain a copy of the License at                                                              *
// *                                                                                                                         *
// *  http://www.apache.org/licenses/LICENSE-2.0                                                                             *
// *                                                                                                                         *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an  *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for the        *
// * specific language governing permissions and limitations under the License.                                              *
// ***************************************************************************************************************************
package org.apache.juneau.http.resource;

import static org.apache.juneau.internal.ThrowableUtils.*;
import static org.apache.juneau.internal.StringUtils.*;

import java.io.*;
import java.util.*;
import java.util.function.*;

import org.apache.http.*;
import org.apache.juneau.http.entity.*;
import org.apache.juneau.http.header.*;
import org.apache.juneau.internal.*;

/**
 * Builder for {@link BasicResource} beans (an HTTP entity bundled with its headers).
 *
 * <p>
 * Collects entity content/metadata and headers, then instantiates the target resource class via
 * {@link #build()} using a public constructor that takes this builder.
 *
 * @param <T> The resource bean type to create for this builder.
 */
@FluentSetters(returns="HttpResourceBuilder<T>")
public class HttpResourceBuilder<T extends BasicResource> {

    // Headers live either in the immutable 'headers' list or the mutable 'headersBuilder',
    // never both: headersBuilder() converts the former into the latter and nulls 'headers'.
    HeaderList headers = HeaderList.EMPTY;
    HeaderList.Builder headersBuilder;

    // Same lazy built/builder swap for the entity.
    BasicHttpEntity entity;
    HttpEntityBuilder<?> entityBuilder;

    /** The BasicResource implementation class instantiated by {@link #build()}. */
    protected final Class<? extends BasicResource> implClass;

    /** The BasicHttpEntity implementation class used when lazily creating the entity builder. */
    protected final Class<? extends BasicHttpEntity> entityImplClass;

    /**
     * Constructor.
     *
     * @param implClass
     *  The subclass of {@link HttpResponse} to create.
     *  <br>This must contain a public constructor that takes in an {@link HttpResourceBuilder} object.
     * @param entityImplClass
     *  The subclass of {@link BasicHttpEntity} to create.
     *  <br>This must contain a public constructor that takes in an {@link HttpEntityBuilder} object.
     */
    public HttpResourceBuilder(Class<T> implClass, Class<? extends BasicHttpEntity> entityImplClass) {
        this.implClass = implClass;
        this.entityImplClass = entityImplClass;
    }

    /**
     * Copy constructor.
     *
     * <p>
     * Copies the headers and entity of an existing resource bean so it can be rebuilt with modifications.
     *
     * @param impl
     *  The resource bean to copy from.
     *  <br>Its class must contain a public constructor that takes in an {@link HttpResourceBuilder} object.
     */
    public HttpResourceBuilder(T impl) {
        implClass = impl.getClass();
        headers = impl.headers;
        entity = impl.entity;
        // NOTE(review): assumes impl.entity is never null here — TODO confirm against BasicResource.
        this.entityImplClass = entity.getClass();
    }

    /**
     * Instantiates the resource bean from the settings in this builder.
     *
     * <p>
     * Invokes the public {@code (HttpResourceBuilder)} constructor of {@link #implClass} reflectively.
     *
     * @return A new resource bean.
     */
    @SuppressWarnings("unchecked")
    public T build() {
        try {
            return (T) implClass.getConstructor(HttpResourceBuilder.class).newInstance(this);
        } catch (Exception e) {
            throw runtimeException(e);
        }
    }

    // Resolves the effective header list: a live builder wins, then the stored list, then EMPTY.
    HeaderList headers() {
        if (headersBuilder != null)
            return headersBuilder.build();
        if (headers == null)
            return HeaderList.EMPTY;
        return headers;
    }

    // Resolves the effective entity: a live builder wins, then the stored entity, then EMPTY.
    BasicHttpEntity entity() {
        if (entityBuilder != null)
            return entityBuilder.build();
        if (entity == null)
            return BasicHttpEntity.EMPTY;
        return entity;
    }

    /**
     * Copies the contents of the specified HTTP response to this builder.
     *
     * @param response The response to copy from.  Must not be null.
     * @return This object.
     * @throws IOException If content could not be retrieved.
     */
    public HttpResourceBuilder<?> copyFrom(HttpResponse response) throws IOException {
        headers(response.getAllHeaders());
        // NOTE(review): getEntity() may be null for responses without a body — TODO confirm callers guarantee one.
        content(response.getEntity().getContent());
        return this;
    }

    //-----------------------------------------------------------------------------------------------------------------
    // HttpEntityBuilder setters.
    //-----------------------------------------------------------------------------------------------------------------

    /**
     * Sets the content on this entity bean.
     *
     * @param value The entity content, can be <jk>null</jk>.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> content(Object value) {
        entityBuilder().content(value);
        return this;
    }

    /**
     * Sets the content on this entity bean from a supplier.
     *
     * <p>
     * Repeatable entities such as {@link StringEntity} use this to allow the entity content to be resolved at
     * serialization time.
     *
     * @param value The entity content, can be <jk>null</jk>.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentSupplier(Supplier<?> value) {
        entityBuilder().contentSupplier(value);
        return this;
    }

    /**
     * Sets the content type on this entity bean.
     *
     * @param value The new <c>Content-Type</c> header, or <jk>null</jk> to unset.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentType(String value) {
        entityBuilder().contentType(value);
        return this;
    }

    /**
     * Sets the content type on this entity bean.
     *
     * @param value The new <c>Content-Type</c> header, or <jk>null</jk> to unset.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentType(ContentType value) {
        entityBuilder().contentType(value);
        return this;
    }

    /**
     * Sets the content length on this entity bean.
     *
     * @param value The new <c>Content-Length</c> header value, or <c>-1</c> to unset.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentLength(long value) {
        entityBuilder().contentLength(value);
        return this;
    }

    /**
     * Sets the content encoding header on this entity bean.
     *
     * @param value The new <c>Content-Encoding</c> header, or <jk>null</jk> to unset.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentEncoding(String value) {
        entityBuilder().contentEncoding(value);
        return this;
    }

    /**
     * Sets the content encoding header on this entity bean.
     *
     * @param value The new <c>Content-Encoding</c> header, or <jk>null</jk> to unset.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> contentEncoding(ContentEncoding value) {
        entityBuilder().contentEncoding(value);
        return this;
    }

    /**
     * Sets the 'chunked' flag value to <jk>true</jk>.
     *
     * <ul class='notes'>
     *  <li>If the {@link HttpEntity#getContentLength()} method returns a negative value, the HttpClient code will always
     *      use chunked encoding.
     * </ul>
     *
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> chunked() {
        entityBuilder().chunked();
        return this;
    }

    /**
     * Sets the 'chunked' flag value.
     *
     * <ul class='notes'>
     *  <li>If the {@link HttpEntity#getContentLength()} method returns a negative value, the HttpClient code will always
     *      use chunked encoding.
     * </ul>
     *
     * @param value The new value for this flag.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> chunked(boolean value) {
        entityBuilder().chunked(value);
        return this;
    }

    /**
     * Specifies that the contents of this resource should be cached into an internal byte array so that it can
     * be read multiple times.
     *
     * @return This object.
     * @throws IOException If entity could not be read into memory.
     */
    @FluentSetter
    public HttpResourceBuilder<T> cached() throws IOException {
        entityBuilder().cached();
        return this;
    }

    //-----------------------------------------------------------------------------------------------------------------
    // BasicHeaderGroup setters.
    //-----------------------------------------------------------------------------------------------------------------

    /**
     * Sets the headers on this resource, replacing any previously set header list.
     *
     * @param value The new value.
     * @return This object.
     */
    @FluentSetter
    public HttpResourceBuilder<T> headers(HeaderList value) {
        headers = value;
        headersBuilder = null;
        return this;
    }

    /**
     * Removes any headers already in this builder.
     *
     * @return This object.
     */
    public HttpResourceBuilder<T> clearHeaders() {
        headersBuilder().clear();
        return this;
    }

    /**
     * Adds the specified header to the end of the headers in this builder.
     *
     * @param value The header to add.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> header(Header value) {
        if (value != null)
            headersBuilder().append(value);
        return this;
    }

    /**
     * Adds the specified header to the end of the headers in this builder.
     *
     * <p>
     * This is a no-op if either the name or value is <jk>null</jk>.
     *
     * @param name The header name.
     * @param value The header value.
     * @return This object.
     */
    public HttpResourceBuilder<T> header(String name, String value) {
        if (name != null && value != null)
            headersBuilder().append(name, value);
        return this;
    }

    /**
     * Adds the specified headers to the end of the headers in this builder.
     *
     * <p>
     * Content-related headers (<c>Content-Type</c>, <c>Content-Encoding</c>, <c>Content-Length</c>) are routed to
     * the entity builder instead of the header list.
     *
     * @param values The headers to add.  <jk>null</jk> headers and headers with <jk>null</jk> names or values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> headers(Header...values) {
        for (Header h : values) {
            if (h != null) {
                String n = h.getName();
                String v = h.getValue();
                if (isNotEmpty(n)) {
                    if (n.equalsIgnoreCase("content-type"))
                        contentType(v);
                    else if (n.equalsIgnoreCase("content-encoding"))
                        contentEncoding(v);
                    else if (n.equalsIgnoreCase("content-length"))
                        // NOTE(review): a malformed Content-Length value throws NumberFormatException here — TODO confirm intended.
                        contentLength(Long.parseLong(v));
                    else
                        headersBuilder().append(h);
                }
            }
        }
        return this;
    }

    /**
     * Adds the specified headers to the end of the headers in this builder.
     *
     * @param values The headers to add.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> headers(List<Header> values) {
        headersBuilder().append(values);
        return this;
    }

    /**
     * Removes the specified header from this builder.
     *
     * @param value The header to remove.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> removeHeader(Header value) {
        headersBuilder().remove(value);
        return this;
    }

    /**
     * Removes the specified headers from this builder.
     *
     * @param values The headers to remove.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> removeHeaders(Header...values) {
        headersBuilder().remove(values);
        return this;
    }

    /**
     * Removes the specified headers from this builder.
     *
     * @param values The headers to remove.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> removeHeaders(List<Header> values) {
        headersBuilder().remove(values);
        return this;
    }

    /**
     * Replaces the first occurrence of the header with the same name.
     *
     * <p>
     * If no header with the same name is found the given header is added to the end of the list.
     *
     * @param value The headers to replace.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> updateHeader(Header value) {
        headersBuilder().set(value);
        return this;
    }

    /**
     * Replaces the first occurrence of the headers with the same name.
     *
     * <p>
     * If no header with the same name is found the given header is added to the end of the list.
     *
     * @param values The headers to replace.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> updateHeaders(Header...values) {
        headersBuilder().set(values);
        return this;
    }

    /**
     * Replaces the first occurrence of the headers with the same name.
     *
     * <p>
     * If no header with the same name is found the given header is added to the end of the list.
     *
     * @param values The headers to replace.  <jk>null</jk> values are ignored.
     * @return This object.
     */
    public HttpResourceBuilder<T> updateHeaders(List<Header> values) {
        headersBuilder().set(values);
        return this;
    }

    /**
     * Sets all of the headers contained within this group overriding any existing headers.
     *
     * <p>
     * The headers are added in the order in which they appear in the array.
     *
     * @param values The headers to set
     * @return This object.
     */
    public HttpResourceBuilder<T> setHeaders(Header...values) {
        headersBuilder().clear().append(values);
        return this;
    }

    /**
     * Sets all of the headers contained within this group overriding any existing headers.
     *
     * <p>
     * The headers are added in the order in which they appear in the list.
     *
     * @param values The headers to set
     * @return This object.
     */
    public HttpResourceBuilder<T> setHeaders(List<Header> values) {
        headersBuilder().clear().append(values);
        return this;
    }

    //-----------------------------------------------------------------------------------------------------------------
    // Other methods
    //-----------------------------------------------------------------------------------------------------------------

    // Lazily converts the stored header list into a mutable builder (one-way transition).
    private HeaderList.Builder headersBuilder() {
        if (headersBuilder == null) {
            headersBuilder = headers == null ? HeaderList.create() : headers.copy();
            headers = null;
        }
        return headersBuilder;
    }

    // Lazily converts the stored entity into a mutable builder (one-way transition).
    private HttpEntityBuilder<?> entityBuilder() {
        if (entityBuilder == null) {
            entityBuilder = entity == null ? BasicHttpEntity.create(entityImplClass) : entity.copy();
            entity = null;
        }
        return entityBuilder;
    }

    // <FluentSetters>

    // </FluentSetters>
}
package com.packagename.myapp.models;

import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * JPA entity for the {@code faculty} table.
 *
 * <p>A faculty owns a set of {@link Department}s (the inverse side of the
 * department's {@code faculty} association) and participates in the
 * {@link BaseModel} parent/child hierarchy as a root node (no parent).
 */
@Entity
@Table(name = "faculty")
public class Faculty extends BaseModel {

    /** Auto-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "faculty_id")
    private int id;

    /** Departments belonging to this faculty; eagerly fetched. */
    @OneToMany(mappedBy = "faculty", fetch = FetchType.EAGER)
    private Set<Department> departments;

    /** Full faculty name. */
    @NotNull
    private String name;

    /** Short form of the name (e.g. for display in compact views). */
    @NotNull
    private String abbreviation;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    /** Faculties are hierarchy roots, so there is no parent. */
    @Override
    public BaseModel getParent() {
        return null;
    }

    /** No-op: a faculty cannot be re-parented. */
    @Override
    public void setParent(BaseModel parent) {}

    /**
     * Returns the child nodes (departments) of this faculty.
     *
     * <p>Fix: previously this dereferenced {@code departments} unconditionally and
     * threw a {@link NullPointerException} for new/unloaded instances where the
     * collection is still {@code null}. Now returns an empty list in that case.
     *
     * @return a fresh mutable list of children; never {@code null}
     */
    @Override
    public List<BaseModel> getChildren() {
        if (departments == null) {
            return new ArrayList<>();
        }
        return new ArrayList<>(departments);
    }

    public void setName(String name) {
        this.name = name;
    }

    public Set<Department> getDepartments() {
        return departments;
    }

    public void setDepartments(Set<Department> departments) {
        this.departments = departments;
    }

    public String getAbbreviation() {
        return abbreviation;
    }

    public void setAbbreviation(String abbreviation) {
        this.abbreviation = abbreviation;
    }

    /** NOTE(review): returns {@code null} if the name was never set — TODO confirm callers tolerate that. */
    @Override
    public String toString() {
        return this.getName();
    }
}
/*
 * Copyright 2012-2019 Aerospike, Inc.
 *
 * Portions may be licensed to Aerospike, Inc. under one or more contributor
 * license agreements WHICH ARE COMPATIBLE WITH THE APACHE LICENSE, VERSION 2.0.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.aerospike.client.cdt;

/**
 * List sort flags.
 *
 * <p>Bit flags passed to list sort CDT operations.
 */
public final class ListSortFlags {
	/**
	 * Default. Preserve duplicate values when sorting list.
	 */
	public static final int DEFAULT = 0;

	/**
	 * Drop duplicate values when sorting list.
	 */
	public static final int DROP_DUPLICATES = 2;

	/** Constants-only holder; not instantiable. */
	private ListSortFlags() {
	}
}
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package frc.robot.subsystems;

//WPI imports
import edu.wpi.first.wpilibj2.command.SubsystemBase;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.util.Color;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.math.controller.PIDController;

//REV imports
import com.revrobotics.CANSparkMax;
import com.revrobotics.CANSparkMaxLowLevel.MotorType;
import com.revrobotics.RelativeEncoder;
import com.revrobotics.CANSparkMax.IdleMode;
import com.revrobotics.ColorSensorV3;
import com.revrobotics.ColorMatchResult;
import com.revrobotics.ColorMatch;

//Robot imports
import frc.robot.Constants.motorConstants;
import frc.robot.Constants.colorSensorConstants;
import frc.robot.Constants.limitSwitchConstants;

/**
 * Shooter subsystem: two flywheel motors, a feeder motor, and a lift motor
 * bounded by top/bottom limit switches, plus a REV color sensor for ball
 * (cargo) color detection. Speeds are read from / reported to SmartDashboard.
 */
public class Shooter extends SubsystemBase {
  // Flywheel motors (instance) and feeder/lift motors (static, used by static helpers below).
  public CANSparkMax shootMot1,shootMot2;
  public static CANSparkMax feederMot, liftMot;
  public RelativeEncoder shootEnc1, shootEnc2;
  public final ColorSensorV3 m_colorSensor;
  public final ColorMatch m_colorMatcher;
  public static RelativeEncoder liftEnc;
  public static DigitalInput topLimitSwitch, botLimitSwitch;

  //Shoot lift PID values
  // NOTE(review): P, I, D are never assigned, so testPID is constructed with all-zero
  // gains at class-init time — TODO confirm this controller is intentionally unused/inert.
  public static double P, I, D, dP, min_Command;
  public static double PIDout, steeringAdjust;
  static PIDController testPID = new PIDController(P, I, D);

  /** Wires up motors, encoders, sensors, dashboard keys, and limit switches. */
  public Shooter() {
    //Shooter Motors
    shootMot1 = new CANSparkMax(motorConstants.SPEED_CONT16, MotorType.kBrushless);
    shootMot2 = new CANSparkMax(motorConstants.SPEED_CONT17, MotorType.kBrushless);
    feederMot = new CANSparkMax(motorConstants.SPEED_CONT18, MotorType.kBrushless);
    liftMot = new CANSparkMax(motorConstants.SHOOTER_LIFT, MotorType.kBrushless);
    // Coast so the flywheels spin down freely instead of braking.
    shootMot1.setIdleMode(IdleMode.kCoast);
    shootMot2.setIdleMode(IdleMode.kCoast);
    feederMot.setIdleMode(IdleMode.kCoast);
    liftMot.setIdleMode(IdleMode.kCoast);
    shootMot1.setInverted(motorConstants.SHOOTMOT1_INVERT);
    liftMot.setInverted(motorConstants.SHOOTER_LIFT_INVERT);

    //Shooter Encoders
    shootEnc1 = shootMot1.getEncoder();
    shootEnc2 = shootMot2.getEncoder();
    liftEnc = liftMot.getEncoder();

    //Color Sensor
    m_colorSensor = new ColorSensorV3(colorSensorConstants.i2cPort);
    m_colorMatcher = new ColorMatch();

    //Smartdashboard — seed the operator-editable speed inputs.
    SmartDashboard.putNumber("Lift Motor Velocity Input", 0.0);
    SmartDashboard.putNumber("Shooter Motor Velocity Input", shootEnc1.getVelocity());

    //Lift limitswitches
    topLimitSwitch = new DigitalInput(limitSwitchConstants.TOP_LIMIT_SWITCH_PORT);
    botLimitSwitch = new DigitalInput(limitSwitchConstants.BOTTOM_LIMIT_SWITCH_PORT);
  }

  /** Publishes lift position and limit-switch state to the dashboard every loop. */
  @Override
  public void periodic() {
    SmartDashboard.putNumber("Encoder pos", liftEnc.getPosition());
    SmartDashboard.putBoolean("Shooter Top Limit", topLimitSwitch.get());
    SmartDashboard.putBoolean("Shooter Bottom Limit", botLimitSwitch.get());
  }

  /** Runs the feeder motor open-loop at the given duty cycle [-1, 1]. */
  public static void runFeeder (double speed) {
    feederMot.set(speed);
  }

  /** Runs both flywheels at the dashboard-supplied speed and reports measured velocities. */
  public void runShooter () {
    //Get values from Smartdashboard
    double speed = SmartDashboard.getNumber("Shooter Motor Velocity Input", 0.0);
    //Set Motor Speeds
    shootMot1.set(speed);
    shootMot2.set(speed);
    SmartDashboard.putNumber("Motor 1 Velocity", shootEnc1.getVelocity());
    SmartDashboard.putNumber("Motor 2 Velocity", shootEnc2.getVelocity());
  }

  /**
   * Homes the lift: drives toward the bottom switch, then zeroes the encoder.
   *
   * @return true once the bottom switch reads pressed and the encoder is zeroed.
   */
  public static boolean liftInit() {
    // NOTE(review): assumes get()==true means "not pressed" (normally-closed wiring) — TODO confirm.
    if (botLimitSwitch.get()){
      liftMot.set(0.2);;  // NOTE(review): stray second ';' — harmless empty statement.
    }
    if (!botLimitSwitch.get()){
      liftMot.set(0.0);
      liftEnc.setPosition(0);
      return true;
    }
    return false;
  }

  /**
   * Drives the lift based on the sign of the dashboard speed input, respecting
   * the limit switches as travel bounds.
   *
   * NOTE(review): only the SIGN of the input is used — both directions drive at a
   * fixed +0.2 duty cycle, including the speed<0 branch. Verify whether the
   * magnitude (and a negative output for downward travel) was intended.
   */
  public static void runLift() {
    double speed = SmartDashboard.getNumber("Lift Motor Velocity Input", 0.0);
    if (speed > 0.0){
      if(botLimitSwitch.get()){
        liftMot.set(0.2);
      } else {
        liftMot.set(0.0);
      }
    }
    else if (speed < 0.0) {
      if (topLimitSwitch.get()){
        liftMot.set(0.2);
      } else {
        liftMot.set(0.0);
      }
    }
    else {
      liftMot.set(0.0);
    }
    SmartDashboard.putNumber("Lift position", liftEnc.getPosition());
  }

  /** Stops both flywheel motors. */
  public void stopShooter () {
    shootMot1.set(0.0);
    shootMot2.set(0.0);
  }

  /**
   * Classifies the ball in front of the color sensor.
   *
   * Also publishes "has_ball" (IR reflectance above an empirical threshold of 15).
   *
   * @return "RED", "BLUE", or "NONE" based on empirically tuned RGB thresholds.
   */
  public String readBall () {
    Color detectedColor = m_colorSensor.getColor();
    /**
     * The sensor returns a raw IR value of the infrared light detected.
     */
    double IR = m_colorSensor.getIR();
    double red = detectedColor.red;
    double blue = detectedColor.blue;
    if (IR > 15) {
      SmartDashboard.putBoolean("has_ball", true);
    }
    else SmartDashboard.putBoolean("has_ball", false);
    if (red >= 0.4 && red <= 0.6 && blue <= 0.2) {
      return "RED";
    }
    else if (blue >= 0.33 && blue <= 0.5 && red <= 0.23) {
      return "BLUE";
    }
    else return "NONE";
    /**
     * Open Smart Dashboard or Shuffleboard to see the color detected by the
     * sensor.
     *
    SmartDashboard.putNumber("Red", detectedColor.red);
    SmartDashboard.putNumber("Green", detectedColor.green);
    SmartDashboard.putNumber("Blue", detectedColor.blue);
    SmartDashboard.putNumber("IR", IR);
    SmartDashboard.putString("raw", Integer.toHexString(detectedColor.hashCode()));
    */
  }
}
/**
 * Copyright 2009-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ibatis.submitted.typehandlerinjection;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.TypeHandler;

/**
 * Maps the numeric USER state column to its symbolic name and back:
 * 0 &lt;-&gt; "INACTIVE", 1 &lt;-&gt; "ACTIVE".
 */
public class UserStateTypeHandler<E> implements TypeHandler<Object> {

  // Code-to-name table, populated once at class load.
  private static Map<String, String> lookup;

  static {
    Map<String, String> states = new HashMap<>();
    states.put("0", "INACTIVE");
    states.put("1", "ACTIVE");
    lookup = states;
  }

  UserStateTypeHandler() {
    // can only be constructed from this package
  }

  /** Reads the state code by column label and returns its symbolic name. */
  @Override
  public Object getResult(ResultSet rs, String columnName) throws SQLException {
    int code = rs.getInt(columnName);
    return lookupValue(code);
  }

  /** Reads the state code by column index and returns its symbolic name. */
  @Override
  public Object getResult(ResultSet rs, int columnIndex) throws SQLException {
    int code = rs.getInt(columnIndex);
    return lookupValue(code);
  }

  /** Reads the state code from a callable statement and returns its symbolic name. */
  @Override
  public Object getResult(CallableStatement cs, int columnIndex) throws SQLException {
    int code = cs.getInt(columnIndex);
    return lookupValue(code);
  }

  /** Binds the numeric code matching the given symbolic state name. */
  @Override
  public void setParameter(PreparedStatement ps, int i, Object value, JdbcType jdbcType) throws SQLException {
    ps.setInt(i, Integer.valueOf(keyFor(value)));
  }

  // Reverse lookup: scans the table for the entry whose value equals the given
  // state; an unknown state leaves the empty string (which then fails numeric parsing).
  private static String keyFor(Object state) {
    String key = "";
    for (Entry<String, String> mapping : lookup.entrySet()) {
      if (state.equals(mapping.getValue())) {
        key = mapping.getKey();
      }
    }
    return key;
  }

  // Forward lookup: numeric code -> symbolic name (null for unknown codes).
  private String lookupValue(int val) {
    return lookup.get(Integer.toString(val));
  }
}
package com.mossle.meeting.persistence.domain;

// Generated by Hibernate Tools

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;

/**
 * MeetingItem entity for table MEETING_ITEM: links a {@link MeetingInfo}
 * to a {@link MeetingRoom}, with display attributes (icon, name) and a
 * tenant id column.
 *
 * @author Lingo
 */
@Entity
@Table(name = "MEETING_ITEM")
public class MeetingItem implements java.io.Serializable {
    private static final long serialVersionUID = 0L;

    /** Primary key (application-assigned; no generator is configured). */
    private Long id;

    /** Owning meeting info (lazy, joined via INFO_ID). */
    private MeetingInfo meetingInfo;

    /** Associated meeting room (lazy, joined via ROOM_ID). */
    private MeetingRoom meetingRoom;

    /** Icon identifier/path, up to 50 chars. */
    private String icon;

    /** Display name, up to 50 chars. */
    private String name;

    /** Tenant discriminator, up to 64 chars — presumably for multi-tenant partitioning; verify against callers. */
    private String tenantId;

    /** Default constructor required by JPA/Hibernate. */
    public MeetingItem() {
    }

    /** Constructs with only the primary key set. */
    public MeetingItem(Long id) {
        this.id = id;
    }

    /** Constructs with all fields set. */
    public MeetingItem(Long id, MeetingInfo meetingInfo,
            MeetingRoom meetingRoom, String icon, String name, String tenantId) {
        this.id = id;
        this.meetingInfo = meetingInfo;
        this.meetingRoom = meetingRoom;
        this.icon = icon;
        this.name = name;
        this.tenantId = tenantId;
    }

    /** @return the primary key. */
    @Id
    @Column(name = "ID", unique = true, nullable = false)
    public Long getId() {
        return this.id;
    }

    /**
     * @param id
     *            the primary key.
     */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the owning meeting info (lazily loaded). */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "INFO_ID")
    public MeetingInfo getMeetingInfo() {
        return this.meetingInfo;
    }

    /**
     * @param meetingInfo
     *            the owning meeting info.
     */
    public void setMeetingInfo(MeetingInfo meetingInfo) {
        this.meetingInfo = meetingInfo;
    }

    /** @return the associated meeting room (lazily loaded). */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "ROOM_ID")
    public MeetingRoom getMeetingRoom() {
        return this.meetingRoom;
    }

    /**
     * @param meetingRoom
     *            the associated meeting room.
     */
    public void setMeetingRoom(MeetingRoom meetingRoom) {
        this.meetingRoom = meetingRoom;
    }

    /** @return the icon identifier/path. */
    @Column(name = "ICON", length = 50)
    public String getIcon() {
        return this.icon;
    }

    /**
     * @param icon
     *            the icon identifier/path.
     */
    public void setIcon(String icon) {
        this.icon = icon;
    }

    /** @return the display name. */
    @Column(name = "NAME", length = 50)
    public String getName() {
        return this.name;
    }

    /**
     * @param name
     *            the display name.
     */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the tenant discriminator. */
    @Column(name = "TENANT_ID", length = 64)
    public String getTenantId() {
        return this.tenantId;
    }

    /**
     * @param tenantId
     *            the tenant discriminator.
     */
    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }
}