gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.deviceinfosample.adapter; import android.app.Activity; import android.content.Context; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import com.an.deviceinfo.ads.Ad; import com.an.deviceinfo.device.model.App; import com.an.deviceinfo.device.model.Battery; import com.an.deviceinfo.device.model.Device; import com.an.deviceinfo.device.model.Memory; import com.an.deviceinfo.device.model.Network; import com.an.deviceinfo.location.DeviceLocation; import com.an.deviceinfo.userapps.UserApps; import com.an.deviceinfo.usercontacts.UserContacts; import com.deviceinfosample.R; import java.util.List; public class CustomListAdapter extends RecyclerView.Adapter<CustomListAdapter.CustomViewHolder> { private Context context; private List deviceList; private Object object; public CustomListAdapter(Context context, List deviceList) { this.context = context; this.deviceList = deviceList; } public CustomListAdapter(Context context, Object object) { this.context = context; this.object = object; } @Override public CustomViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewType) { View view = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.fragment_list_item, null); CustomListAdapter.CustomViewHolder viewHolder = new CustomListAdapter.CustomViewHolder(view); return viewHolder; } @Override public void onBindViewHolder(CustomViewHolder holder, int position) { if(deviceList == null) { if(object instanceof Ad) { handleAdInfo(holder, position); } else if(object instanceof DeviceLocation) { handleLocationInfo(holder, position); } else if(object instanceof App) { handleAppInfo(holder, position); } else if(object instanceof Battery) { handleBatteryInfo(holder, position); } else if(object instanceof Memory) { handleMemoryInfo(holder, position); } else if(object instanceof Network) { handleNetworkInfo(holder, position); } else if(object 
instanceof Device) { handleDeviceInfo(holder, position); } return; } Object object = deviceList.get(position); if(object instanceof UserApps) { holder.textView.setText(((UserApps) object).getAppName()); holder.desc.setText(((UserApps) object).getPackageName()); } else { holder.textView.setText(((UserContacts) object).getDisplayName()); holder.desc.setText(((UserContacts) object).getMobileNumber()); } } @Override public int getItemCount() { if(deviceList == null) return object.getClass().getDeclaredFields().length; return deviceList.size(); } public class CustomViewHolder extends RecyclerView.ViewHolder { private TextView textView; private TextView desc; public CustomViewHolder(View itemView) { super(itemView); this.textView = (TextView) itemView.findViewById(R.id.textView); this.desc = (TextView) itemView.findViewById(R.id.textDesc); } } private void handleAdInfo(CustomViewHolder holder, int position) { if(position == 0) { holder.textView.setText("AdvertisingId:"); holder.desc.setText(((Ad) object).getAdvertisingId()); } else { holder.textView.setText("Allow to track ads:"); holder.desc.setText(String.valueOf(((Ad) object).isAdDoNotTrack())); } } private void handleLocationInfo(CustomViewHolder holder, int position) { switch (position) { case 0: holder.textView.setText("Lattitude:"); holder.desc.setText(String.valueOf(((DeviceLocation) object).getLatitude())); break; case 1: holder.textView.setText("Longitude:"); holder.desc.setText(String.valueOf(((DeviceLocation) object).getLongitude())); break; case 2: holder.textView.setText("Address Line 1:"); holder.desc.setText(((DeviceLocation) object).getAddressLine1()); break; case 3: holder.textView.setText("City:"); holder.desc.setText(((DeviceLocation) object).getCity()); break; case 4: holder.textView.setText("State:"); holder.desc.setText(((DeviceLocation) object).getState()); break; case 5: holder.textView.setText("CountryCode:"); holder.desc.setText(((DeviceLocation) object).getCountryCode()); break; case 6: 
holder.textView.setText("Postal Code:"); holder.desc.setText(((DeviceLocation) object).getPostalCode()); break; } } private void handleAppInfo(CustomViewHolder holder, int position) { switch (position) { case 0: holder.textView.setText("App Name:"); holder.desc.setText(((App) object).getAppName()); break; case 1: holder.textView.setText("Package Name:"); holder.desc.setText(((App) object).getPackageName()); break; case 2: holder.textView.setText("Activity Name:"); holder.desc.setText(((App) object).getActivityName()); break; case 3: holder.textView.setText("App Version Name:"); holder.desc.setText(((App) object).getAppVersionName()); break; case 4: holder.textView.setText("App Version Code:"); holder.desc.setText(String.valueOf(((App) object).getAppVersionCode())); break; } } private void handleBatteryInfo(CustomViewHolder holder, int position) { switch (position) { case 0: holder.textView.setText("Battery Percent:"); holder.desc.setText(String.valueOf(((Battery) object).getBatteryPercent())); break; case 1: holder.textView.setText("Is Phone Charging:"); holder.desc.setText(String.valueOf(((Battery) object).isPhoneCharging())); break; case 2: holder.textView.setText("Battery Health:"); holder.desc.setText(((Battery) object).getBatteryHealth()); break; case 3: holder.textView.setText("Battery Technology:"); holder.desc.setText(((Battery) object).getBatteryTechnology()); break; case 4: holder.textView.setText("Battery Temperature:"); holder.desc.setText(String.valueOf(((Battery) object).getBatteryTemperature())); break; case 5: holder.textView.setText("Battery Voltage:"); holder.desc.setText(String.valueOf(((Battery) object).getBatteryVoltage())); break; case 6: holder.textView.setText("Charging Source:"); holder.desc.setText(((Battery) object).getChargingSource()); break; case 7: holder.textView.setText("Is Battery Present:"); holder.desc.setText(String.valueOf(((Battery) object).isBatteryPresent())); break; } } private void handleMemoryInfo(CustomViewHolder holder, 
int position) { switch (position) { case 0: holder.textView.setText("Has external Momeny Card:"); holder.desc.setText(String.valueOf(((Memory) object).isHasExternalSDCard())); break; case 1: holder.textView.setText("Total RAM:"); holder.desc.setText(String.valueOf(convertToGb(((Memory) object).getTotalRAM())) + " GB"); break; case 2: holder.textView.setText("Total Internal Memory Space:"); holder.desc.setText(String.valueOf(convertToGb(((Memory) object).getTotalInternalMemorySize())) + " GB"); break; case 3: holder.textView.setText("Available Memory Space:"); holder.desc.setText(String.valueOf(convertToGb(((Memory) object).getAvailableInternalMemorySize())) + " GB"); break; case 4: holder.textView.setText("Total External Memory Space:"); holder.desc.setText(String.valueOf(convertToGb( ((Memory) object).getTotalExternalMemorySize()) ) + " GB"); break; case 5: holder.textView.setText("Available External Momory Space:"); holder.desc.setText(String.valueOf(convertToGb (((Memory) object).getAvailableExternalMemorySize()) ) + " GB"); break; } } private void handleNetworkInfo(CustomViewHolder holder, int position) { switch (position) { case 0: holder.textView.setText("IMEI:"); holder.desc.setText(((Network) object).getIMEI()); break; case 1: holder.textView.setText("IMSI:"); holder.desc.setText(((Network) object).getIMSI()); break; case 2: holder.textView.setText("Phone Type:"); holder.desc.setText(((Network) object).getPhoneType()); break; case 3: holder.textView.setText("Phone Number:"); holder.desc.setText(((Network) object).getPhoneNumber()); break; case 4: holder.textView.setText("Carrier:"); holder.desc.setText(((Network) object).getOperator()); break; case 5: holder.textView.setText("SIM Serial:"); holder.desc.setText(((Network) object).getsIMSerial()); break; case 6: holder.textView.setText("is SIM Locked:"); holder.desc.setText(String.valueOf(((Network) object).isSimNetworkLocked())); break; case 7: holder.textView.setText("is Nfc Enabled:"); 
holder.desc.setText(String.valueOf(((Network) object).isNfcEnabled())); break; case 8: holder.textView.setText("is Nfc Present:"); holder.desc.setText(String.valueOf(((Network) object).isNfcPresent())); break; case 9: holder.textView.setText("is Wifi Enabled:"); holder.desc.setText(String.valueOf(((Network) object).isWifiEnabled())); break; case 10: holder.textView.setText("is Network Available:"); holder.desc.setText(String.valueOf(((Network) object).isNetworkAvailable())); break; case 11: holder.textView.setText("Network Class:"); holder.desc.setText(((Network) object).getNetworkClass()); break; case 12: holder.textView.setText("Network Type:"); holder.desc.setText(((Network) object).getNetworkType()); break; } } private void handleDeviceInfo(CustomViewHolder holder, int position) { switch (position) { case 0: holder.textView.setText("Manufacturer:"); holder.desc.setText(((Device) object).getManufacturer()); break; case 1: holder.textView.setText("Model:"); holder.desc.setText(((Device) object).getModel()); break; case 2: holder.textView.setText("Build VersionCode Name:"); holder.desc.setText(((Device) object).getBuildVersionCodeName()); break; case 3: holder.textView.setText("Release Build Version:"); holder.desc.setText(((Device) object).getReleaseBuildVersion()); break; case 4: holder.textView.setText("Product:"); holder.desc.setText(((Device) object).getProduct()); break; case 5: holder.textView.setText("Fingerprint:"); holder.desc.setText(((Device) object).getFingerprint()); break; case 6: holder.textView.setText("Hardware:"); holder.desc.setText(((Device) object).getHardware()); break; case 7: holder.textView.setText("Radio Version:"); holder.desc.setText(((Device) object).getRadioVersion()); break; case 8: holder.textView.setText("Device:"); holder.desc.setText(((Device) object).getDevice()); break; case 9: holder.textView.setText("Board:"); holder.desc.setText(((Device) object).getBoard()); break; case 10: holder.textView.setText("Display Version:"); 
holder.desc.setText(((Device) object).getDisplayVersion()); break; case 11: holder.textView.setText("Build Brand:"); holder.desc.setText(((Device) object).getBuildBrand()); break; case 12: holder.textView.setText("Build Host:"); holder.desc.setText(((Device) object).getBuildHost()); break; case 13: holder.textView.setText("Build Time:"); holder.desc.setText(String.valueOf(((Device) object).getBuildTime())); break; case 14: holder.textView.setText("Build User:"); holder.desc.setText(((Device) object).getBuildUser()); break; case 15: holder.textView.setText("Serial:"); holder.desc.setText(((Device) object).getSerial()); break; case 16: holder.textView.setText("OS Version:"); holder.desc.setText(((Device) object).getOsVersion()); break; case 17: holder.textView.setText("Language:"); holder.desc.setText(((Device) object).getLanguage()); break; case 18: holder.textView.setText("SDK Version:"); holder.desc.setText(String.valueOf(((Device) object).getSdkVersion())); break; case 19: holder.textView.setText("Screen Density:"); holder.desc.setText(((Device) object).getScreenDensity()); break; case 20: holder.textView.setText("Screen Height:"); holder.desc.setText(String.valueOf(((Device) object).getScreenHeight())); break; case 21: holder.textView.setText("Screen Width:"); holder.desc.setText(String.valueOf(((Device) object).getScreenWidth())); break; } } private float convertToGb(long valInBytes) { return Float.valueOf(String.format("%.2f", (float) valInBytes / (1024 * 1024 * 1024))); } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2016 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.parosproxy.paros.network; import static java.util.Arrays.asList; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.jupiter.api.Test; import org.zaproxy.zap.network.HttpBodyTestUtils; import org.zaproxy.zap.network.HttpEncoding; /** Unit test for {@link HttpBody}. 
*/ class HttpBodyUnitTest extends HttpBodyTestUtils { @Test void shouldHaveZeroLengthByDefault() { // Given HttpBody httpBody = new HttpBodyImpl(); // When int length = httpBody.length(); // Then assertThat(length, is(equalTo(0))); } @Test void shouldHaveEmptyByteContentByDefault() { // Given HttpBody httpBody = new HttpBodyImpl(); // When byte[] content = httpBody.getBytes(); // Then assertThat(content, is(not(nullValue()))); assertThat(content.length, is(equalTo(0))); } @Test void shouldHaveEmptyStringRepresentationByDefault() { // Given HttpBody httpBody = new HttpBodyImpl(); // When String stringRepresentation = httpBody.toString(); // Then assertThat(stringRepresentation, is(equalTo(""))); } @Test void shouldCreateBodyWithNullByteArray() { // Given HttpBody httpBody = new HttpBodyImpl((byte[]) null); // When / Then assertThat(httpBody.length(), is(equalTo(0))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(allZeroBytes())); assertThat(httpBody.getBytes().length, is(equalTo(0))); assertThat(httpBody.toString(), is(equalTo(""))); } @Test void shouldCreateBodyWithByteArray() { // Given HttpBody httpBody = new HttpBodyImpl(BODY_1_BYTES_DEFAULT_CHARSET); // When / Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); } @Test void shouldCreateBodyWithNullString() { // Given HttpBody httpBody = new HttpBodyImpl((String) null); // When / Then assertThat(httpBody.length(), is(equalTo(0))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(allZeroBytes())); assertThat(httpBody.getBytes().length, is(equalTo(0))); assertThat(httpBody.toString(), is(equalTo(""))); } 
@Test void shouldCreateBodyWithStringUsingDefaultCharset() { // Given HttpBody httpBody = new HttpBodyImpl(BODY_1_STRING); // When / Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); } @Test void shouldCreateBodyWithInitialCapacity() { // Given int initialCapacity = 1024; HttpBody httpBody = new HttpBodyImpl(initialCapacity); // When / Then assertThat(httpBody.length(), is(equalTo(initialCapacity))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(allZeroBytes())); assertThat(httpBody.getBytes().length, is(equalTo(initialCapacity))); assertThat(httpBody.toString(), is(equalTo(""))); } @Test void shouldCreateBodyWithZeroLengthIfInitialCapacityIsNegative() { // Given HttpBody httpBody = new HttpBodyImpl(-1); // When / Then assertThat(httpBody.length(), is(equalTo(0))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(allZeroBytes())); assertThat(httpBody.getBytes().length, is(equalTo(0))); assertThat(httpBody.toString(), is(equalTo(""))); } @Test void shouldLimitInitialCapacityTo128kBytes() { // Given HttpBody httpBody = new HttpBodyImpl(500000); // When / Then assertThat(httpBody.length(), is(equalTo(LIMIT_INITIAL_CAPACITY))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(allZeroBytes())); assertThat(httpBody.getBytes().length, is(equalTo(LIMIT_INITIAL_CAPACITY))); assertThat(httpBody.toString(), is(equalTo(""))); } @Test void shouldDetermineCharsetByDefault() { // Given HttpBody httpBody = new HttpBodyImpl(); // When boolean determineCharset = httpBody.isDetermineCharset(); // Then assertThat(determineCharset, 
is(equalTo(true))); } @Test void shouldSetDetermineCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); // When httpBody.setDetermineCharset(false); // Then assertThat(httpBody.isDetermineCharset(), is(equalTo(false))); } @Test void shouldDetermineCharsetIfSetAndHasNoCharset() { // Given HttpBodyImpl httpBody = new HttpBodyImpl(); httpBody.setDetermineCharset(true); httpBody.setCharset(null); // When httpBody.setBody("X Y Z"); // Then assertThat(httpBody.isDetermineCharsetCalled(), is(equalTo(true))); } @Test void shouldNotDetermineCharsetIfNotSet() { // Given HttpBodyImpl httpBody = new HttpBodyImpl(); httpBody.setDetermineCharset(false); httpBody.setCharset(null); // When httpBody.setBody("X Y Z"); // Then assertThat(httpBody.isDetermineCharsetCalled(), is(equalTo(false))); } @Test void shouldHaveIso8859CharsetByDefault() { // Given HttpBody httpBody = new HttpBodyImpl(); // When String charset = httpBody.getCharset(); // Then assertThat(charset, is(equalTo(DEFAULT_CHARSET_NAME))); } @Test void shouldSetValidCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); // When httpBody.setCharset(UTF_8_NAME); // Then assertThat(httpBody.getCharset(), is(equalTo(UTF_8_NAME))); } @Test void shouldResetCharsetWithNullCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setCharset(null); // Then assertThat(httpBody.getCharset(), is(equalTo(DEFAULT_CHARSET_NAME))); } @Test void shouldResetCharsetWithEmptyCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setCharset(""); // Then assertThat(httpBody.getCharset(), is(equalTo(DEFAULT_CHARSET_NAME))); } @Test void shouldIgnoreInvalidCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setCharset("$_NotACharsetName"); // Then assertThat(httpBody.getCharset(), is(equalTo(UTF_8_NAME))); } @Test void shouldIgnoreUnsupportedCharset() { // Given 
HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setCharset("UnsupportedCharset-12345"); // Then assertThat(httpBody.getCharset(), is(equalTo(UTF_8_NAME))); } @Test void shouldIgnoreAlreadySetCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setCharset(UTF_8_NAME); // Then assertThat(httpBody.getCharset(), is(equalTo(UTF_8_NAME))); } @Test void shouldIgnoreNullBytesBodySet() { // Given HttpBody httpBody = new HttpBodyImpl("\0"); // When httpBody.setBody((byte[]) null); // Then assertThat(httpBody.length(), is(equalTo(1))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(new byte[] {0}))); assertThat(httpBody.getBytes().length, is(equalTo(1))); assertThat(httpBody.toString(), is(equalTo("\0"))); } @Test void shouldSetContentEncodings() { // Given HttpBody httpBody = new HttpBodyImpl(); List<HttpEncoding> encodings = Arrays.asList(mock(HttpEncoding.class)); // When httpBody.setContentEncodings(encodings); // Then assertThat(httpBody.getContentEncodings(), is(equalTo(encodings))); } @Test void shouldSetContentEncodingsAndCopyList() { // Given HttpBody httpBody = new HttpBodyImpl(); List<HttpEncoding> encodings = new ArrayList<>(); encodings.add(mock(HttpEncoding.class)); // When httpBody.setContentEncodings(encodings); encodings.add(mock(HttpEncoding.class)); // Then assertThat(httpBody.getContentEncodings(), is(not(equalTo(encodings)))); assertThat(httpBody.getContentEncodings(), hasSize(1)); } @Test void shouldSetContentEncodingsAndNotAllowModificationsToReturnedList() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class))); // When / Then assertThrows( UnsupportedOperationException.class, () -> httpBody.getContentEncodings().add(mock(HttpEncoding.class))); } @Test void shouldSetEmptyContentEncodings() { // Given HttpBody httpBody = new HttpBodyImpl(); 
List<HttpEncoding> encodings = Collections.emptyList(); // When httpBody.setContentEncodings(encodings); // Then assertThat(httpBody.getContentEncodings(), is(equalTo(encodings))); } @Test void shouldResetContentEncodingErrorsWhenSettingContentEncodings() { // Given HttpBody httpBody = new HttpBodyImpl(); List<HttpEncoding> encodings = Collections.emptyList(); // When httpBody.setContentEncodings(encodings); // Then assertThat(httpBody.hasContentEncodingErrors(), is(equalTo(false))); } @Test void shouldToStringWithContentEncodingsSet() throws IOException { // Given HttpBody httpBody = new HttpBodyImpl(); HttpEncoding contentEncoding = mock(HttpEncoding.class); String bodyData = "ABC"; byte[] encodedContent = bytes(bodyData); given(contentEncoding.decode(any())).willReturn(encodedContent); // When httpBody.toString(); // force the creation of the "old" string representation httpBody.setContentEncodings(asList(contentEncoding)); // Then assertThat(httpBody.toString(), is(equalTo(bodyData))); } @Test void shouldThrowExceptionWhenSettingNullContentEncodings() { // Given HttpBody httpBody = new HttpBodyImpl(); List<HttpEncoding> encodings = null; // When / Then assertThrows(NullPointerException.class, () -> httpBody.setContentEncodings(encodings)); } @Test void shouldThrowExceptionWhenSettingANullContentEncoding() { // Given HttpBody httpBody = new HttpBodyImpl(); List<HttpEncoding> encodings = Arrays.asList(mock(HttpEncoding.class), null); // When / Then assertThrows(NullPointerException.class, () -> httpBody.setContentEncodings(encodings)); } @Test void shouldIgnoreNullStringBodySet() { // Given HttpBody httpBody = new HttpBodyImpl("\0"); // When httpBody.setBody((String) null); // Then assertThat(httpBody.length(), is(equalTo(1))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(new byte[] {0}))); assertThat(httpBody.getBytes().length, is(equalTo(1))); assertThat(httpBody.toString(), is(equalTo("\0"))); } @Test void 
shouldSetBytesBodyUsingDefaultCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); // When httpBody.setBody(BODY_1_BYTES_DEFAULT_CHARSET); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); } @Test void shouldSetBytesBodyUsingDefaultCharsetAndNotContentEncode() throws IOException { // Given HttpBody httpBody = new HttpBodyImpl(); HttpEncoding encoding = mock(HttpEncoding.class); httpBody.setContentEncodings(Arrays.asList(encoding)); given(encoding.decode(BODY_1_BYTES_DEFAULT_CHARSET)) .willReturn(BODY_1_BYTES_DEFAULT_CHARSET); // When httpBody.setBody(BODY_1_BYTES_DEFAULT_CHARSET); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); verify(encoding).decode(BODY_1_BYTES_DEFAULT_CHARSET); verifyNoMoreInteractions(encoding); } @Test void shouldSetStringBodyUsingDefaultCharset() { // Given HttpBody httpBody = new HttpBodyImpl(); // When httpBody.setBody(BODY_1_STRING); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); } @Test void 
shouldSetStringBodyUsingDefaultCharsetAndContentEncode() throws IOException { // Given HttpBody httpBody = new HttpBodyImpl(); HttpEncoding encoding = mock(HttpEncoding.class); httpBody.setContentEncodings(Arrays.asList(encoding)); given(encoding.encode(BODY_1_BYTES_DEFAULT_CHARSET)) .willReturn(BODY_1_BYTES_DEFAULT_CHARSET); // When httpBody.setBody(BODY_1_STRING); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_DEFAULT_CHARSET.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_DEFAULT_CHARSET))); verify(encoding).encode(BODY_1_BYTES_DEFAULT_CHARSET); verifyNoMoreInteractions(encoding); } @Test void shouldSetBytesBodyUsingCharsetSet() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setBody(BODY_1_BYTES_UTF_8); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_UTF_8))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_UTF_8))); } @Test void shouldSetBytesBodyUsingCharsetSetAndNotContentEncode() throws IOException { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); HttpEncoding encoding = mock(HttpEncoding.class); httpBody.setContentEncodings(Arrays.asList(encoding)); given(encoding.decode(BODY_1_BYTES_UTF_8)).willReturn(BODY_1_BYTES_UTF_8); // When httpBody.setBody(BODY_1_BYTES_UTF_8); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_UTF_8))); 
assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_UTF_8))); verify(encoding).decode(BODY_1_BYTES_UTF_8); verifyNoMoreInteractions(encoding); } @Test void shouldSetStringBodyUsingCharsetSet() { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); // When httpBody.setBody(BODY_1_STRING_UTF_8); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_UTF_8))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_UTF_8))); } @Test void shouldSetStringBodyUsingCharsetSetAndContentEncode() throws IOException { // Given HttpBody httpBody = new HttpBodyImpl(); httpBody.setCharset(UTF_8_NAME); HttpEncoding encoding = mock(HttpEncoding.class); httpBody.setContentEncodings(Arrays.asList(encoding)); given(encoding.encode(BODY_1_BYTES_UTF_8)).willReturn(BODY_1_BYTES_UTF_8); // When httpBody.setBody(BODY_1_STRING_UTF_8); // Then assertThat(httpBody.length(), is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(BODY_1_BYTES_UTF_8))); assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_BYTES_UTF_8.length))); assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_UTF_8))); verify(encoding).encode(BODY_1_BYTES_UTF_8); verifyNoMoreInteractions(encoding); } @Test void shouldIgnoreNullBytesBodyAppended() { // Given HttpBody httpBody = new HttpBodyImpl("\0"); // When httpBody.append((byte[]) null); // Then assertThat(httpBody.length(), is(equalTo(1))); assertThat(httpBody.getBytes(), is(not(nullValue()))); assertThat(httpBody.getBytes(), is(equalTo(new byte[] {0}))); assertThat(httpBody.getBytes().length, is(equalTo(1))); assertThat(httpBody.toString(), 
is(equalTo("\0"))); } // tail of a test method that begins before this chunk; left unchanged

    // -----------------------------------------------------------------------
    // Interior of the HttpBody unit-test class. The class declaration, its
    // imports (JUnit 5, Hamcrest, Mockito BDD) and the BODY_* fixture
    // constants (BODY_1_STRING, BODY_*_BYTES_DEFAULT_CHARSET/UTF_8, etc.) are
    // defined above this chunk — TODO confirm against the full file.
    // Tests below exercise append()/setBody()/setLength()/setContent(),
    // charset handling, content-encoding hooks, and equals()/hashCode().
    // -----------------------------------------------------------------------

    @Test
    void shouldIgnoreNullStringBodyAppended() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("\0");
        // When
        httpBody.append((String) null);
        // Then — body is unchanged: a null String append is a no-op.
        assertThat(httpBody.length(), is(equalTo(1)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(new byte[] {0})));
        assertThat(httpBody.getBytes().length, is(equalTo(1)));
        assertThat(httpBody.toString(), is(equalTo("\0")));
    }

    @Test
    void shouldAppendBytesBodyUsingDefaultCharset() {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_STRING);
        // When
        httpBody.append(BODY_2_BYTES_DEFAULT_CHARSET);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET)));
        assertThat(
                httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_DEFAULT_CHARSET)));
    }

    @Test
    void shouldAppendBytesBodyUsingDefaultCharsetAndNotContentEncode() throws IOException {
        // Given — appending raw bytes should only trigger a decode (for the
        // cached string form), never a re-encode.
        HttpBody httpBody = new HttpBodyImpl(BODY_1_STRING);
        HttpEncoding encoding = mock(HttpEncoding.class);
        httpBody.setContentEncodings(Arrays.asList(encoding));
        given(encoding.decode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET))
                .willReturn(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        // When
        httpBody.append(BODY_2_BYTES_DEFAULT_CHARSET);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET)));
        assertThat(
                httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_DEFAULT_CHARSET)));
        verify(encoding).decode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        verifyNoMoreInteractions(encoding);
    }

    @Test
    void shouldAppendStringBodyUsingDefaultCharset() {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_STRING);
        // When
        httpBody.append(BODY_2_STRING);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET)));
        assertThat(
                httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_DEFAULT_CHARSET)));
    }

    @Test
    void shouldAppendStringBodyUsingDefaultCharsetAndContentEncode() throws IOException {
        // Given — appending a String goes through decode -> append -> encode,
        // then a final decode for the cached content; all three are stubbed.
        HttpBody httpBody = new HttpBodyImpl(BODY_1_STRING);
        HttpEncoding encoding = mock(HttpEncoding.class);
        given(encoding.decode(BODY_1_BYTES_DEFAULT_CHARSET))
                .willReturn(BODY_1_BYTES_DEFAULT_CHARSET);
        given(encoding.encode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET))
                .willReturn(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        given(encoding.decode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET))
                .willReturn(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        httpBody.setContentEncodings(asList(encoding));
        // When
        httpBody.append(BODY_2_STRING);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET)));
        assertThat(
                httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_DEFAULT_CHARSET.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_DEFAULT_CHARSET)));
        verify(encoding).decode(BODY_1_BYTES_DEFAULT_CHARSET);
        verify(encoding).encode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        verify(encoding).decode(BODY_1_AND_2_BYTES_DEFAULT_CHARSET);
        verifyNoMoreInteractions(encoding);
    }

    @Test
    void shouldAppendBytesBodyUsingCharsetSet() {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_BYTES_UTF_8);
        httpBody.setCharset(UTF_8_NAME);
        // When
        httpBody.append(BODY_2_BYTES_UTF_8);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8)));
        assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_UTF_8)));
    }

    @Test
    void shouldAppendBytesBodyUsingCharsetSetAndNotContentEncode() throws IOException {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_BYTES_UTF_8);
        httpBody.setCharset(UTF_8_NAME);
        HttpEncoding encoding = mock(HttpEncoding.class);
        httpBody.setContentEncodings(asList(encoding));
        given(encoding.decode(BODY_1_AND_2_BYTES_UTF_8)).willReturn(BODY_1_AND_2_BYTES_UTF_8);
        // When
        httpBody.append(BODY_2_BYTES_UTF_8);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8)));
        assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_UTF_8)));
        verify(encoding).decode(BODY_1_AND_2_BYTES_UTF_8);
        verifyNoMoreInteractions(encoding);
    }

    @Test
    void shouldAppendStringBodyUsingCharsetSet() {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_BYTES_UTF_8);
        httpBody.setCharset(UTF_8_NAME);
        // When
        httpBody.append(BODY_2_STRING_UTF_8);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8)));
        assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_UTF_8)));
    }

    @Test
    void shouldAppendStringBodyUsingCharsetSetAndContentEncode() throws IOException {
        // Given
        HttpBody httpBody = new HttpBodyImpl(BODY_1_BYTES_UTF_8);
        httpBody.setCharset(UTF_8_NAME);
        HttpEncoding encoding = mock(HttpEncoding.class);
        given(encoding.decode(BODY_1_BYTES_UTF_8)).willReturn(BODY_1_BYTES_UTF_8);
        given(encoding.encode(BODY_1_AND_2_BYTES_UTF_8)).willReturn(BODY_1_AND_2_BYTES_UTF_8);
        given(encoding.decode(BODY_1_AND_2_BYTES_UTF_8)).willReturn(BODY_1_AND_2_BYTES_UTF_8);
        httpBody.setContentEncodings(Arrays.asList(encoding));
        // When
        httpBody.append(BODY_2_STRING_UTF_8);
        // Then
        assertThat(httpBody.length(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(BODY_1_AND_2_BYTES_UTF_8)));
        assertThat(httpBody.getBytes().length, is(equalTo(BODY_1_AND_2_BYTES_UTF_8.length)));
        assertThat(httpBody.toString(), is(equalTo(BODY_1_AND_2_STRING_UTF_8)));
        verify(encoding).decode(BODY_1_BYTES_UTF_8);
        verify(encoding).encode(BODY_1_AND_2_BYTES_UTF_8);
        verify(encoding).decode(BODY_1_AND_2_BYTES_UTF_8);
        verifyNoMoreInteractions(encoding);
    }

    @Test
    void shouldAppendFullByteArray() {
        // Given
        byte[] chunk = {0, 1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.append(chunk, chunk.length);
        // Then
        assertThat(httpBody.length(), is(equalTo(chunk.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(chunk)));
        assertThat(httpBody.getBytes().length, is(equalTo(chunk.length)));
        assertThat(httpBody.toString(), is(equalTo("\0\1\2\3\4\5")));
    }

    @Test
    void shouldAppendByteArrayChunk() {
        // Given — only the first chunkLen bytes of the array are appended.
        byte[] bytes = {1, 2, 3, 4, 5};
        int chunkLen = 3;
        byte[] chunk = java.util.Arrays.copyOf(bytes, chunkLen);
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.append(bytes, chunkLen);
        // Then
        assertThat(httpBody.length(), is(equalTo(chunk.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(chunk)));
        assertThat(httpBody.getBytes().length, is(equalTo(chunk.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3")));
    }

    @Test
    void shouldAppendByteArrayToExistingData() {
        // Given
        byte[] bytes = {1, 2, 3, 4, 5, 6, 7};
        byte[] chunk = Arrays.copyOfRange(bytes, 3, bytes.length);
        HttpBody httpBody = new HttpBodyImpl(Arrays.copyOf(bytes, bytes.length - chunk.length));
        // When
        httpBody.append(chunk, chunk.length);
        // Then
        assertThat(httpBody.length(), is(equalTo(bytes.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(bytes)));
        assertThat(httpBody.getBytes().length, is(equalTo(bytes.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3\4\5\6\7")));
    }

    @Test
    void shouldAppendByteArrayToBodyWithHigherInitialCapacity() {
        // Given — capacity behaves as pre-allocated length: the appended chunk
        // is padded with zero bytes up to the initial capacity.
        int initialCapacity = 10;
        byte[] chunk = {1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl(initialCapacity);
        byte[] expectedBytes = Arrays.copyOf(chunk, initialCapacity);
        // When
        httpBody.append(chunk, chunk.length);
        // Then
        assertThat(httpBody.length(), is(equalTo(expectedBytes.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(expectedBytes)));
        assertThat(httpBody.getBytes().length, is(equalTo(expectedBytes.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3\4\5")));
    }

    @Test
    void shouldAppendByteArrayToBodyWithLowerInitialCapacity() {
        // Given
        byte[] chunk = {1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl(3);
        // When
        httpBody.append(chunk, chunk.length);
        // Then — the body grows past its initial capacity.
        assertThat(httpBody.length(), is(equalTo(chunk.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(chunk)));
        assertThat(httpBody.getBytes().length, is(equalTo(chunk.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3\4\5")));
    }

    @Test
    void shouldIgnoreAppendOfNullByteArray() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.append(null, 5);
        // Then
        assertThat(httpBody.length(), is(equalTo(0)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(allZeroBytes()));
        assertThat(httpBody.getBytes().length, is(equalTo(0)));
        assertThat(httpBody.toString(), is(equalTo("")));
    }

    @Test
    void shouldIgnoreAppendOfByteArrayIfNegativeLength() {
        // Given
        byte[] chunk = {1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.append(chunk, -1);
        // Then
        assertThat(httpBody.length(), is(equalTo(0)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(allZeroBytes()));
        assertThat(httpBody.getBytes().length, is(equalTo(0)));
        assertThat(httpBody.toString(), is(equalTo("")));
    }

    @Test
    void shouldApplyCharsetSetToStringRepresentation() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.setCharset(UTF_8_NAME);
        httpBody.toString(); // force the creation of the "old" string representation
        httpBody.setBody(BODY_1_BYTES_UTF_8);
        // Then — setBody must invalidate the cached string representation.
        assertThat(httpBody.toString(), is(equalTo(BODY_1_STRING_UTF_8)));
    }

    @Test
    void shouldExpandBodyWithSetLength() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.setLength(50);
        // Then — expansion zero-fills; toString still reflects the old (empty) data.
        assertThat(httpBody.length(), is(equalTo(50)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(allZeroBytes()));
        assertThat(httpBody.getBytes().length, is(equalTo(50)));
        assertThat(httpBody.toString(), is(equalTo("")));
    }

    @Test
    void shouldTruncateBodyWithSetLength() {
        // Given
        byte[] body = {1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl(body);
        byte[] expectedBytes = Arrays.copyOf(body, 3);
        // When
        httpBody.setLength(expectedBytes.length);
        // Then
        assertThat(httpBody.length(), is(equalTo(expectedBytes.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(expectedBytes)));
        assertThat(httpBody.getBytes().length, is(equalTo(expectedBytes.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3")));
    }

    @Test
    void shouldProduceSameStringRepresentationEvenIfBodyIsExpandedWithSetLength() {
        // Given
        byte[] body = {1, 2, 3, 4, 5};
        HttpBody httpBody = new HttpBodyImpl(body);
        byte[] expectedBytes = concatenate(body, new byte[] {0, 0});
        // When
        httpBody.setLength(expectedBytes.length);
        // Then — zero padding does not show up in the string form.
        assertThat(httpBody.length(), is(equalTo(expectedBytes.length)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(equalTo(expectedBytes)));
        assertThat(httpBody.getBytes().length, is(equalTo(expectedBytes.length)));
        assertThat(httpBody.toString(), is(equalTo("\1\2\3\4\5")));
    }

    @Test
    void shouldIgnoreNegativeLengthSet() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When
        httpBody.setLength(-1);
        // Then
        assertThat(httpBody.length(), is(equalTo(0)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(allZeroBytes()));
        assertThat(httpBody.getBytes().length, is(equalTo(0)));
        assertThat(httpBody.toString(), is(equalTo("")));
    }

    @Test
    void shouldIgnoreSameLengthSet() {
        // Given
        HttpBody httpBody = new HttpBodyImpl(50);
        // When
        httpBody.setLength(50);
        // Then
        assertThat(httpBody.length(), is(equalTo(50)));
        assertThat(httpBody.getBytes(), is(not(nullValue())));
        assertThat(httpBody.getBytes(), is(allZeroBytes()));
        assertThat(httpBody.getBytes().length, is(equalTo(50)));
        assertThat(httpBody.toString(), is(equalTo("")));
    }

    @Test
    void shouldReturnSameInstanceStringRepresentationOnConsecutiveCalls() {
        // Given
        String body = " X Y Z ";
        HttpBody httpBody = new HttpBodyImpl(body);
        // When
        String string1 = httpBody.toString();
        String string2 = httpBody.toString();
        // Then
        assertThat(string1, is(equalTo(body)));
        // NOTE(review): this compares string2 with itself and therefore always
        // passes — probably meant is(sameInstance(string1)). Left unchanged.
        assertThat(string2, is(sameInstance(string2)));
    }

    @Test
    void shouldGetContentDecoded() throws IOException {
        // Given
        String bodyData = "ABC";
        HttpBody httpBody = new HttpBodyImpl(bodyData);
        HttpEncoding contentEncoding = mock(HttpEncoding.class);
        byte[] decodedContent = bytes(bodyData);
        given(contentEncoding.decode(any())).willReturn(decodedContent);
        httpBody.setContentEncodings(asList(contentEncoding));
        // When
        byte[] content = httpBody.getContent();
        // Then — getContent returns the decoder's output verbatim.
        assertThat(content, is(sameInstance(decodedContent)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
    }

    @Test
    void shouldGetSameDecodedContent() throws IOException {
        // Given
        HttpBody httpBody = new HttpBodyImpl("");
        HttpEncoding contentEncoding = mock(HttpEncoding.class);
        given(contentEncoding.decode(any())).willReturn(bytes("ABC"));
        httpBody.setContentEncodings(asList(contentEncoding));
        // When
        byte[] content = httpBody.getContent();
        byte[] otherContent = httpBody.getContent();
        // Then — decoded content is cached, not re-decoded per call.
        assertThat(content, is(sameInstance(otherContent)));
    }

    @Test
    void shouldGetContentSameAsBytesIfNoContentEncoding() {
        // Given
        String body = " X Y Z ";
        HttpBody httpBody = new HttpBodyImpl(body);
        // When
        byte[] content = httpBody.getContent();
        // Then
        assertThat(content, is(sameInstance(httpBody.getBytes())));
    }

    @Test
    void shouldSetContentAndEncode() throws IOException {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        HttpEncoding contentEncoding = mock(HttpEncoding.class);
        byte[] encodedContent = bytes("ABC");
        given(contentEncoding.encode(any())).willReturn(encodedContent);
        httpBody.setContentEncodings(asList(contentEncoding));
        String bodyData = "CBA";
        byte[] decodedContent = bytes(bodyData);
        // When
        httpBody.setContent(decodedContent);
        // Then — the wire bytes are the encoded form; getContent keeps the
        // decoded form; toString uses the decoded form.
        assertThat(httpBody.getBytes(), is(equalTo(encodedContent)));
        assertThat(httpBody.getContent(), is(sameInstance(decodedContent)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
    }

    @Test
    void shouldReturnToStringForContentSet() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        String bodyData = "ABC";
        byte[] content = bytes(bodyData);
        // When
        httpBody.toString(); // force the creation of the "old" string representation
        httpBody.setContent(content);
        // Then — setContent must invalidate the cached string representation.
        assertThat(httpBody.getBytes(), is(equalTo(content)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
    }

    @Test
    void shouldSetContentWithoutEncodingIfNoContentEncoding() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        String bodyData = "ABC";
        byte[] content = bytes(bodyData);
        // When
        httpBody.setContent(content);
        // Then
        assertThat(httpBody.getBytes(), is(equalTo(content)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
    }

    @Test
    void shouldNotSetContentIfNull() {
        // Given
        String bodyData = "ABC";
        HttpBody httpBody = new HttpBodyImpl(bodyData);
        // When
        httpBody.setContent(null);
        // Then — null content is ignored, previous body kept.
        assertThat(httpBody.getBytes(), is(equalTo(bytes(bodyData))));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
    }

    @Test
    void shouldHandleContentEncodingErrorsWhenDecoding() throws IOException {
        // Given — a failing decoder must not lose the raw bytes and must be
        // reported through hasContentEncodingErrors().
        HttpBody httpBody = new HttpBodyImpl();
        HttpEncoding contentEncoding = mock(HttpEncoding.class);
        given(contentEncoding.decode(any())).willThrow(IOException.class);
        httpBody.setContentEncodings(asList(contentEncoding));
        String bodyData = "CBA";
        byte[] bodyBytes = bytes(bodyData);
        // When
        httpBody.setBody(bodyBytes);
        // Then
        assertThat(httpBody.getBytes(), is(equalTo(bodyBytes)));
        assertThat(httpBody.getContent(), is(equalTo(bodyBytes)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
        assertThat(httpBody.hasContentEncodingErrors(), is(equalTo(true)));
    }

    @Test
    void shouldHandleContentEncodingErrorsWhenEncoding() throws IOException {
        // Given — same as above but the encoder fails (String overload of setBody).
        HttpBody httpBody = new HttpBodyImpl();
        HttpEncoding contentEncoding = mock(HttpEncoding.class);
        given(contentEncoding.encode(any())).willThrow(IOException.class);
        httpBody.setContentEncodings(asList(contentEncoding));
        String bodyData = "CBA";
        byte[] bodyBytes = bytes(bodyData);
        // When
        httpBody.setBody(bodyData);
        // Then
        assertThat(httpBody.getBytes(), is(equalTo(bodyBytes)));
        assertThat(httpBody.getContent(), is(equalTo(bodyBytes)));
        assertThat(httpBody.toString(), is(equalTo(bodyData)));
        assertThat(httpBody.hasContentEncodingErrors(), is(equalTo(true)));
    }

    // --- equals() / hashCode() contract -----------------------------------

    @Test
    void shouldNotBeEqualToNull() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When
        boolean equals = httpBody.equals(null);
        // Then
        assertThat(equals, is(equalTo(false)));
    }

    @Test
    void shouldBeEqualToEqualHttpBody() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        HttpBody otherHttpBody = new HttpBodyImpl();
        // When / Then
        assertThat(httpBody, is(equalTo(otherHttpBody)));
    }

    @Test
    void shouldBeEqualToEqualEncodings() {
        // Given
        List<HttpEncoding> encodings = Arrays.asList(mock(HttpEncoding.class));
        HttpBody httpBody = new HttpBodyImpl();
        httpBody.setContentEncodings(encodings);
        HttpBody otherHttpBody = new HttpBodyImpl();
        otherHttpBody.setContentEncodings(encodings);
        // When / Then
        assertThat(httpBody, is(equalTo(otherHttpBody)));
    }

    @Test
    void shouldBeEqualToEqualHttpBodyAndEncodings() {
        // Given
        List<HttpEncoding> encodings = Arrays.asList(mock(HttpEncoding.class));
        HttpBody httpBody = new HttpBodyImpl("Body");
        httpBody.setContentEncodings(encodings);
        HttpBody otherHttpBody = new HttpBodyImpl("Body");
        otherHttpBody.setContentEncodings(encodings);
        // When / Then
        assertThat(httpBody, is(equalTo(otherHttpBody)));
    }

    @Test
    void shouldBeEqualToSameInstance() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        // When / Then
        assertThat(httpBody, is(equalTo(httpBody)));
    }

    @Test
    void shouldNotBeEqualToDifferentHttpBody() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        HttpBody otherDifferentHttpBody = new HttpBodyImpl("Different Contents");
        // When / Then
        assertThat(httpBody, is(not(equalTo(otherDifferentHttpBody))));
    }

    @Test
    void shouldNotBeEqualToDifferentEncodings() {
        // Given — distinct mock instances are unequal encodings.
        HttpBody httpBody = new HttpBodyImpl("Body");
        httpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        HttpBody otherDifferentHttpBody = new HttpBodyImpl("Body");
        otherDifferentHttpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        // When / Then
        assertThat(httpBody, is(not(equalTo(otherDifferentHttpBody))));
    }

    @Test
    void shouldNotBeEqualToDifferentHttpBodyAndEncodings() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("Body");
        httpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        HttpBody otherDifferentHttpBody = new HttpBodyImpl("Different Contents");
        otherDifferentHttpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        // When / Then
        assertThat(httpBody, is(not(equalTo(otherDifferentHttpBody))));
    }

    @Test
    void shouldNotBeEqualToDifferentHttpBodyImplementation() {
        // Given
        HttpBody httpBody = new HttpBodyImpl();
        HttpBody otherHttpBodyImplementation = new HttpBody() {};
        // When / Then
        assertThat(httpBody, is(not(equalTo(otherHttpBodyImplementation))));
    }

    @Test
    void shouldProduceSameHashCodeForEqualBody() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("X A");
        HttpBody otherHttpBody = new HttpBodyImpl("X A");
        // When / Then
        assertThat(httpBody.hashCode(), is(equalTo(otherHttpBody.hashCode())));
    }

    @Test
    void shouldProduceSameHashCodeForEqualBodyAndEncodings() {
        // Given
        List<HttpEncoding> encodings = Arrays.asList(mock(HttpEncoding.class));
        HttpBody httpBody = new HttpBodyImpl("X A");
        httpBody.setContentEncodings(encodings);
        HttpBody otherHttpBody = new HttpBodyImpl("X A");
        otherHttpBody.setContentEncodings(encodings);
        // When / Then
        assertThat(httpBody.hashCode(), is(equalTo(otherHttpBody.hashCode())));
    }

    @Test
    void shouldProduceDifferentHashCodeFromDifferentBody() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("_ X A 1");
        HttpBody otherHttpBody = new HttpBodyImpl("X A 2");
        // When / Then
        assertThat(httpBody.hashCode(), is(not(equalTo(otherHttpBody.hashCode()))));
    }

    @Test
    void shouldProduceDifferentHashCodeFromDifferentEncodings() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("X A");
        httpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        HttpBody otherHttpBody = new HttpBodyImpl("X A");
        otherHttpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        // When / Then
        assertThat(httpBody.hashCode(), is(not(equalTo(otherHttpBody.hashCode()))));
    }

    @Test
    void shouldProduceDifferentHashCodeFromDifferentBodyAndEncodings() {
        // Given
        HttpBody httpBody = new HttpBodyImpl("_ X A 1");
        httpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        HttpBody otherHttpBody = new HttpBodyImpl("X A 2");
        otherHttpBody.setContentEncodings(Arrays.asList(mock(HttpEncoding.class)));
        // When / Then
        assertThat(httpBody.hashCode(), is(not(equalTo(otherHttpBody.hashCode()))));
    }

    // Encodes the string as US-ASCII wire bytes (the fixtures are ASCII-safe).
    private static byte[] bytes(String data) {
        return data.getBytes(StandardCharsets.US_ASCII);
    }

    // Minimal concrete HttpBody used as the system under test; records whether
    // determineCharset() was invoked so charset-detection tests can assert on it.
    private static class HttpBodyImpl extends HttpBody {

        private boolean determineCharsetCalled;

        HttpBodyImpl() {}

        HttpBodyImpl(int capacity) {
            super(capacity);
        }

        HttpBodyImpl(String data) {
            super(data);
        }

        HttpBodyImpl(byte[] data) {
            super(data);
        }

        @Override
        protected Charset determineCharset(String contents) {
            determineCharsetCalled = true;
            return super.determineCharset(contents);
        }

        boolean isDetermineCharsetCalled() {
            return determineCharsetCalled;
        }
    }
}
/*
 * Copyright (c) 2016 Tim Malseed
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.simplecityapps.recyclerview_fastscroll.views;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.support.annotation.ColorInt;
import android.support.v4.view.animation.FastOutLinearInInterpolator;
import android.support.v4.view.animation.LinearOutSlowInInterpolator;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.ViewConfiguration;

import com.simplecityapps.recyclerview_fastscroll.R;
import com.simplecityapps.recyclerview_fastscroll.interfaces.OnFastScrollStateChangeListener;
import com.simplecityapps.recyclerview_fastscroll.utils.Utils;

/**
 * Draws and drives the draggable fast-scroll thumb, track and section popup for a
 * {@link FastScrollRecyclerView}. The host view forwards touch events via
 * {@link #handleTouchEvent} and draws via {@link #draw}; auto-hide slides the bar
 * off-screen by animating the "offsetX" property after {@link #mAutoHideDelay} ms.
 */
public class FastScroller {

    /** Default auto-hide delay in milliseconds. */
    private static final int DEFAULT_AUTO_HIDE_DELAY = 1500;

    private FastScrollRecyclerView mRecyclerView;
    private FastScrollPopup mPopup;

    // Thumb height and bar width, in pixels (48dp / 8dp converted via Utils.toPixels).
    private int mThumbHeight;
    private int mWidth;

    private Paint mThumb;
    private Paint mTrack;

    // Scratch rects reused across calls to avoid per-frame allocations.
    private Rect mTmpRect = new Rect();
    private Rect mInvalidateRect = new Rect();
    private Rect mInvalidateTmpRect = new Rect();

    // The inset is the buffer around which a point will still register as a click on the scrollbar
    // (negative value here widens the touch target — see isNearPoint()).
    private int mTouchInset;

    // This is the offset from the top of the scrollbar when the user first starts touching. To
    // prevent jumping, this offset is applied as the user scrolls.
    private int mTouchOffset;

    // Current thumb position; (-1, -1) means "not laid out yet" and suppresses drawing.
    public Point mThumbPosition = new Point(-1, -1);
    // Horizontal/vertical offset applied while animating the bar on/off screen.
    public Point mOffset = new Point(0, 0);

    private boolean mIsDragging;

    private Animator mAutoHideAnimator;
    boolean mAnimatingShow;
    private int mAutoHideDelay = DEFAULT_AUTO_HIDE_DELAY;
    private boolean mAutoHideEnabled = true;
    private final Runnable mHideRunnable;

    public FastScroller(Context context, FastScrollRecyclerView recyclerView, AttributeSet attrs) {

        Resources resources = context.getResources();

        mRecyclerView = recyclerView;

        mPopup = new FastScrollPopup(resources, recyclerView);

        mThumbHeight = Utils.toPixels(resources, 48);
        mWidth = Utils.toPixels(resources, 8);

        mTouchInset = Utils.toPixels(resources, -24);

        mThumb = new Paint(Paint.ANTI_ALIAS_FLAG);
        mTrack = new Paint(Paint.ANTI_ALIAS_FLAG);

        // Read styleable attributes; recycle the TypedArray even if a read throws.
        TypedArray typedArray = context.getTheme().obtainStyledAttributes(
                attrs, R.styleable.FastScrollRecyclerView, 0, 0);
        try {
            mAutoHideEnabled = typedArray.getBoolean(R.styleable.FastScrollRecyclerView_fastScrollAutoHide, true);
            mAutoHideDelay = typedArray.getInteger(R.styleable.FastScrollRecyclerView_fastScrollAutoHideDelay, DEFAULT_AUTO_HIDE_DELAY);
            int trackColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollTrackColor, 0x1f000000);
            int thumbColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollThumbColor, 0xff000000);
            int popupBgColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollPopupBgColor, 0xff000000);
            int popupTextColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollPopupTextColor, 0xffffffff);
            int popupTextSize = typedArray.getDimensionPixelSize(R.styleable.FastScrollRecyclerView_fastScrollPopupTextSize, Utils.toScreenPixels(resources, 56));
            int popupBackgroundSize = typedArray.getDimensionPixelSize(R.styleable.FastScrollRecyclerView_fastScrollPopupBackgroundSize, Utils.toPixels(resources, 88));

            mTrack.setColor(trackColor);
            mThumb.setColor(thumbColor);
            mPopup.setBgColor(popupBgColor);
            mPopup.setTextColor(popupTextColor);
            mPopup.setTextSize(popupTextSize);
            mPopup.setBackgroundSize(popupBackgroundSize);
        } finally {
            typedArray.recycle();
        }

        // Slides the bar off-screen (towards the near edge in RTL) by animating
        // the "offsetX" property — resolved reflectively against setOffsetX/getOffsetX.
        mHideRunnable = new Runnable() {
            @Override
            public void run() {
                if (!mIsDragging) {
                    if (mAutoHideAnimator != null) {
                        mAutoHideAnimator.cancel();
                    }
                    mAutoHideAnimator = ObjectAnimator.ofInt(FastScroller.this, "offsetX",
                            (Utils.isRtl(mRecyclerView.getResources()) ? -1 : 1) * mWidth);
                    mAutoHideAnimator.setInterpolator(new FastOutLinearInInterpolator());
                    mAutoHideAnimator.setDuration(200);
                    mAutoHideAnimator.start();
                }
            }
        };

        // Any scroll re-shows the bar (and re-arms auto-hide via show()).
        mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                super.onScrolled(recyclerView, dx, dy);
                show();
            }
        });

        if (mAutoHideEnabled) {
            postAutoHideDelayed();
        }
    }

    public int getThumbHeight() {
        return mThumbHeight;
    }

    public int getWidth() {
        return mWidth;
    }

    public boolean isDragging() {
        return mIsDragging;
    }

    /**
     * Handles the touch event and determines whether to show the fast scroller (or updates it if
     * it is already showing).
     */
    public void handleTouchEvent(MotionEvent ev, int downX, int downY, int lastY,
                                 OnFastScrollStateChangeListener stateChangeListener) {
        ViewConfiguration config = ViewConfiguration.get(mRecyclerView.getContext());

        int action = ev.getAction();
        int y = (int) ev.getY();
        switch (action) {
            case MotionEvent.ACTION_DOWN:
                if (isNearPoint(downX, downY)) {
                    // Remember where inside the thumb the finger landed, so the
                    // thumb doesn't jump to center on the finger.
                    mTouchOffset = downY - mThumbPosition.y;
                }
                break;
            case MotionEvent.ACTION_MOVE:
                // Check if we should start scrolling
                if (!mIsDragging && isNearPoint(downX, downY) &&
                        Math.abs(y - downY) > config.getScaledTouchSlop()) {
                    mRecyclerView.getParent().requestDisallowInterceptTouchEvent(true);
                    mIsDragging = true;
                    mTouchOffset += (lastY - downY);
                    mPopup.animateVisibility(true);
                    if (stateChangeListener != null) {
                        stateChangeListener.onFastScrollStart();
                    }
                }
                if (mIsDragging) {
                    // Update the fastscroller section name at this touch position
                    int top = 0;
                    int bottom = mRecyclerView.getHeight() - mThumbHeight;
                    float boundedY = (float) Math.max(top, Math.min(bottom, y - mTouchOffset));
                    // NOTE(review): if the view height ever equals mThumbHeight,
                    // (bottom - top) is 0 and the progress becomes NaN — TODO confirm
                    // upstream guarantees height > thumb height.
                    String sectionName = mRecyclerView.scrollToPositionAtProgress((boundedY - top) / (bottom - top));
                    mPopup.setSectionName(sectionName);
                    mPopup.animateVisibility(!sectionName.isEmpty());
                    mRecyclerView.invalidate(mPopup.updateFastScrollerBounds(mRecyclerView, mThumbPosition.y));
                }
                break;
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_CANCEL:
                mTouchOffset = 0;
                if (mIsDragging) {
                    mIsDragging = false;
                    mPopup.animateVisibility(false);
                    if (stateChangeListener != null) {
                        stateChangeListener.onFastScrollStop();
                    }
                }
                break;
        }
    }

    /** Draws track, thumb and popup; no-op until a valid thumb position is set. */
    public void draw(Canvas canvas) {

        if (mThumbPosition.x < 0 || mThumbPosition.y < 0) {
            return;
        }

        //Background
        // (mThumbHeight / 2 is intentional integer division: the track is inset
        // by half a thumb at both ends.)
        canvas.drawRect(mThumbPosition.x + mOffset.x, mThumbHeight / 2 + mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y - mThumbHeight / 2, mTrack);

        //Handle
        canvas.drawRect(mThumbPosition.x + mOffset.x, mThumbPosition.y + mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mThumbPosition.y + mOffset.y + mThumbHeight, mThumb);

        //Popup
        mPopup.draw(canvas);
    }

    /**
     * Returns whether the specified points are near the scroll bar bounds.
     */
    private boolean isNearPoint(int x, int y) {
        mTmpRect.set(mThumbPosition.x, mThumbPosition.y, mThumbPosition.x + mWidth,
                mThumbPosition.y + mThumbHeight);
        mTmpRect.inset(mTouchInset, mTouchInset);
        return mTmpRect.contains(x, y);
    }

    public void setThumbPosition(int x, int y) {
        if (mThumbPosition.x == x && mThumbPosition.y == y) {
            return;
        }
        // do not create new objects here, this is called quite often
        // Invalidate the union of the old and new bar columns.
        mInvalidateRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
        mThumbPosition.set(x, y);
        mInvalidateTmpRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
        mInvalidateRect.union(mInvalidateTmpRect);
        mRecyclerView.invalidate(mInvalidateRect);
    }

    public void setOffset(int x, int y) {
        if (mOffset.x == x && mOffset.y == y) {
            return;
        }
        // do not create new objects here, this is called quite often
        mInvalidateRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
        mOffset.set(x, y);
        mInvalidateTmpRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
        mInvalidateRect.union(mInvalidateTmpRect);
        mRecyclerView.invalidate(mInvalidateRect);
    }

    // Setter/getter for the popup alpha for animations
    // (must stay public: ObjectAnimator targets the "offsetX" property reflectively)
    public void setOffsetX(int x) {
        setOffset(x, mOffset.y);
    }

    public int getOffsetX() {
        return mOffset.x;
    }

    /** Animates the bar back on-screen and (re-)arms the auto-hide timer. */
    public void show() {
        if (!mAnimatingShow) {
            if (mAutoHideAnimator != null) {
                mAutoHideAnimator.cancel();
            }
            mAutoHideAnimator = ObjectAnimator.ofInt(this, "offsetX", 0);
            mAutoHideAnimator.setInterpolator(new LinearOutSlowInInterpolator());
            mAutoHideAnimator.setDuration(150);
            mAutoHideAnimator.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationCancel(Animator animation) {
                    super.onAnimationCancel(animation);
                    mAnimatingShow = false;
                }

                @Override
                public void onAnimationEnd(Animator animation) {
                    super.onAnimationEnd(animation);
                    mAnimatingShow = false;
                }
            });
            mAnimatingShow = true;
            mAutoHideAnimator.start();
        }
        if (mAutoHideEnabled) {
            postAutoHideDelayed();
        } else {
            cancelAutoHide();
        }
    }

    // Re-arm the auto-hide timer, replacing any pending hide.
    protected void postAutoHideDelayed() {
        if (mRecyclerView != null) {
            cancelAutoHide();
            mRecyclerView.postDelayed(mHideRunnable, mAutoHideDelay);
        }
    }

    protected void cancelAutoHide() {
        if (mRecyclerView != null) {
            mRecyclerView.removeCallbacks(mHideRunnable);
        }
    }

    public void setThumbColor(@ColorInt int color) {
        mThumb.setColor(color);
        mRecyclerView.invalidate(mInvalidateRect);
    }

    public void setTrackColor(@ColorInt int color) {
        mTrack.setColor(color);
        mRecyclerView.invalidate(mInvalidateRect);
    }

    public void setPopupBgColor(@ColorInt int color) {
        mPopup.setBgColor(color);
    }

    public void setPopupTextColor(@ColorInt int color) {
        mPopup.setTextColor(color);
    }

    public void setPopupTypeface(Typeface typeface) {
        mPopup.setTypeface(typeface);
    }

    public void setPopupTextSize(int size) {
        mPopup.setTextSize(size);
    }

    public void setAutoHideDelay(int hideDelay) {
        mAutoHideDelay = hideDelay;
        if (mAutoHideEnabled) {
            postAutoHideDelayed();
        }
    }

    public void setAutoHideEnabled(boolean autoHideEnabled) {
        mAutoHideEnabled = autoHideEnabled;
        if (autoHideEnabled) {
            postAutoHideDelayed();
        } else {
            cancelAutoHide();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.lucene.misc;

import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.FieldInvertState;

/**
 * Test of the SweetSpotSimilarity
 */
public class TestSweetSpotSimilarity extends LuceneTestCase {

  /**
   * Length-norm "sweet spot": within [min,max] the norm is flat at 1.0; outside,
   * it degrades like DefaultSimilarity shifted by the plateau width. Also checks
   * per-field overrides and that a larger steepness drops the norm faster.
   * Note: the shared FieldInvertState is mutated between computeNorm calls, so
   * statement order matters throughout.
   */
  public void testSweetSpotComputeNorm() {

    SweetSpotSimilarity ss = new SweetSpotSimilarity();
    ss.setLengthNormFactors(1,1,0.5f);
    Similarity d = new DefaultSimilarity();
    Similarity s = ss;

    // base case, should degrade
    final FieldInvertState invertState = new FieldInvertState();
    invertState.setBoost(1.0f);
    for (int i = 1; i < 1000; i++) {
      invertState.setLength(i);
      assertEquals("base case: i="+i,
                   d.computeNorm("foo", invertState),
                   s.computeNorm("foo", invertState), 0.0f);
    }

    // make a sweet spot
    ss.setLengthNormFactors(3,10,0.5f);
    for (int i = 3; i <=10; i++) {
      invertState.setLength(i);
      assertEquals("3,10: spot i="+i,
                   1.0f, s.computeNorm("foo", invertState), 0.0f);
    }
    // beyond the plateau, the curve matches the default shifted by (max - min) = 9.
    for (int i = 10; i < 1000; i++) {
      invertState.setLength(i-9);
      final float normD = d.computeNorm("foo", invertState);
      invertState.setLength(i);
      final float normS = s.computeNorm("foo", invertState);
      assertEquals("3,10: 10<x : i="+i, normD, normS, 0.0f);
    }

    // separate sweet spot for certain fields
    ss.setLengthNormFactors("bar",8,13, 0.5f, false);
    ss.setLengthNormFactors("yak",6,9, 0.5f, false);

    // "foo" keeps the global 3..10 plateau...
    for (int i = 3; i <=10; i++) {
      invertState.setLength(i);
      assertEquals("f: 3,10: spot i="+i,
                   1.0f, s.computeNorm("foo", invertState), 0.0f);
    }
    for (int i = 10; i < 1000; i++) {
      invertState.setLength(i-9);
      final float normD = d.computeNorm("foo", invertState);
      invertState.setLength(i);
      final float normS = s.computeNorm("foo", invertState);
      assertEquals("f: 3,10: 10<x : i="+i, normD, normS, 0.0f);
    }
    // ...while "bar" and "yak" use their per-field plateaus.
    for (int i = 8; i <=13; i++) {
      invertState.setLength(i);
      assertEquals("f: 8,13: spot i="+i,
                   1.0f, s.computeNorm("bar", invertState), 0.0f);
    }
    for (int i = 6; i <=9; i++) {
      invertState.setLength(i);
      assertEquals("f: 6,9: spot i="+i,
                   1.0f, s.computeNorm("yak", invertState), 0.0f);
    }
    for (int i = 13; i < 1000; i++) {
      invertState.setLength(i-12);
      final float normD = d.computeNorm("foo", invertState);
      invertState.setLength(i);
      final float normS = s.computeNorm("bar", invertState);
      assertEquals("f: 8,13: 13<x : i="+i, normD, normS, 0.0f);
    }
    for (int i = 9; i < 1000; i++) {
      invertState.setLength(i-8);
      final float normD = d.computeNorm("foo", invertState);
      invertState.setLength(i);
      final float normS = s.computeNorm("yak", invertState);
      assertEquals("f: 6,9: 9<x : i="+i, normD, normS, 0.0f);
    }

    // steepness — same plateau, steeper curve ("a") must stay below shallower ("b").
    ss.setLengthNormFactors("a",5,8,0.5f, false);
    ss.setLengthNormFactors("b",5,8,0.1f, false);
    for (int i = 9; i < 1000; i++) {
      invertState.setLength(i);
      final float normSS = ss.computeNorm("a", invertState);
      final float normS = s.computeNorm("b", invertState);
      assertTrue("s: i="+i+" : a="+normSS+
                 " < b="+normS, normSS < normS);
    }
  }

  /**
   * Baseline tf: with (base, min) factors, tf is flat at {@code base} for
   * freq <= min and strictly below DefaultSimilarity's tf beyond it.
   */
  public void testSweetSpotTf() {

    SweetSpotSimilarity ss = new SweetSpotSimilarity();
    Similarity d = new DefaultSimilarity();
    Similarity s = ss;

    // tf equal
    ss.setBaselineTfFactors(0.0f, 0.0f);
    for (int i = 1; i < 1000; i++) {
      assertEquals("tf: i="+i,
                   d.tf(i), s.tf(i), 0.0f);
    }

    // tf higher
    ss.setBaselineTfFactors(1.0f, 0.0f);
    for (int i = 1; i < 1000; i++) {
      assertTrue("tf: i="+i+" : d="+d.tf(i)+
                 " < s="+s.tf(i), d.tf(i) < s.tf(i));
    }

    // tf flat
    ss.setBaselineTfFactors(1.0f, 6.0f);
    for (int i = 1; i <=6; i++) {
      assertEquals("tf flat1: i="+i, 1.0f, s.tf(i), 0.0f);
    }
    ss.setBaselineTfFactors(2.0f, 6.0f);
    for (int i = 1; i <=6; i++) {
      assertEquals("tf flat2: i="+i, 2.0f, s.tf(i), 0.0f);
    }
    for (int i = 6; i <=1000; i++) {
      assertTrue("tf: i="+i+" : s="+s.tf(i)+
                 " < d="+d.tf(i), s.tf(i) < d.tf(i));
    }

    // stupidity
    assertEquals("tf zero", 0.0f, s.tf(0), 0.0f);
  }

  /**
   * Hyperbolic tf: bounded in [min, max] with the midpoint reached at the
   * configured offset (5.0f here), and tf(0) == 0.
   */
  public void testHyperbolicSweetSpot() {

    SweetSpotSimilarity ss = new SweetSpotSimilarity() {
        @Override
        public float tf(int freq) {
          return hyperbolicTf(freq);
        }
      };
    ss.setHyperbolicTfFactors(3.3f, 7.7f, Math.E, 5.0f);

    Similarity s = ss;

    for (int i = 1; i <=1000; i++) {
      assertTrue("MIN tf: i="+i+" : s="+s.tf(i),
                 3.3f <= s.tf(i));
      assertTrue("MAX tf: i="+i+" : s="+s.tf(i),
                 s.tf(i) <= 7.7f);
    }
    assertEquals("MID tf", 3.3f+(7.7f - 3.3f)/2.0f, s.tf(5), 0.00001f);

    // stupidity
    assertEquals("tf zero", 0.0f, s.tf(0), 0.0f);
  }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.apple;

import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

import com.dd.plist.NSDictionary;
import com.dd.plist.NSNumber;
import com.dd.plist.NSString;
import com.dd.plist.PropertyListParser;
import com.facebook.buck.apple.toolchain.ApplePlatform;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.FakeAppleDeveloperEnvironment;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Integration tests that build Apple application bundles via Buck and verify the produced
 * bundle layout, binaries, and code signatures on disk.
 *
 * <p>These tests run only on macOS with the {@code macosx} Apple platform available (see
 * {@link #setUp()}); on any other platform every test is skipped via JUnit assumptions.
 */
public class AppleBundleIntegrationTest {

  // Per-test temporary directory that backs the scenario workspaces.
  @Rule public TemporaryPaths tmp = new TemporaryPaths();

  @Rule public ExpectedException thrown = ExpectedException.none();

  // Filesystem rooted at the temp dir; used to compute buck-out gen paths for assertions.
  private ProjectFilesystem filesystem;

  /**
   * Creates the project filesystem and skips the test (JUnit assumption, not failure) unless
   * running on macOS with the macosx platform toolchain available.
   */
  @Before
  public void setUp() {
    filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
  }

  /**
   * Returns whether the bundle or binary at the given absolute path has a valid code signature,
   * as determined by {@code CodeSigning.hasValidSignature}.
   *
   * @param absoluteBundlePath absolute path to the bundle or binary to check
   * @throws NoSuchFileException if nothing exists at {@code absoluteBundlePath}
   */
  private boolean checkCodeSigning(Path absoluteBundlePath)
      throws IOException, InterruptedException {
    if (!Files.exists(absoluteBundlePath)) {
      throw new NoSuchFileException(absoluteBundlePath.toString());
    }

    return CodeSigning.hasValidSignature(
        new DefaultProcessExecutor(new TestConsole()), absoluteBundlePath);
  }

  /**
   * Builds {@code fqtn} inside the named scenario workspace, verifies the generated output
   * against the scenario's {@code DemoApp_output.expected} file, and asserts that the .app
   * contains a code-signed binary and no Frameworks/ directory.
   *
   * @param scenario name of the test-data scenario directory to materialize
   * @param fqtn fully qualified build target to build (including flavors)
   * @return the set-up workspace so callers can run further commands against it
   */
  private ProjectWorkspace runApplicationBundleTestWithScenarioAndBuildTarget(
      String scenario, String fqtn) throws IOException, InterruptedException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, scenario, tmp);
    workspace.setUp();

    BuildTarget target = workspace.newBuildTarget(fqtn);
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();

    // Expected-output comparison is done against the no-include-frameworks flavored gen path.
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargetPaths.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));

    Path appPath =
        workspace.getPath(
            BuildTargetPaths.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    // The executable inside the .app is named after the target's short name.
    assertTrue(Files.exists(appPath.resolve(target.getShortName())));

    assertTrue(checkCodeSigning(appPath));

    // Non-Swift target shouldn't include Frameworks/
    assertFalse(Files.exists(appPath.resolve("Frameworks")));

    return workspace;
  }

  /**
   * Convenience wrapper: runs {@link #runApplicationBundleTestWithScenarioAndBuildTarget} with
   * the {@code simple_application_bundle_no_debug} scenario.
   */
  private void runSimpleApplicationBundleTestWithBuildTarget(String fqtn)
      throws IOException, InterruptedException {
    runApplicationBundleTestWithScenarioAndBuildTarget("simple_application_bundle_no_debug", fqtn);
  }

  /**
   * With {@code cxx.cache_links=false}, a clean followed by a rebuild of a fat (two-arch)
   * bundle should rebuild the binary locally rather than fetch it from the dir cache.
   */
  @Test
  public void testDisablingFatBinaryCaching() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_no_debug", tmp);
    workspace.setUp();

    String bundleTarget =
        "//:DemoApp#iphonesimulator-x86_64,iphonesimulator-i386,no-debug,no-include-frameworks";
    String binaryTarget =
        "//:DemoAppBinary#iphonesimulator-x86_64,iphonesimulator-i386,strip-non-global";

    workspace.enableDirCache();
    workspace.runBuckBuild("-c", "cxx.cache_links=false", bundleTarget).assertSuccess();
    // "clean --keep-cache" wipes buck-out but preserves the dir cache, so the next build could
    // only come from the cache — which cache_links=false is supposed to prevent for the binary.
    workspace.runBuckCommand("clean", "--keep-cache");
    workspace.runBuckBuild("-c", "cxx.cache_links=false", bundleTarget).assertSuccess();
    workspace.getBuildLog().assertTargetBuiltLocally(binaryTarget);
  }

  /**
   * With {@code apple.cache_bundles_and_packages=false}, a clean followed by a rebuild should
   * rebuild the bundle target locally rather than fetch it from the dir cache.
   */
  @Test
  public void testDisablingBundleCaching() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_no_debug", tmp);
    workspace.setUp();

    String target = "//:DemoApp#iphonesimulator-x86_64,no-debug,no-include-frameworks";

    workspace.enableDirCache();
    workspace.runBuckBuild("-c", "apple.cache_bundles_and_packages=false", target).assertSuccess();
    workspace.runBuckCommand("clean", "--keep-cache");
    workspace.runBuckBuild("-c", "apple.cache_bundles_and_packages=false", target).assertSuccess();
    workspace.getBuildLog().assertTargetBuiltLocally(target);
  }

  /** Baseline: a simple simulator bundle builds and passes the standard bundle checks. */
  @Test
  public void simpleApplicationBundle() throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
  }

  // NOTE(review): this uses the same target as simpleApplicationBundle — presumably linker maps
  // are enabled by default, so "with linker map" needs no extra flavor; confirm against the
  // no-linkermap variant below.
  @Test
  public void simpleApplicationBundleWithLinkerMapDoesNotAffectOutput()
      throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
  }

  /** Disabling the linker map (no-linkermap flavor) must not change the bundle output. */
  @Test
  public void simpleApplicationBundleWithoutLinkerMapDoesNotAffectOutput()
      throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget(
        "//:DemoApp#iphonesimulator-x86_64,no-debug,no-linkermap");
  }

  /** A target with a default platform declared in its rules builds without a platform flavor. */
  @Test
  public void applicationBundleWithDefaultPlatform() throws IOException, InterruptedException {
    runApplicationBundleTestWithScenarioAndBuildTarget(
        "default_platform_in_rules", "//:DemoApp#no-debug");
  }

  /** An explicit platform flavor on the target overrides the rule's default platform. */
  @Test
  public void applicationBundleWithDefaultPlatformAndFlavor()
      throws IOException, InterruptedException {
    runApplicationBundleTestWithScenarioAndBuildTarget(
        "default_platform_in_rules", "//:DemoApp#iphonesimulator-i386,no-debug");
  }

  /** Two platform flavors (fat binary) also work alongside a rule-declared default platform. */
  @Test
  public void applicationBundleFatBinaryWithDefaultPlatform()
      throws IOException, InterruptedException {
    runApplicationBundleTestWithScenarioAndBuildTarget(
        "default_platform_in_rules",
        "//:DemoApp#iphonesimulator-x86_64,iphonesimulator-i386,no-debug");
  }

  /**
   * A rule-declared default platform wins over a bogus {@code cxx.default_platform} config
   * override: the build must still succeed despite "doesnotexist".
   */
  @Test
  public void applicationBundleWithDefaultPlatformIgnoresConfigOverride()
      throws IOException, InterruptedException {
    ProjectWorkspace workspace =
        runApplicationBundleTestWithScenarioAndBuildTarget(
            "default_platform_in_rules", "//:DemoApp#no-debug");
    BuildTarget target = workspace.newBuildTarget("//:DemoApp#no-debug");
    workspace
        .runBuckCommand(
            "build",
            target.getFullyQualifiedName(),
            "--config",
            "cxx.default_platform=doesnotexist")
        .assertSuccess();
  }
@Test public void simpleApplicationBundleWithCodeSigning() throws Exception { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = runApplicationBundleTestWithScenarioAndBuildTarget( "simple_application_bundle_with_codesigning", "//:DemoApp#iphoneos-arm64,no-debug"); // Do not match iOS profiles on tvOS targets. BuildTarget target = workspace.newBuildTarget("//:DemoApp#appletvos-arm64,no-debug"); ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName()); result.assertFailure(); assertTrue(result.getStderr().contains("No valid non-expired provisioning profiles match")); // Match tvOS profile. workspace.addBuckConfigLocalOption( "apple", "provisioning_profile_search_path", "provisioning_profiles_tvos"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); } @Test public void simpleApplicationBundleWithCodeSigningResources() throws Exception { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_codesigning", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoAppWithAppleResource#iphoneos-arm64,no-debug"); ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName()); result.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve("DemoAppWithAppleResource.app")); Path codesignedResourcePath = appPath.resolve("BinaryToBeCodesigned"); assertTrue(Files.exists(codesignedResourcePath)); assertTrue(checkCodeSigning(codesignedResourcePath)); Path nonCodesignedResourcePath = appPath.resolve("OtherBinary"); assertTrue(Files.exists(nonCodesignedResourcePath)); assertFalse(checkCodeSigning(nonCodesignedResourcePath)); } @Test public void 
simpleApplicationBundleWithTargetCodeSigning() throws Exception { assertTargetCodesignToolIsUsedFor("//:DemoApp#iphoneos-arm64,no-debug"); } @Test public void simpleFatApplicationBundleWithTargetCodeSigning() throws Exception { assertTargetCodesignToolIsUsedFor("//:DemoApp#iphoneos-arm64,iphoneos-armv7,no-debug"); } private void assertTargetCodesignToolIsUsedFor(String fullyQualifiedName) throws Exception { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_target_codesigning", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget(fullyQualifiedName); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()); // custom codesign tool exits with non-zero error code and prints a message to the stderr, so // that its use can be detected assertThat(buildResult.getStderr(), containsString("codesign was here")); } private NSDictionary verifyAndParsePlist(Path path) throws Exception { assertTrue(Files.exists(path)); String resultContents = filesystem.readFileIfItExists(path).get(); return (NSDictionary) PropertyListParser.parse(resultContents.getBytes(Charsets.UTF_8)); } @Test public void simpleApplicationBundleWithDryRunCodeSigning() throws Exception { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_codesigning", tmp); workspace.setUp(); workspace.addBuckConfigLocalOption("apple", "dry_run_code_signing", "true"); BuildTarget target = workspace.newBuildTarget("//:DemoAppWithFramework#iphoneos-arm64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") 
.resolve(target.getShortName() + ".app")); Path codeSignResultsPath = appPath.resolve("BUCK_code_sign_entitlements.plist"); assertTrue(Files.exists(codeSignResultsPath)); NSDictionary resultPlist = verifyAndParsePlist(appPath.resolve("BUCK_pp_dry_run.plist")); assertEquals(new NSString("com.example.DemoApp"), resultPlist.get("bundle-id")); assertEquals(new NSString("12345ABCDE"), resultPlist.get("team-identifier")); assertEquals( new NSString("00000000-0000-0000-0000-000000000000"), resultPlist.get("provisioning-profile-uuid")); // Codesigning main bundle resultPlist = verifyAndParsePlist(appPath.resolve("BUCK_code_sign_args.plist")); assertEquals(new NSNumber(true), resultPlist.get("use-entitlements")); // Codesigning embedded framework bundle resultPlist = verifyAndParsePlist( appPath.resolve("Frameworks/DemoFramework.framework/BUCK_code_sign_args.plist")); assertEquals(new NSNumber(false), resultPlist.get("use-entitlements")); } @Test public void simpleApplicationBundleWithEmbeddedFrameworks() throws Exception { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_codesigning", tmp); workspace.setUp(); BuildTarget appTarget = workspace.newBuildTarget( "//:DemoAppWithFramework#iphoneos-arm64,no-debug,include-frameworks"); workspace.runBuckCommand("build", appTarget.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoAppWithFramework_output.expected"), BuildTargetPaths.getGenPath(filesystem, appTarget, "%s")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath(filesystem, appTarget, "%s") .resolve(appTarget.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve(appTarget.getShortName()))); assertTrue(checkCodeSigning(appPath)); BuildTarget frameworkTarget = workspace.newBuildTarget("//:DemoFramework#iphoneos-arm64,no-debug,no-include-frameworks"); Path frameworkPath = workspace.getPath( 
BuildTargetPaths.getGenPath(filesystem, frameworkTarget, "%s") .resolve(frameworkTarget.getShortName() + ".framework")); assertFalse(checkCodeSigning(frameworkPath)); Path embeddedFrameworkPath = appPath.resolve(Paths.get("Frameworks/DemoFramework.framework")); assertTrue(Files.exists(embeddedFrameworkPath.resolve(frameworkTarget.getShortName()))); assertTrue(checkCodeSigning(embeddedFrameworkPath)); } // Specifying entitlments file via apple_binary entitlements_file @Test public void simpleApplicationBundleWithCodeSigningAndEntitlements() throws IOException, InterruptedException { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_codesigning_and_entitlements", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#iphoneos-arm64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); workspace.assertFilesEqual( Paths.get("DemoApp.xcent.expected"), BuildTargetPaths.getScratchPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s.xcent")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve(target.getShortName()))); assertTrue(checkCodeSigning(appPath)); } // Legacy method -- specifying entitlments file via info_plist_substitutions @Test public void simpleApplicationBundleWithCodeSigningAndEntitlementsUsingInfoPlistSubstitutions() throws IOException, InterruptedException { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace 
workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_with_codesigning_and_entitlements", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance( "//:DemoAppUsingInfoPlistSubstitutions#iphoneos-arm64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.assertFilesEqual( Paths.get("DemoApp.xcent.expected"), BuildTargetPaths.getScratchPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s.xcent")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve(target.getShortName()))); assertTrue(checkCodeSigning(appPath)); } @Test public void macOsApplicationBundleWithCodeSigningAndEntitlements() throws IOException, InterruptedException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "macos_application_bundle_with_codesigning_and_entitlements", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput( "//:App#macosx-x86_64", "--config", "apple.use_entitlements_when_adhoc_code_signing=true"); assertTrue( CodeSigning.hasEntitlement( new DefaultProcessExecutor(new TestConsole()), outputPath, "com.apple.security.device.camera")); assertTrue(checkCodeSigning(outputPath)); } @Test public void simpleApplicationBundleWithFatBinary() throws IOException, InterruptedException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_fat_application_bundle_no_debug", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-i386,iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), 
BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path outputFile = appPath.resolve(target.getShortName()); assertTrue(Files.exists(outputFile)); ProcessExecutor.Result result = workspace.runCommand("lipo", outputFile.toString(), "-verify_arch", "i386", "x86_64"); assertEquals(0, result.getExitCode()); } @Test public void bundleHasOutputPath() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_no_debug", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); ProcessResult result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName()); result.assertSuccess(); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertEquals( String.format("%s %s", target.getFullyQualifiedName(), appPath), result.getStdout().trim()); } @Test public void extensionBundle() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "simple_extension", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoExtension#no-debug"); ProcessResult result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName()); result.assertSuccess(); Path extensionPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".appex"); assertEquals( String.format("%s %s", target.getFullyQualifiedName(), extensionPath), result.getStdout().trim()); result = 
workspace.runBuckCommand("build", target.getFullyQualifiedName()); result.assertSuccess(); Path outputBinary = workspace.getPath(extensionPath.resolve(target.getShortName())); assertTrue( String.format( "Extension binary could not be found inside the appex dir [%s].", outputBinary), Files.exists(outputBinary)); } @Test public void appBundleWithExtensionBundleDependency() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "simple_app_with_extension", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoAppWithExtension#no-debug"); ProcessResult result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName()); result.assertSuccess(); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertEquals( String.format("%s %s", target.getFullyQualifiedName(), appPath), result.getStdout().trim()); result = workspace.runBuckCommand("build", target.getFullyQualifiedName()); result.assertSuccess(); assertTrue(Files.exists(workspace.getPath(appPath.resolve("DemoAppWithExtension")))); assertTrue( Files.exists( workspace.getPath(appPath.resolve("PlugIns/DemoExtension.appex/DemoExtension")))); } @Test public void bundleBinaryHasDsymBundle() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_dwarf_and_dsym", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#dwarf-and-dsym,iphonesimulator-x86_64"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path bundlePath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, 
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path dwarfPath = bundlePath.getParent().resolve("DemoApp.app.dSYM/Contents/Resources/DWARF/DemoApp"); Path binaryPath = bundlePath.resolve("DemoApp"); assertTrue(Files.exists(dwarfPath)); AppleDsymTestUtil.checkDsymFileHasDebugSymbolForMain(workspace, dwarfPath); ProcessExecutor.Result result = workspace.runCommand("dsymutil", "-o", binaryPath + ".test.dSYM", binaryPath.toString()); String dsymutilOutput = ""; if (result.getStderr().isPresent()) { dsymutilOutput = result.getStderr().get(); } if (dsymutilOutput.isEmpty()) { assertThat(result.getStdout().isPresent(), is(true)); dsymutilOutput = result.getStdout().get(); } assertThat(dsymutilOutput, containsString("warning: no debug symbols in executable")); } @Test public void bundleBinaryHasLinkerMapFile() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_dwarf_and_dsym", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target .withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR) .withAppendedFlavors(AppleDebugFormat.DWARF_AND_DSYM.getFlavor()), "%s")); BuildTarget binaryWithLinkerMap = workspace.newBuildTarget("//:DemoAppBinary#iphonesimulator-x86_64"); Path binaryWithLinkerMapPath = BuildTargetPaths.getGenPath(filesystem, binaryWithLinkerMap, "%s"); Path linkMapPath = BuildTargetPaths.getGenPath(filesystem, binaryWithLinkerMap, "%s-LinkMap.txt"); assertThat(Files.exists(workspace.resolve(binaryWithLinkerMapPath)), Matchers.equalTo(true)); assertThat(Files.exists(workspace.resolve(linkMapPath)), Matchers.equalTo(true)); BuildTarget binaryWithoutLinkerMap = 
workspace .newBuildTarget("//:DemoAppBinary#iphonesimulator-x86_64") .withAppendedFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor()); Path binaryWithoutLinkerMapPath = BuildTargetPaths.getGenPath(filesystem, binaryWithoutLinkerMap, "%s"); assertThat( Files.exists(workspace.resolve(binaryWithoutLinkerMapPath)), Matchers.equalTo(false)); } public String runSimpleBuildWithDefinedStripStyle(StripStyle stripStyle) throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_no_debug", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget( "//:DemoApp#iphonesimulator-x86_64," + stripStyle.getFlavor().getName()); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR, stripStyle.getFlavor(), AppleDebugFormat.NONE.getFlavor()), "%s")); Path bundlePath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR, stripStyle.getFlavor(), AppleDebugFormat.NONE.getFlavor()), "%s") .resolve(target.getShortName() + ".app")); Path binaryPath = bundlePath.resolve("DemoApp"); ProcessExecutor.Result result = workspace.runCommand("nm", binaryPath.toString()); return result.getStdout().orElse(""); } @Test public void bundleBinaryWithStripStyleAllDoesNotContainAnyDebugInfo() throws Exception { String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.ALL_SYMBOLS); assertThat(nmOutput, not(containsString("t -[AppDelegate window]"))); assertThat(nmOutput, not(containsString("S _OBJC_METACLASS_$_AppDelegate"))); } @Test public void bundleBinaryWithStripStyleNonGlobalContainsOnlyGlobals() throws Exception { String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.NON_GLOBAL_SYMBOLS); assertThat(nmOutput, 
not(containsString("t -[AppDelegate window]"))); assertThat(nmOutput, containsString("S _OBJC_METACLASS_$_AppDelegate")); } @Test public void bundleBinaryWithStripStyleDebuggingContainsGlobalsAndLocals() throws Exception { String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.DEBUGGING_SYMBOLS); assertThat(nmOutput, containsString("t -[AppDelegate window]")); assertThat(nmOutput, containsString("S _OBJC_METACLASS_$_AppDelegate")); } @Test public void appBundleWithResources() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "app_bundle_with_resources", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); } @Test public void appBundleWithPlatformBinaryWithResources() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "app_bundle_with_resources", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoAppWithPlatformBinary#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoAppWithPlatformBinary_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); } @Test public void appBundleWithConflictingFileAndFolderResources() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_conflicting_resources", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", 
target.getFullyQualifiedName()).assertFailure(); } @Test public void appBundleWithConflictingNestedFolderResources() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_conflicting_nested_resources", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertFailure(); } @Test public void appBundleWithConflictingFilenamesInNestedFolders() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_conflicting_filenames_in_nested_folders", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.DWARF_AND_DSYM.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); } @Test public void appBundleVariantDirectoryMustEndInLproj() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_invalid_variant", tmp); workspace.setUp(); ProcessResult processResult = workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug"); processResult.assertFailure(); assertThat( processResult.getStderr(), allOf( containsString("Variant files have to be in a directory with name ending in '.lproj',"), containsString("/cc/Localizable.strings' is not."))); } @Test public void defaultPlatformInBuckConfig() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "default_platform_in_buckconfig_app_bundle", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp"); 
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.DWARF_AND_DSYM.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.DWARF_AND_DSYM.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName())))); } @Test public void defaultPlatformInBuckConfigWithFlavorSpecified() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "default_platform_in_buckconfig_flavored_app_bundle", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName())))); } @Test public void appleAssetCatalogsAreIncludedInBundle() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "apple_asset_catalogs_are_included_in_bundle", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path outputPath = BuildTargetPaths.getGenPath( filesystem, 
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s"); workspace.verify(Paths.get("DemoApp_output.expected"), outputPath); Path appPath = outputPath.resolve(target.getShortName() + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve("Assets.car")))); } @Test public void generatedAppleAssetCatalogsAreIncludedInBundle() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "apple_asset_catalogs_are_included_in_bundle", tmp); workspace.setUp(); BuildTarget appTarget = BuildTargetFactory.newInstance("//:CombinedAssetsApp#no-debug"); BuildTarget genruleTarget = BuildTargetFactory.newInstance("//:MakeCombinedAssets"); BuildTarget assetTarget = appTarget.withAppendedFlavors(AppleAssetCatalog.FLAVOR); workspace.runBuckCommand("build", appTarget.getFullyQualifiedName()).assertSuccess(); // Check that the genrule was invoked workspace.getBuildLog().assertTargetBuiltLocally(genruleTarget); // Check the actool output: Merged.bundle/Assets.car assertFileInOutputContainsString( "Image2", workspace, assetTarget, "%s/Merged.bundle/Assets.car"); // Check the app package: Assets.car assertFileInOutputContainsString( "Image2", workspace, appTarget.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s/" + appTarget.getShortName() + ".app/Assets.car"); } @Test public void appleAssetCatalogsWithCompilationOptions() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "apple_asset_catalogs_are_included_in_bundle", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoAppWithAssetCatalogCompilationOptions#no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); } private void assertFileInOutputContainsString( String needle, ProjectWorkspace workspace, BuildTarget target, String genPathFormat) throws IOException { Path outputPath = 
BuildTargetPaths.getGenPath(filesystem, target, genPathFormat);
    // (Continuation of assertFileInOutputContainsString.) Resolve the gen path, check the
    // output file exists, then assert its contents contain the expected needle string.
    Path path = workspace.getPath(outputPath);
    assertTrue(Files.exists(path));
    String contents = workspace.getFileContents(outputPath);
    assertTrue(contents.contains(needle));
  }

  /** A bundle whose deps contain more than one app icon / launch image must fail to build. */
  @Test
  public void appleAssetCatalogsWithMoreThanOneAppIconOrLaunchImageShouldFail() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "apple_asset_catalogs_are_included_in_bundle", tmp);
    workspace.setUp();
    BuildTarget target =
        BuildTargetFactory.newInstance("//:DemoAppWithMoreThanOneIconAndLaunchImage#no-debug");
    ProcessResult processResult = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    // The build must fail and the error must name the asset-catalog conflict.
    processResult.assertFailure();
    assertThat(
        processResult.getStderr(),
        containsString("At most one asset catalog in the dependencies of"));
  }

  /**
   * The include-frameworks / no-include-frameworks flavors must be carried only by the bundle
   * targets themselves (app and extension) and never propagated to their other dependencies.
   */
  @Test
  public void appleBundleDoesNotPropagateIncludeFrameworkFlavors() throws Exception {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "simple_app_with_extension", tmp);
    workspace.setUp();
    BuildTarget target = BuildTargetFactory.newInstance("//:DemoAppWithExtension#no-debug");
    ProcessResult result =
        workspace.runBuckCommand("build", "--show-output", target.getFullyQualifiedName());
    result.assertSuccess();
    BuckBuildLog buckBuildLog = workspace.getBuildLog();
    // Only these two targets are bundles, so only they may carry an include-frameworks flavor.
    ImmutableSet<String> targetsThatShouldContainIncludeFrameworkFlavors =
        ImmutableSet.of("//:DemoAppWithExtension", "//:DemoExtension");
    ImmutableSet<Flavor> includeFrameworkFlavors =
        ImmutableSet.of(
            InternalFlavor.of("no-include-frameworks"), InternalFlavor.of("include-frameworks"));
    for (BuildTarget builtTarget : buckBuildLog.getAllTargets()) {
      // Targets without any include-frameworks flavor must not be one of the bundle targets;
      // targets carrying one must be (second assertion continues on the next chunk line).
      if (Sets.intersection(builtTarget.getFlavors().getSet(), includeFrameworkFlavors).isEmpty()) {
        assertThat(
            builtTarget.getUnflavoredBuildTarget().getFullyQualifiedName(),
            not(in(targetsThatShouldContainIncludeFrameworkFlavors)));
      } else {
        assertThat(
builtTarget.getUnflavoredBuildTarget().getFullyQualifiedName(), in(targetsThatShouldContainIncludeFrameworkFlavors)); } } } @Test public void infoPlistSubstitutionsAreApplied() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "application_bundle_with_substitutions", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName())))); NSDictionary plist = (NSDictionary) PropertyListParser.parse( Files.readAllBytes(workspace.getPath(appPath.resolve("Info.plist")))); assertThat( "Should contain xcode build version", (String) plist.get("DTXcodeBuild").toJavaObject(), not(emptyString())); } @Test public void infoPlistSubstitutionsAreAppliedToEntitlements() throws IOException, InterruptedException { assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning()); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "application_bundle_with_entitlements_substitutions", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#iphoneos-arm64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); workspace.assertFilesEqual( Paths.get("DemoApp.xcent.expected"), 
BuildTargetPaths.getScratchPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s.xcent")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve(target.getShortName()))); assertTrue(checkCodeSigning(appPath)); } @Test public void productNameChangesBundleAndBinaryNames() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "application_bundle_with_product_name", tmp); workspace.setUp(); workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug").assertSuccess(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify(); String productName = "BrandNewProduct"; Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(productName + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve(productName)))); } @Test public void infoPlistWithUnrecognizedVariableFails() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "application_bundle_with_invalid_substitutions", tmp); workspace.setUp(); workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug").assertFailure(); } @Test public void resourcesAreCompiled() throws Exception { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_compiled_resources", tmp); workspace.setUp(); BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( 
Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertTrue(Files.exists(workspace.getPath(appPath.resolve("AppViewController.nib")))); assertTrue(Files.exists(workspace.getPath(appPath.resolve("Model.momd")))); assertTrue(Files.exists(workspace.getPath(appPath.resolve("Model2.momd")))); assertTrue(Files.exists(workspace.getPath(appPath.resolve("DemoApp.scnassets")))); } @Test public void watchApplicationBundle() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.WATCHOS)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "watch_application_bundle", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path watchAppPath = appPath.resolve("Watch/DemoWatchApp.app"); assertTrue(Files.exists(watchAppPath.resolve("DemoWatchApp"))); assertTrue( Files.exists( watchAppPath.resolve("PlugIns/DemoWatchAppExtension.appex/DemoWatchAppExtension"))); assertTrue(Files.exists(watchAppPath.resolve("Interface.plist"))); } @Test public void appClipApplicationBundle() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, 
"app_bundle_with_appclip", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:ExampleApp#iphonesimulator-x86_64"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.DWARF_AND_DSYM.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve("ExampleApp"))); assertTrue(Files.exists(appPath.resolve("Info.plist"))); Path appClipPath = appPath.resolve("AppClips/Clip.app/"); assertTrue(Files.exists(appClipPath.resolve("Clip"))); assertTrue(Files.exists(appClipPath.resolve("Info.plist"))); assertFalse(Files.exists(appClipPath.resolve("Frameworks"))); } @Test public void copiesFrameworkBundleIntoFrameworkDirectory() throws Exception { assumeTrue( AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.IPHONESIMULATOR)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_embedded_framework", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path frameworkPath = appPath.resolve("Frameworks/TestFramework.framework"); assertTrue(Files.exists(frameworkPath.resolve("TestFramework"))); } @Test public void onlyIncludesResourcesInBundlesWhichStaticallyLinkThem() throws Exception { assumeTrue( AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.IPHONESIMULATOR)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, 
"app_bundle_with_embedded_framework_and_resources", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); String resourceName = "Resource.plist"; assertFalse( "The resource should be absent in the app bundle.", Files.exists(appPath.resolve(resourceName))); Path frameworkPath = appPath.resolve("Frameworks/TestFramework.framework"); assertTrue( "The resource should be present in the embedded framework.", Files.exists(frameworkPath.resolve(resourceName))); } @Test public void resourceGroupDoesNotDuplicateResourcesInAppAndFramework() throws Exception { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_embedded_framework_and_resource_groups", tmp); workspace.setUp(); workspace.addBuckConfigLocalOption("cxx", "link_groups_enabled", "true"); workspace.addBuckConfigLocalOption("apple", "codesign", "/usr/bin/true"); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug,macosx-x86_64"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); String appResourceName = "resource_app.txt"; String utilityResourceName = "resource_utility.txt"; String frameworkResourceName = "resource_framework.txt"; Path appBundleResourcesPath = appPath.resolve("Contents/Resources"); assertTrue( "App resource should be 
present in the app bundle.", Files.exists(appBundleResourcesPath.resolve(appResourceName))); assertTrue( "Utility resource should be present in the app bundle.", Files.exists(appBundleResourcesPath.resolve(utilityResourceName))); assertFalse( "Framework resource should be absent in the app bundle.", Files.exists(appBundleResourcesPath.resolve(frameworkResourceName))); Path frameworkResourcesPath = appPath.resolve("Contents/Frameworks/AppFramework.framework/Resources"); assertFalse( "App resource should be absent the framework bundle.", Files.exists(frameworkResourcesPath.resolve(appResourceName))); assertTrue( "Utility resource should be present in the framework bundle.", Files.exists(frameworkResourcesPath.resolve(utilityResourceName))); assertTrue( "Framework resource should be present in the framework bundle.", Files.exists(frameworkResourcesPath.resolve(frameworkResourceName))); } @Test public void testTargetOutputForAppleBundle() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "simple_application_bundle_no_debug", tmp); workspace.setUp(); ProcessResult result; // test no-debug output BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug"); result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName()); result.assertSuccess(); Path appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app"); assertThat( result.getStdout(), Matchers.startsWith(target.getFullyQualifiedName() + " " + appPath)); // test debug output target = BuildTargetFactory.newInstance("//:DemoApp#dwarf-and-dsym"); result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName()); result.assertSuccess(); appPath = BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") 
.resolve(target.getShortName() + ".app"); assertThat( result.getStdout(), Matchers.startsWith(target.getFullyQualifiedName() + " " + appPath)); } @Test public void macAppWithExtraBinary() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "apple_osx_app_with_extra_binary", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug"); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); buildResult.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path AppBinaryPath = appPath.resolve("Contents/MacOS/App"); Path WorkerBinaryPath = appPath.resolve("Contents/MacOS/Worker"); assertTrue(Files.exists(AppBinaryPath)); assertTrue(Files.exists(WorkerBinaryPath)); } @Test public void macAppWithXPCService() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "apple_osx_app_with_xpc_service", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug"); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); buildResult.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path XPCServicePath = appPath.resolve("Contents/XPCServices/Service.xpc"); Path XPCServiceBinaryPath = 
XPCServicePath.resolve("Contents/MacOS/Service"); Path XPCServiceInfoPlistPath = XPCServicePath.resolve("Contents/Info.plist"); assertTrue(Files.exists(XPCServiceBinaryPath)); assertTrue(Files.exists(XPCServiceInfoPlistPath)); } @Test public void macAppWithPlugin() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "apple_osx_app_with_plugin", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug"); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); buildResult.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path pluginPath = appPath.resolve("Contents/PlugIns/Plugin.plugin"); Path pluginBinaryPath = pluginPath.resolve("Contents/MacOS/Plugin"); Path pluginInfoPlistPath = pluginPath.resolve("Contents/Info.plist"); assertTrue(Files.exists(pluginBinaryPath)); assertTrue(Files.exists(pluginInfoPlistPath)); } @Test public void macAppWithPrefPane() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "apple_osx_app_with_prefpane", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug"); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); buildResult.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + 
".app")); Path prefPanePath = appPath.resolve("Contents/Resources/PrefPane.prefPane"); Path prefPaneBinaryPath = prefPanePath.resolve("Contents/MacOS/PrefPane"); Path prefPaneInfoPlistPath = prefPanePath.resolve("Contents/Info.plist"); assertTrue(Files.exists(prefPaneBinaryPath)); assertTrue(Files.exists(prefPaneInfoPlistPath)); } @Test public void macAppWithQuickLook() throws IOException { assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX)); ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "apple_osx_app_with_quicklook", tmp); workspace.setUp(); BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug"); ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); buildResult.assertSuccess(); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors( AppleDebugFormat.NONE.getFlavor(), AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); Path quicklookPath = appPath.resolve("Contents/Library/QuickLook/QuickLook.qlgenerator"); Path quicklookBinaryPath = quicklookPath.resolve("Contents/MacOS/QuickLook"); Path quicklookInfoPlistPath = quicklookPath.resolve("Contents/Info.plist"); assertTrue(Files.exists(quicklookBinaryPath)); assertTrue(Files.exists(quicklookInfoPlistPath)); } @Test public void resourcesFromOtherCellsCanBeProperlyIncluded() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resources_from_other_cells", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64"); assertTrue("Resource file should exist.", Files.isRegularFile(outputPath.resolve("file.txt"))); } @Test public void resourcesWithFrameworksDestinationsAreProperlyCopiedOnIosPlatform() throws IOException { ProjectWorkspace workspace = 
TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_frameworks_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64"); assertTrue( "Resource file should exist in Frameworks directory.", Files.isRegularFile(outputPath.resolve("Frameworks/file.txt"))); } @Test public void resourcesWithFrameworksDestinationsAreProperlyCopiedOnMacosxPlatform() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_frameworks_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput( "//:bundle#macosx-x86_64", "--config", "apple.codesign=/usr/bin/true"); assertTrue( "Resource file should exist in Frameworks directory.", Files.isRegularFile(outputPath.resolve("Contents/Frameworks/file.txt"))); } @Test public void resourcesWithExecutablesDestinationsAreProperlyCopiedOnIosPlatform() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_executables_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64"); assertTrue( "Resource file should exist in Executables directory.", Files.isRegularFile(outputPath.resolve("file.txt"))); } @Test public void resourcesWithExecutablesDestinationsAreProperlyCopiedOnMacosxPlatform() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_executables_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput( "//:bundle#macosx-x86_64", "--config", "apple.codesign=/usr/bin/true"); assertTrue( "Resource file should exist in Executables directory.", Files.isRegularFile(outputPath.resolve("Contents/MacOS/file.txt"))); } @Test public void resourcesWithResourcesDestinationsAreProperlyCopiedOnIosPlatform() throws 
IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_resources_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64"); assertTrue( "Resource file should exist in Resources directory.", Files.isRegularFile(outputPath.resolve("file.txt"))); } @Test public void resourcesWithResourcesDestinationsAreProperlyCopiedOnMacosxPlatform() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "bundle_with_resource_with_resources_destination", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput( "//:bundle#macosx-x86_64", "--config", "apple.codesign=/usr/bin/true"); assertTrue( "Resource file should exist in Resources directory.", Files.isRegularFile(outputPath.resolve("Contents/Resources/file.txt"))); } @Test public void bundleTraversesAppleResourceResourcesFromDepsForAdditionalResources() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_resources_from_deps", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64"); assertTrue( "Resource file should exist.", Files.isRegularFile(outputPath.resolve("other_resource.txt"))); } @Test public void bundleTraversesAppleResourceResourcesFromPlatformDepsForAdditionalResources() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_resources_from_deps", tmp); workspace.setUp(); Path outputPath = workspace.buildAndReturnOutput("//:mybundle#iphonesimulator-x86_64"); assertTrue( "Resource file matching platform should exist.", Files.isRegularFile(outputPath.resolve("sim.txt"))); assertFalse( "Resource file not matching platform should not exist.", Files.isRegularFile(outputPath.resolve("device.txt"))); } @Test public void 
defaultBinaryIsUsedWhenOnTargetPlatformMismatch() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_platform_binary", tmp); workspace.setUp(); workspace.runBuckBuild("//:bundle#iphoneos-armv7").assertFailure(); } @Test public void binaryMatchingTargetPlatformIsUsed() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_platform_binary", tmp); workspace.setUp(); workspace.runBuckBuild("//:bundle#iphonesimulator-x86_64").assertSuccess(); } @Test public void defaultBinaryIsNotUsedWhenPlatformSpecificBinaryIsSpecified() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_platform_binary", tmp); workspace.setUp(); workspace .runBuckBuild("//:bundle_with_broken_default_binary#iphonesimulator-x86_64") .assertSuccess(); } @Test public void errorMessageForBundleWithoutBinaryIsDisplayed() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_platform_binary", tmp); workspace.setUp(); ProcessResult processResult = workspace.runBuckBuild("//:bundle_without_binary#iphonesimulator-x86_64"); processResult.assertFailure(); assertThat( processResult.getStderr(), containsString( "Binary matching target platform iphonesimulator-x86_64 cannot be found" + " and binary default is not specified.")); } @Test public void errorMessageForBundleWithMultipleMatchingBinariesIsDisplayed() throws IOException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario( this, "app_bundle_with_platform_binary", tmp); workspace.setUp(); ProcessResult processResult = workspace.runBuckBuild("//:bundle_with_multiple_matching_binaries#iphonesimulator-x86_64"); processResult.assertFailure(); assertThat( processResult.getStderr(), containsString( "There must be at most one binary matching the target platform 
" + "iphonesimulator-x86_64 but all of [//:binary, //:binary] matched. " + "Please make your pattern more precise and remove any duplicates.")); } @Test public void crossCellApplicationBundle() throws IOException, InterruptedException { ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenarioWithoutDefaultCell( this, "simple_cross_cell_application_bundle/primary", tmp.newFolder()); workspace.setUp(); ProjectWorkspace secondary = TestDataHelper.createProjectWorkspaceForScenarioWithoutDefaultCell( this, "simple_cross_cell_application_bundle/secondary", tmp.newFolder()); secondary.setUp(); TestDataHelper.overrideBuckconfig( workspace, ImmutableMap.of( "repositories", ImmutableMap.of("secondary", secondary.getPath(".").normalize().toString()))); BuildTarget target = workspace.newBuildTarget("//:DemoApp#dwarf-and-dsym,iphonesimulator-x86_64,no_debug"); workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess(); workspace.verify( Paths.get("DemoApp_output.expected"), BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s")); Path appPath = workspace.getPath( BuildTargetPaths.getGenPath( filesystem, target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR), "%s") .resolve(target.getShortName() + ".app")); assertTrue(Files.exists(appPath.resolve(target.getShortName()))); assertTrue(checkCodeSigning(appPath)); // Non-Swift target shouldn't include Frameworks/ assertFalse(Files.exists(appPath.resolve("Frameworks"))); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.util; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.File; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.math.BigInteger; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URL; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.UUID; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterGroup; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.compute.ComputeJob; import org.apache.ignite.compute.ComputeJobAdapter; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import 
org.apache.ignite.internal.util.lang.GridPeerDeployAware;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.http.GridEmbeddedHttpServer;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.jetbrains.annotations.Nullable;

import static org.junit.Assert.assertArrayEquals;

/**
 * Grid utils tests.
 */
@GridCommonTest(group = "Utils")
public class IgniteUtilsSelfTest extends GridCommonAbstractTest {
    /** Shared empty int array for tests that need one. */
    public static final int[] EMPTY = new int[0];

    /**
     * @return 120 character length string.
     */
    private String text120() {
        char[] chs = new char[120];

        Arrays.fill(chs, 'x');

        return new String(chs);
    }

    /**
     * Checks {@code U.isPow2(int)}: positive powers of two report {@code true};
     * non-powers and all non-positive values (including 0) report {@code false}.
     */
    public void testIsPow2() {
        assertTrue(U.isPow2(1));
        assertTrue(U.isPow2(2));
        assertTrue(U.isPow2(4));
        assertTrue(U.isPow2(8));
        assertTrue(U.isPow2(16));
        assertTrue(U.isPow2(16 * 16));
        assertTrue(U.isPow2(32 * 32));

        assertFalse(U.isPow2(-4));
        assertFalse(U.isPow2(-3));
        assertFalse(U.isPow2(-2));
        assertFalse(U.isPow2(-1));
        assertFalse(U.isPow2(0));
        assertFalse(U.isPow2(3));
        assertFalse(U.isPow2(5));
        assertFalse(U.isPow2(6));
        assertFalse(U.isPow2(7));
        assertFalse(U.isPow2(9));
    }

    /**
     * Smoke test: enumerating local IPs must not throw; the result is only printed.
     *
     * @throws Exception If failed.
     */
    public void testAllLocalIps() throws Exception {
        Collection<String> ips = U.allLocalIps();

        System.out.println("All local IPs: " + ips);
    }

    /**
     * @throws Exception If failed.
*/
    public void testAllLocalMACs() throws Exception {
        Collection<String> macs = U.allLocalMACs();

        System.out.println("All local MACs: " + macs);
    }

    /**
     * On linux NetworkInterface.getHardwareAddress() returns null from time to time.
     *
     * @throws Exception If failed.
     */
    public void testAllLocalMACsMultiThreaded() throws Exception {
        GridTestUtils.runMultiThreaded(new Runnable() {
            @Override public void run() {
                // Repeated reads from many threads: the MAC set must never come back empty.
                for (int i = 0; i < 30; i++) {
                    Collection<String> macs = U.allLocalMACs();

                    assertTrue("Mac address are not defined.", !macs.isEmpty());
                }
            }
        }, 32, "thread");
    }

    /**
     * Checks per-byte formatting of a byte array (first-byte vs. subsequent-byte formats).
     *
     * @throws Exception If failed.
     */
    public void testByteArray2String() throws Exception {
        assertEquals("{0x0A,0x14,0x1E,0x28,0x32,0x3C,0x46,0x50,0x5A}",
            U.byteArray2String(new byte[]{10, 20, 30, 40, 50, 60, 70, 80, 90}, "0x%02X", ",0x%02X"));
    }

    /**
     * Smoke test for minutes formatting around hour and day boundaries.
     *
     * @throws Exception If failed.
     */
    public void testFormatMins() throws Exception {
        printFormatMins(0);
        printFormatMins(1);
        printFormatMins(2);
        printFormatMins(59);
        printFormatMins(60);
        printFormatMins(61);
        printFormatMins(60 * 24 - 1);
        printFormatMins(60 * 24);
        printFormatMins(60 * 24 + 1);
        printFormatMins(5 * 60 * 24 - 1);
        printFormatMins(5 * 60 * 24);
        printFormatMins(5 * 60 * 24 + 1);
    }

    /**
     * Helper method for {@link #testFormatMins()}
     *
     * @param mins Minutes to test.
     */
    private void printFormatMins(long mins) {
        System.out.println("For " + mins + " minutes: " + X.formatMins(mins));
    }

    /**
     * Downloads a file over plain HTTP and checks it lands on disk.
     *
     * @throws Exception If failed.
     */
    public void testDownloadUrlFromHttp() throws Exception {
        GridEmbeddedHttpServer srv = null;

        try {
            String urlPath = "/testDownloadUrl/";

            // FIX: dropped the leading slash so the resolved path is consistent with
            // testDownloadUrlFromHttps() and testDownloadUrlFromLocalFile() below.
            srv = GridEmbeddedHttpServer.startHttpServer().withFileDownloadingHandler(urlPath,
                GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties"));

            File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");

            file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);

            assert file.exists();
            assert file.delete();
        }
        finally {
            // Always release the embedded server port.
            if (srv != null)
                srv.stop(1);
        }
    }

    /**
     * Downloads a file over HTTPS and checks it lands on disk.
     *
     * @throws Exception If failed.
     */
    public void testDownloadUrlFromHttps() throws Exception {
        GridEmbeddedHttpServer srv = null;

        try {
            String urlPath = "/testDownloadUrl/";

            srv = GridEmbeddedHttpServer.startHttpsServer().withFileDownloadingHandler(urlPath,
                GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties"));

            File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");

            file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);

            assert file.exists();
            assert file.delete();
        }
        finally {
            if (srv != null)
                srv.stop(1);
        }
    }

    /**
     * Copies a local file via its {@code file:} URL.
     *
     * @throws Exception If failed.
     */
    public void testDownloadUrlFromLocalFile() throws Exception {
        File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");

        file = U.downloadUrl(
            GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties").toURI().toURL(), file);

        assert file.exists();
        assert file.delete();
    }

    /**
     * @throws Exception If failed.
*/
    public void testOs() throws Exception {
        // Pure smoke test: dump every OS/JDK detection flag; none of the calls may throw.
        System.out.println("OS string: " + U.osString());
        System.out.println("JDK string: " + U.jdkString());
        System.out.println("OS/JDK string: " + U.osJdkString());
        System.out.println("Is Windows: " + U.isWindows());
        System.out.println("Is Windows 95: " + U.isWindows95());
        System.out.println("Is Windows 98: " + U.isWindows98());
        System.out.println("Is Windows NT: " + U.isWindowsNt());
        System.out.println("Is Windows 2000: " + U.isWindows2k());
        System.out.println("Is Windows 2003: " + U.isWindows2003());
        System.out.println("Is Windows XP: " + U.isWindowsXp());
        System.out.println("Is Windows Vista: " + U.isWindowsVista());
        System.out.println("Is Linux: " + U.isLinux());
        System.out.println("Is Mac OS: " + U.isMacOs());
        System.out.println("Is Netware: " + U.isNetWare());
        System.out.println("Is Solaris: " + U.isSolaris());
        System.out.println("Is Solaris SPARC: " + U.isSolarisSparc());
        System.out.println("Is Solaris x86: " + U.isSolarisX86());
        System.out.println("Is Windows7: " + U.isWindows7());
        System.out.println("Is Sufficiently Tested OS: " + U.isSufficientlyTestedOs());
    }

    /**
     * @throws Exception If failed.
*/
    public void testJavaSerialization() throws Exception {
        byte[] src = {1, 2, 3, 4, 5, 5};

        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();

        // FIX: close both object streams deterministically (they were leaked before)
        // and actually verify the deserialized value instead of discarding it.
        try (ObjectOutputStream objOut = new ObjectOutputStream(byteOut)) {
            objOut.writeObject(src);
            objOut.flush();
        }

        byte[] sBytes = byteOut.toByteArray();

        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(sBytes))) {
            byte[] res = (byte[])in.readObject();

            assertTrue(Arrays.equals(src, res));
        }
    }

    /**
     * Prints URI password-masking results for several URI flavors (with and without credentials).
     */
    public void testHidePassword() {
        Collection<String> uriList = new ArrayList<>();

        uriList.add("ftp://anonymous:111111;freq=5000@unknown.host:21/pub/gg-test");
        uriList.add("ftp://anonymous:111111;freq=5000@localhost:21/pub/gg-test");

        uriList.add("http://freq=5000@localhost/tasks");
        uriList.add("http://freq=5000@unknownhost.host/tasks");

        for (String uri : uriList)
            X.println(uri + " -> " + U.hidePassword(uri));
    }

    /**
     * Test job to test possible indefinite recursion in detecting peer deploy aware.
     */
    @SuppressWarnings({"UnusedDeclaration"})
    private class SelfReferencedJob extends ComputeJobAdapter implements GridPeerDeployAware {
        /** Direct self-reference. */
        private SelfReferencedJob ref;

        /** Array containing this job twice. */
        private SelfReferencedJob[] arr;

        /** Collection containing this job several times. */
        private Collection<SelfReferencedJob> col;

        /** Local node. */
        private ClusterNode node;

        /** Singleton projection over {@link #node}. */
        private ClusterGroup subGrid;

        /**
         * @param ignite Grid.
         */
        private SelfReferencedJob(Ignite ignite) throws IgniteCheckedException {
            node = ignite.cluster().localNode();

            ref = this;
            arr = new SelfReferencedJob[] {this, this};
            col = Arrays.asList(this, this, this);

            newContext();

            subGrid = ignite.cluster().forNodes(Collections.singleton(node));
        }

        /** {@inheritDoc} */
        @Override public Object execute() {
            return null;
        }

        /** {@inheritDoc} */
        @Override public Class<?> deployClass() {
            return getClass();
        }

        /** {@inheritDoc} */
        @Override public ClassLoader classLoader() {
            return getClass().getClassLoader();
        }
    }

    /**
     * @throws Exception If test fails.
*/
    public void testDetectPeerDeployAwareInfiniteRecursion() throws Exception {
        Ignite g = startGrid(1);

        try {
            final SelfReferencedJob job = new SelfReferencedJob(g);

            GridPeerDeployAware d = U.detectPeerDeployAware(U.peerDeployAware(job));

            assert d != null;
            assert SelfReferencedJob.class == d.deployClass();
            assert d.classLoader() == SelfReferencedJob.class.getClassLoader();
        }
        finally {
            stopGrid(1);
        }
    }

    /**
     * @param r Runnable.
     * @return Job created for given runnable.
     */
    private static ComputeJob job(final Runnable r) {
        return new ComputeJobAdapter() {
            @Nullable @Override public Object execute() {
                r.run();

                return null;
            }
        };
    }

    /**
     * Parses ISO dates with 'Z', '+HH:MM' and '+HHMM' zone suffixes.
     *
     * @throws Exception If failed.
     */
    public void testParseIsoDate() throws Exception {
        // UTC ('Z') suffix.
        Calendar cal = U.parseIsoDate("2009-12-08T13:30:44.000Z");

        assert cal.get(Calendar.YEAR) == 2009;
        assert cal.get(Calendar.MONTH) == 11;
        assert cal.get(Calendar.DAY_OF_MONTH) == 8;
        assert cal.get(Calendar.HOUR_OF_DAY) == 13;
        assert cal.get(Calendar.MINUTE) == 30;
        assert cal.get(Calendar.SECOND) == 44;
        assert cal.get(Calendar.MILLISECOND) == 0;
        assert cal.get(Calendar.ZONE_OFFSET) == 0 : "Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);

        // '+HH:MM' suffix.
        cal = U.parseIsoDate("2009-12-08T13:30:44.000+03:00");

        assert cal.get(Calendar.YEAR) == 2009;
        assert cal.get(Calendar.MONTH) == 11;
        assert cal.get(Calendar.DAY_OF_MONTH) == 8;
        assert cal.get(Calendar.HOUR_OF_DAY) == 13;
        assert cal.get(Calendar.MINUTE) == 30;
        assert cal.get(Calendar.SECOND) == 44;
        assert cal.get(Calendar.MILLISECOND) == 0;
        assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
            "Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);

        // '+HHMM' suffix.
        cal = U.parseIsoDate("2009-12-08T13:30:44.000+0300");

        assert cal.get(Calendar.YEAR) == 2009;
        assert cal.get(Calendar.MONTH) == 11;
        assert cal.get(Calendar.DAY_OF_MONTH) == 8;
        assert cal.get(Calendar.HOUR_OF_DAY) == 13;
        assert cal.get(Calendar.MINUTE) == 30;
        assert cal.get(Calendar.SECOND) == 44;
        assert cal.get(Calendar.MILLISECOND) == 0;
        assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
            "Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
    }

    /**
     * Checks that peer-deploy-aware detection tolerates collections of nulls and mixed content.
     *
     * @throws Exception If test failed.
     */
    public void testPeerDeployAware0() throws Exception {
        Collection<Object> col = new ArrayList<>();

        // Only nulls.
        col.add(null);
        col.add(null);
        col.add(null);

        GridPeerDeployAware pda = U.peerDeployAware0(col);

        assert pda != null;

        // Single null.
        col.clear();
        col.add(null);

        pda = U.peerDeployAware0(col);

        assert pda != null;

        // Empty collection.
        col.clear();

        pda = U.peerDeployAware0(col);

        assert pda != null;

        // Nulls around a single non-null element.
        col.clear();
        col.add(null);
        col.add("Test");
        col.add(null);

        pda = U.peerDeployAware0(col);

        assert pda != null;

        // Single non-null element.
        col.clear();
        col.add("Test");

        pda = U.peerDeployAware0(col);

        assert pda != null;

        // Two non-null elements.
        col.clear();
        col.add("Test");
        col.add(this);

        pda = U.peerDeployAware0(col);

        assert pda != null;

        // Mixed nulls and non-nulls.
        col.clear();
        col.add(null);
        col.add("Test");
        col.add(null);
        col.add(this);
        col.add(null);

        pda = U.peerDeployAware0(col);

        assert pda != null;
    }

    /**
     * Test UUID to bytes array conversion.
     */
    public void testsGetBytes() {
        for (int i = 0; i < 100; i++) {
            UUID id = UUID.randomUUID();

            byte[] bytes = U.uuidToBytes(id);

            BigInteger n = new BigInteger(bytes);

            // High 64 bits must be the most-significant half, low 64 bits the least-significant.
            assert n.shiftRight(Long.SIZE).longValue() == id.getMostSignificantBits();
            assert n.longValue() == id.getLeastSignificantBits();
        }
    }

    /**
     * Checks reading a byte array from one or several (possibly empty) buffers.
     */
    @SuppressWarnings("ZeroLengthArrayAllocation")
    public void testReadByteArray() {
        assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0))));
        assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.allocate(0))));

        Random rnd = new Random();

        byte[] bytes = new byte[13];

        rnd.nextBytes(bytes);

        assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes))));
        assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes), ByteBuffer.allocate(0))));
        assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.wrap(bytes))));

        // Random splits of a random array across several buffers.
        for (int i = 0; i < 1000; i++) {
            int n = rnd.nextInt(100);

            bytes = new byte[n];

            rnd.nextBytes(bytes);

            ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(10)];

            int off = 0;

            for (int j = 0; j < bufs.length - 1; j++) {
                int size = off == n ? 0 : rnd.nextInt(n - off);

                // NOTE: 'off += size' inside limit() advances the split point.
                bufs[j] = (ByteBuffer)ByteBuffer.wrap(bytes).position(off).limit(off += size);
            }

            bufs[bufs.length - 1] = (ByteBuffer)ByteBuffer.wrap(bytes).position(off).limit(n);

            assertTrue(Arrays.equals(bytes, U.readByteArray(bufs)));
        }
    }

    /**
     * Hash code over buffers must equal hash code over the concatenated bytes.
     */
    @SuppressWarnings("ZeroLengthArrayAllocation")
    public void testHashCodeFromBuffers() {
        assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0)));
        assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0), ByteBuffer.allocate(0)));

        Random rnd = new Random();

        for (int i = 0; i < 1000; i++) {
            ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(15)];

            for (int j = 0; j < bufs.length; j++) {
                byte[] bytes = new byte[rnd.nextInt(25)];

                rnd.nextBytes(bytes);

                bufs[j] = ByteBuffer.wrap(bytes);
            }

            assertEquals(U.hashCode(bufs), Arrays.hashCode(U.readByteArray(bufs)));
        }
    }

    /**
     * Test annotation look up.
     */
    public void testGetAnnotations() {
        // Annotations inherited through the interface hierarchy.
        assert U.getAnnotation(A1.class, Ann1.class) != null;
        assert U.getAnnotation(A2.class, Ann1.class) != null;
        assert U.getAnnotation(A1.class, Ann2.class) != null;
        assert U.getAnnotation(A2.class, Ann2.class) != null;

        // A3 only reaches Ann2 (via I5 -> I4).
        assert U.getAnnotation(A3.class, Ann1.class) == null;
        assert U.getAnnotation(A3.class, Ann2.class) != null;
    }

    /**
     * Checks merging of two sorted arrays without duplicates.
     */
    public void testUnique() {
        // Each triple: first input, second input, expected union.
        int[][][] arrays = new int[][][]{
            new int[][]{EMPTY, EMPTY, EMPTY},
            new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
            new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, new int[]{1, 2, 3}},
            new int[][]{new int[]{1, 2, 3}, new int[]{1, 3}, new int[]{1, 2, 3}},
            new int[][]{new int[]{1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{1, 2, 30, 40, 50}},
            new int[][]{new int[]{-100, -13, 1, 2, 5, 30, 40, 50}, new int[]{1, 2, 6, 100, 113},
                new int[]{-100, -13, 1, 2, 5, 6, 30, 40, 50, 100, 113}}
        };

        for (int[][] a : arrays) {
            // Union is symmetric.
            assertArrayEquals(a[2], U.unique(a[0], a[0].length, a[1], a[1].length));
            assertArrayEquals(a[2], U.unique(a[1], a[1].length, a[0], a[0].length));
        }

        // Explicit length limits shorter than the arrays.
        assertArrayEquals(new int[]{1, 2, 3, 4}, U.unique(new int[]{1, 2, 3, 8}, 3, new int[]{2, 4, 5}, 2));
        assertArrayEquals(new int[]{2, 4}, U.unique(new int[]{1, 2, 3, 8}, 0, new int[]{2, 4, 5}, 2));
        assertArrayEquals(new int[]{1, 2, 4, 5}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 3));
        assertArrayEquals(new int[]{1, 2}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 0));
    }

    /**
     * Checks set difference of two sorted arrays.
     */
    public void testDifference() {
        // Each triple: first input, second input, expected difference.
        int[][][] arrays = new int[][][]{
            new int[][]{EMPTY, EMPTY, EMPTY},
            new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
            new int[][]{EMPTY, new int[]{1, 2, 3}, EMPTY},
            new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, EMPTY},
            new int[][]{new int[]{-100, -50, 1, 2, 3}, new int[]{-50, -1, 1, 3}, new int[]{-100, 2}},
            new int[][]{new int[]{-100, 1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{-100, 1, 30, 50}},
            new int[][]{new int[]{-1, 1, 2, 30, 40, 50}, new int[]{1, 2, 100, 113}, new int[]{-1, 30, 40, 50}}
        };

        for (int[][] a : arrays)
            assertArrayEquals(a[2], U.difference(a[0], a[0].length, a[1], a[1].length));

        // Explicit length limits shorter than the arrays.
        assertArrayEquals(new int[]{1, 2}, U.difference(new int[]{1, 2, 30, 40, 50}, 3, new int[]{30, 40}, 2));
        assertArrayEquals(EMPTY, U.difference(new int[]{1, 2, 30, 40, 50}, 0, new int[]{30, 40}, 2));
        assertArrayEquals(new int[]{1, 2, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 1));
        assertArrayEquals(new int[]{1, 2, 30, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 0));
    }

    /**
     * Checks that copyIfExceeded() returns the same instance when the limit equals the length
     * and a truncated copy otherwise.
     */
    public void testCopyIfExceeded() {
        int[][] arrays = new int[][]{new int[]{13, 14, 17, 11}, new int[]{13}, EMPTY};

        for (int[] a : arrays) {
            int[] snapshot = Arrays.copyOf(a, a.length);

            // Same reference when nothing is trimmed (identity check via assertEquals on Object).
            assertEquals(a, U.copyIfExceeded(a, a.length));
            assertArrayEquals(snapshot, U.copyIfExceeded(a, a.length));

            for (int j = 0; j < a.length - 1; j++)
                assertArrayEquals(Arrays.copyOf(snapshot, j), U.copyIfExceeded(a, j));
        }
    }

    /**
     * Checks strict-increase detection over a bounded prefix.
     */
    public void testIsIncreasingArray() {
        assertTrue(U.isIncreasingArray(EMPTY, 0));
        assertTrue(U.isIncreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
        assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
        assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
        assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
        assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 5));

        // Equal neighbors are NOT increasing.
        assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
        assertTrue(U.isIncreasingArray(new int[]{1, -1}, 1));
        assertFalse(U.isIncreasingArray(new int[]{1, -1}, 2));
        assertTrue(U.isIncreasingArray(new int[]{13, 13, 13}, 1));
        assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 2));
        assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 3));
    }

    /**
     * Checks non-decrease detection over a bounded prefix.
     */
    public void testIsNonDecreasingArray() {
        assertTrue(U.isNonDecreasingArray(EMPTY, 0));
        assertTrue(U.isNonDecreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
        assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
        assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
        assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
        assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 5));

        // Equal neighbors ARE non-decreasing.
        assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
        assertTrue(U.isNonDecreasingArray(new int[]{1, -1}, 1));
        assertFalse(U.isNonDecreasingArray(new int[]{1, -1}, 2));
        assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 1));
        assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 2));
        assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 3));
    }

    /**
     * Test InetAddress Comparator.
*/ public void testInetAddressesComparator() { List<InetSocketAddress> ips = new ArrayList<InetSocketAddress>() { { add(new InetSocketAddress("127.0.0.1", 1)); add(new InetSocketAddress("10.0.0.1", 1)); add(new InetSocketAddress("172.16.0.1", 1)); add(new InetSocketAddress("192.168.0.1", 1)); add(new InetSocketAddress("100.0.0.1", 1)); add(new InetSocketAddress("XXX", 1)); } }; Collections.sort(ips, U.inetAddressesComparator(true)); assertTrue(ips.get(0).getAddress().isLoopbackAddress()); assertTrue(ips.get(ips.size() - 1).isUnresolved()); Collections.sort(ips, U.inetAddressesComparator(false)); assertTrue(ips.get(ips.size() - 2).getAddress().isLoopbackAddress()); assertTrue(ips.get(ips.size() - 1).isUnresolved()); } public void testMD5Calculation() throws Exception { String md5 = U.calculateMD5(new ByteArrayInputStream("Corrupted information.".getBytes())); assertEquals("d7dbe555be2eee7fa658299850169fa1", md5); } /** * @throws Exception If failed. */ public void testResolveLocalAddresses() throws Exception { InetAddress inetAddress = InetAddress.getByName("0.0.0.0"); IgniteBiTuple<Collection<String>, Collection<String>> addrs = U.resolveLocalAddresses(inetAddress); Collection<String> hostNames = addrs.get2(); assertFalse(hostNames.contains(null)); assertFalse(hostNames.contains("")); assertFalse(hostNames.contains("127.0.0.1")); assertFalse(F.exist(hostNames, new IgnitePredicate<String>() { @Override public boolean apply(String hostName) { return hostName.contains("localhost") || hostName.contains("0:0:0:0:0:0:0:1"); } })); } /** * */ public void testToSocketAddressesNoDuplicates() { Collection<String> addrs = new ArrayList<>(); addrs.add("127.0.0.1"); addrs.add("localhost"); Collection<String> hostNames = new ArrayList<>(); int port = 1234; assertEquals(1, U.toSocketAddresses(addrs, hostNames, port).size()); } /** * Composes a test String of given tlength. * * @param len The length. * @return The String. 
*/ private static String composeString(int len) { StringBuilder sb = new StringBuilder(); for (int i=0; i<len; i++) sb.append((char)i); String x = sb.toString(); assertEquals(len, x.length()); return x; } /** * Writes the given String to a DataOutput, reads from DataInput, then checks if they are the same. * * @param s0 The String to check serialization for. * @throws Exception On error. */ private static void checkString(String s0) throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput dout = new DataOutputStream(baos); IgfsUtils.writeUTF(dout, s0); DataInput din = new DataInputStream(new ByteArrayInputStream(baos.toByteArray())); String s1 = IgfsUtils.readUTF(din); assertEquals(s0, s1); } /** * Tests long String serialization/deserialization, * * @throws Exception If failed. */ public void testLongStringWriteUTF() throws Exception { checkString(null); checkString(""); checkString("a"); checkString("Quick brown fox jumps over the lazy dog."); String x = composeString(0xFFFF / 4 - 1); checkString(x); x = composeString(0xFFFF / 4); checkString(x); x = composeString(0xFFFF / 4 + 1); checkString(x); } /** * */ public void testCeilPow2() throws Exception { assertEquals(2, U.ceilPow2(2)); assertEquals(4, U.ceilPow2(3)); assertEquals(4, U.ceilPow2(4)); assertEquals(8, U.ceilPow2(5)); assertEquals(8, U.ceilPow2(6)); assertEquals(8, U.ceilPow2(7)); assertEquals(8, U.ceilPow2(8)); assertEquals(16, U.ceilPow2(9)); assertEquals(1 << 15, U.ceilPow2((1 << 15) - 1)); assertEquals(1 << 15, U.ceilPow2(1 << 15)); assertEquals(1 << 16, U.ceilPow2((1 << 15) + 1)); assertEquals(1 << 26, U.ceilPow2((1 << 26) - 100)); assertEquals(1 << 26, U.ceilPow2(1 << 26)); assertEquals(1 << 27, U.ceilPow2((1 << 26) + 100)); for (int i = (int)Math.pow(2, 30); i < Integer.MAX_VALUE; i++) assertEquals((int)Math.pow(2, 30), U.ceilPow2(i)); for (int i = Integer.MIN_VALUE; i < 0; i++) assertEquals(0, U.ceilPow2(i)); } /** * */ public void testIsOldestNodeVersionAtLeast() 
{ IgniteProductVersion v240 = IgniteProductVersion.fromString("2.4.0"); IgniteProductVersion v241 = IgniteProductVersion.fromString("2.4.1"); IgniteProductVersion v250 = IgniteProductVersion.fromString("2.5.0"); IgniteProductVersion v250ts = IgniteProductVersion.fromString("2.5.0-b1-3"); TcpDiscoveryNode node240 = new TcpDiscoveryNode(); node240.version(v240); TcpDiscoveryNode node241 = new TcpDiscoveryNode(); node241.version(v241); TcpDiscoveryNode node250 = new TcpDiscoveryNode(); node250.version(v250); TcpDiscoveryNode node250ts = new TcpDiscoveryNode(); node250ts.version(v250ts); assertTrue(U.isOldestNodeVersionAtLeast(v240, Arrays.asList(node240, node241, node250, node250ts))); assertFalse(U.isOldestNodeVersionAtLeast(v241, Arrays.asList(node240, node241, node250, node250ts))); assertTrue(U.isOldestNodeVersionAtLeast(v250, Arrays.asList(node250, node250ts))); assertTrue(U.isOldestNodeVersionAtLeast(v250ts, Arrays.asList(node250, node250ts))); } /** * Test enum. */ private enum TestEnum { E1, E2, E3 } @Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) private @interface Ann1 {} @Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) private @interface Ann2 {} private static class A1 implements I3, I5 {} private static class A2 extends A1 {} private static class A3 implements I5 {} @Ann1 private interface I1 {} private interface I2 extends I1 {} private interface I3 extends I2 {} @Ann2 private interface I4 {} private interface I5 extends I4 {} }
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.reteoo.integrationtests; import org.drools.compiler.Address; import org.drools.compiler.Cheese; import org.drools.compiler.DomainObject; import org.drools.compiler.InsertedObject; import org.drools.compiler.Interval; import org.drools.compiler.Person; import org.drools.compiler.Worker; import org.drools.compiler.integrationtests.SerializationHelper; import org.drools.core.base.ClassObjectType; import org.drools.core.base.DroolsQuery; import org.drools.core.common.InternalFactHandle; import org.drools.core.impl.StatefulKnowledgeSessionImpl; import org.drools.core.reteoo.EntryPointNode; import org.drools.core.reteoo.ObjectTypeNode; import org.drools.core.reteoo.ObjectTypeNode.ObjectTypeNodeMemory; import org.drools.core.runtime.rule.impl.FlatQueryResults; import org.drools.core.spi.ObjectType; import org.junit.Test; import org.kie.api.definition.rule.Rule; import org.kie.api.runtime.conf.QueryListenerOption; import org.kie.api.runtime.rule.FactHandle; import org.kie.api.runtime.rule.LiveQuery; import org.kie.api.runtime.rule.QueryResults; import org.kie.api.runtime.rule.QueryResultsRow; import org.kie.api.runtime.rule.Row; import org.kie.api.runtime.rule.Variable; import org.kie.api.runtime.rule.ViewChangedEventListener; import org.kie.internal.KnowledgeBase; import org.kie.internal.builder.conf.RuleEngineOption; import org.kie.internal.runtime.StatefulKnowledgeSession; import java.io.IOException; import java.util.ArrayList; 
import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; public class QueryTest extends CommonTestMethodBase { @Test public void testQuery() throws Exception { KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("simple_query_test.drl")); StatefulKnowledgeSession session = createKnowledgeSession( kbase ); final Cheese stilton = new Cheese( "stinky", 5 ); session.insert( stilton ); session = SerializationHelper.getSerialisedStatefulKnowledgeSession(session, true); org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "simple query" ); assertEquals( 1, results.size() ); } @Test public void testQueryRemoval() throws Exception { KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("simple_query_test.drl")); StatefulKnowledgeSession session = createKnowledgeSession( kbase ); final Cheese stilton = new Cheese( "stinky", 5 ); session.insert( stilton ); session = SerializationHelper.getSerialisedStatefulKnowledgeSession(session, true); org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "simple query" ); assertEquals( 1, results.size() ); Rule rule = kbase.getKnowledgePackage( "org.drools.compiler.test" ).getRules().iterator().next(); assertEquals( "simple query", rule.getName()); kbase.removeQuery( "org.drools.compiler.test", "simple query" ); assertTrue( kbase.getKnowledgePackage( "org.drools.compiler.test" ).getRules().isEmpty() ); try { results = session.getQueryResults( "simple query" ); } catch ( Exception e ) { assertTrue( e.getMessage().endsWith( "does not exist") ); } } @Test public void testQuery2() throws Exception { KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_Query.drl")); StatefulKnowledgeSession session = createKnowledgeSession( kbase ); session.fireAllRules(); org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( 
"assertedobjquery" ); assertEquals( 1, results.size() ); assertEquals( new InsertedObject( "value1" ), ((InternalFactHandle) results.iterator().next().getFactHandle( "assertedobj" )).getObject() ); } @Test public void testQueryWithParams() throws Exception { KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_QueryWithParams.drl")); StatefulKnowledgeSession session = createKnowledgeSession( kbase ); session.fireAllRules(); org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "assertedobjquery", new String[]{"value1"} ); assertEquals( 1, results.size() ); assertEquals( new InsertedObject( "value1" ), ((InternalFactHandle) results.iterator().next().getFactHandle( "assertedobj" )).getObject() ); results = session.getQueryResults( "assertedobjquery", new String[]{"value3"} ); assertEquals( 0, results.size() ); results = session.getQueryResults( "assertedobjquery2", new String[]{null, "value2"} ); assertEquals( 1, results.size() ); assertEquals( new InsertedObject( "value2" ), ((InternalFactHandle) results.iterator().next().getFactHandle( "assertedobj" )).getObject() ); results = session.getQueryResults( "assertedobjquery2", new String[]{"value3", "value2"} ); assertEquals( 1, results.size() ); assertEquals( new InsertedObject( "value2" ), ((InternalFactHandle) results.iterator().next().getFactHandle( "assertedobj" )).getObject() ); } @Test public void testQueryWithMultipleResultsOnKnowledgeApi() throws Exception { String str = ""; str += "package org.drools.compiler.test \n"; str += "import org.drools.compiler.Cheese \n"; str += "query cheeses \n"; str += " stilton : Cheese(type == 'stilton') \n"; str += " cheddar : Cheese(type == 'cheddar', price == stilton.price) \n"; str += "end\n"; KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str)); StatefulKnowledgeSession session = createKnowledgeSession( kbase ); Cheese stilton1 = new Cheese( "stilton", 1 ); Cheese cheddar1 = new Cheese( 
"cheddar", 1 ); Cheese stilton2 = new Cheese( "stilton", 2 ); Cheese cheddar2 = new Cheese( "cheddar", 2 ); Cheese stilton3 = new Cheese( "stilton", 3 ); Cheese cheddar3 = new Cheese( "cheddar", 3 ); Set set = new HashSet(); List list = new ArrayList(); list.add( stilton1 ); list.add( cheddar1 ); set.add( list ); list = new ArrayList(); list.add( stilton2 ); list.add( cheddar2 ); set.add( list ); list = new ArrayList(); list.add( stilton3 ); list.add( cheddar3 ); set.add( list ); session.insert( stilton1 ); session.insert( stilton2 ); session.insert( stilton3 ); session.insert( cheddar1 ); session.insert( cheddar2 ); session.insert( cheddar3 ); org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "cheeses" ); assertEquals( 3, results.size() ); assertEquals( 2, results.getIdentifiers().length ); Set newSet = new HashSet(); for ( org.kie.api.runtime.rule.QueryResultsRow result : results ) { list = new ArrayList(); list.add( result.get( "stilton" ) ); list.add( result.get( "cheddar" ) ); newSet.add( list ); } assertEquals( set, newSet ); FlatQueryResults flatResults = new FlatQueryResults( ((StatefulKnowledgeSessionImpl) session).getQueryResults( "cheeses" ) ); assertEquals( 3, flatResults.size() ); assertEquals( 2, flatResults.getIdentifiers().length ); newSet = new HashSet(); for ( org.kie.api.runtime.rule.QueryResultsRow result : flatResults ) { list = new ArrayList(); list.add( result.get( "stilton" ) ); list.add( result.get( "cheddar" ) ); newSet.add( list ); } assertEquals( set, newSet ); } @Test public void testTwoQuerries() throws Exception { // @see JBRULES-410 More than one Query definition causes an incorrect // Rete network to be built. 
KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_TwoQuerries.drl"));
StatefulKnowledgeSession session = createKnowledgeSession( kbase );
final Cheese stilton = new Cheese( "stinky", 5 );
session.insert( stilton );
final Person per1 = new Person( "stinker", "smelly feet", 70 );
final Person per2 = new Person( "skunky", "smelly armpits", 40 );
session.insert( per1 );
session.insert( per2 );
// each of the two queries in the DRL should match exactly one inserted fact
org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "find stinky cheeses" );
assertEquals( 1, results.size() );
results = session.getQueryResults( "find pensioners" );
assertEquals( 1, results.size() );
}

// Verifies that a query using an 'exists' CE stays correct as facts are updated:
// the inline comments show the expected europe/america partition after each step.
@Test
public void testDoubleQueryWithExists() throws Exception {
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_DoubleQueryWithExists.drl"));
    StatefulKnowledgeSession session = createKnowledgeSession( kbase );
    final Person p1 = new Person( "p1", "stilton", 20 );
    p1.setStatus( "europe" );
    final FactHandle c1FactHandle = session.insert( p1 );
    final Person p2 = new Person( "p2", "stilton", 30 );
    p2.setStatus( "europe" );
    final FactHandle c2FactHandle = session.insert( p2 );
    final Person p3 = new Person( "p3", "stilton", 40 );
    p3.setStatus( "europe" );
    final FactHandle c3FactHandle = session.insert( p3 );
    session.fireAllRules();
    QueryResults results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 2, results.size() );

    // europe=[ 1, 2 ], america=[ 3 ]
    p3.setStatus( "america" );
    session.update( c3FactHandle, p3 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 1, results.size() );

    // europe=[ 1 ], america=[ 2, 3 ]
    p2.setStatus( "america" );
    session.update( c2FactHandle, p2 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 1, results.size() );

    // europe=[ ], america=[ 1, 2, 3 ]
    p1.setStatus( "america" );
    session.update( c1FactHandle, p1 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 2, results.size() );

    // europe=[ 2 ], america=[ 1, 3 ]
    p2.setStatus( "europe" );
    session.update( c2FactHandle, p2 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 1, results.size() );

    // europe=[ 1, 2 ], america=[ 3 ]
    p1.setStatus( "europe" );
    session.update( c1FactHandle, p1 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 1, results.size() );

    // europe=[ 1, 2, 3 ], america=[ ]
    p3.setStatus( "europe" );
    session.update( c3FactHandle, p3 );
    session.fireAllRules();
    results = session.getQueryResults( "2 persons with the same status" );
    assertEquals( 2, results.size() );
}

// A query using 'collect' must expose the collected list as a bound variable ($list).
@Test
public void testQueryWithCollect() throws Exception {
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_Query.drl"));
    StatefulKnowledgeSession session = createKnowledgeSession( kbase );
    session.fireAllRules();
    org.kie.api.runtime.rule.QueryResults results = session.getQueryResults( "collect objects" );
    assertEquals( 1, results.size() );
    final QueryResultsRow row = results.iterator().next();
    final List list = (List) row.get( "$list" );
    assertEquals( 2, list.size() );
}

// Regression test for a query memory leak: after running a query many times no
// DroolsQuery facts may remain in the ObjectTypeNode memory of the session.
@Test
public void testDroolsQueryCleanup() throws Exception {
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_QueryMemoryLeak.drl"));
    StatefulKnowledgeSession session = createKnowledgeSession( kbase );
    StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
    String workerId = "B1234";
    Worker worker = new Worker();
    worker.setId( workerId );
    FactHandle handle = ksession.insert( worker );
    ksession.fireAllRules();
    assertNotNull( handle );
    Object retractedWorker = null;
    for ( int i = 0; i < 100; i++ ) {
        retractedWorker = (Object) ksession.getQueryResults( "getWorker", new Object[]{workerId} );
    }
    assertNotNull( retractedWorker );
    StatefulKnowledgeSessionImpl sessionImpl = (StatefulKnowledgeSessionImpl) ksession;
    Collection<EntryPointNode> entryPointNodes = sessionImpl.getKnowledgeBase().getRete().getEntryPointNodes().values();
    EntryPointNode defaultEntryPointNode = null;
    for ( EntryPointNode epNode : entryPointNodes ) {
        if ( epNode.getEntryPoint().getEntryPointId().equals( "DEFAULT" ) ) {
            defaultEntryPointNode = epNode;
            break;
        }
    }
    assertNotNull( defaultEntryPointNode );
    Map<ObjectType, ObjectTypeNode> obnodes = defaultEntryPointNode.getObjectTypeNodes();
    ObjectType key = new ClassObjectType( DroolsQuery.class );
    ObjectTypeNode droolsQueryNode = obnodes.get( key );
    // the DroolsQuery node memory must be empty once all query calls have completed
    Iterator<InternalFactHandle> it = ((ObjectTypeNodeMemory) sessionImpl.getNodeMemory( droolsQueryNode )).iterator();
    assertFalse(it.hasNext());
}

// Unification queries (':=') may be invoked with any mix of concrete arguments
// and Variable.v wildcards; each combination filters the result set accordingly.
@Test
public void testQueriesWithVariableUnification() throws Exception {
    String str = "";
    str += "package org.drools.compiler.test \n";
    str += "import org.drools.compiler.Person \n";
    str += "query peeps( String $name, String $likes, int $age ) \n";
    str += " $p : Person( $name := name, $likes := likes, $age := age ) \n";
    str += "end\n";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    Person p1 = new Person( "darth", "stilton", 100 );
    Person p2 = new Person( "yoda", "stilton", 300 );
    Person p3 = new Person( "luke", "brie", 300 );
    Person p4 = new Person( "bobba", "cheddar", 300 );
    ksession.insert( p1 );
    ksession.insert( p2 );
    ksession.insert( p3 );
    ksession.insert( p4 );
    // all arguments unbound -> every Person matches
    org.kie.api.runtime.rule.QueryResults results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, Variable.v, Variable.v} );
    assertEquals( 4, results.size() );
    List names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 4, names.size() );
    assertTrue( names.contains( "luke" ) );
    assertTrue( names.contains( "yoda" ) );
    assertTrue( names.contains( "bobba" ) );
    assertTrue( names.contains( "darth" ) );
    // age bound to 300
    results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, Variable.v, 300} );
    assertEquals( 3, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 3, names.size() );
    assertTrue( names.contains( "luke" ) );
    assertTrue( names.contains( "yoda" ) );
    assertTrue( names.contains( "bobba" ) );
    // likes and age both bound
    results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, "stilton", 300} );
    assertEquals( 1, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 1, names.size() );
    assertTrue( names.contains( "yoda" ) );
    // only likes bound
    results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, "stilton", Variable.v} );
    assertEquals( 2, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 2, names.size() );
    assertTrue( names.contains( "yoda" ) );
    assertTrue( names.contains( "darth" ) );
    // only name bound
    results = ksession.getQueryResults( "peeps", new Object[]{"darth", Variable.v, Variable.v} );
    assertEquals( 1, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 1, names.size() );
    assertTrue( names.contains( "darth" ) );
}

// Unification can also bind the whole pattern ('$p := Person(...)'), so the
// caller may pass a concrete fact as the query argument to select that fact.
@Test
public void testQueriesWithVariableUnificationOnPatterns() throws Exception {
    String str = "";
    str += "package org.drools.compiler.test \n";
    str += "import org.drools.compiler.Person \n";
    str += "query peeps( Person $p, String $name, String $likes, int $age ) \n";
    str += " $p := Person( $name := name, $likes := likes, $age := age ) \n";
    str += "end\n";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    Person p1 = new Person( "darth", "stilton", 100 );
    Person p2 = new Person( "yoda", "stilton", 300 );
    Person p3 = new Person( "luke", "brie", 300 );
    Person p4 = new Person( "bobba", "cheddar", 300 );
    ksession.insert( p1 );
    ksession.insert( p2 );
    ksession.insert( p3 );
    ksession.insert( p4 );
    org.kie.api.runtime.rule.QueryResults results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, Variable.v, Variable.v, Variable.v} );
    assertEquals( 4, results.size() );
    List names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 4, names.size() );
    assertTrue( names.contains( "luke" ) );
    assertTrue( names.contains( "yoda" ) );
    assertTrue( names.contains( "bobba" ) );
    assertTrue( names.contains( "darth" ) );
    // binding the pattern argument to a specific fact narrows the result to it
    results = ksession.getQueryResults( "peeps", new Object[]{p1, Variable.v, Variable.v, Variable.v} );
    assertEquals( 1, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertEquals( 1, names.size() );
    assertTrue( names.contains( "darth" ) );
}

// Unification works against nested property paths (address.street).
@Test
public void testQueriesWithVariableUnificationOnNestedFields() throws Exception {
    String str = "";
    str += "package org.drools.compiler.test \n";
    str += "import org.drools.compiler.Person \n";
    str += "query peeps( String $name, String $likes, String $street) \n";
    str += " $p : Person( $name := name, $likes := likes, $street := address.street ) \n";
    str += "end\n";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    Person p1 = new Person( "darth", "stilton", 100 );
    p1.setAddress( new Address( "s1" ) );
    Person p2 = new Person( "yoda", "stilton", 300 );
    p2.setAddress( new Address( "s2" ) );
    ksession.insert( p1 );
    ksession.insert( p2 );
    org.kie.api.runtime.rule.QueryResults results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, Variable.v, Variable.v} );
    assertEquals( 2, results.size() );
    List names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertTrue( names.contains( "yoda" ) );
    assertTrue( names.contains( "darth" ) );
    // street bound to "s1" -> only darth matches
    results = ksession.getQueryResults( "peeps", new Object[]{Variable.v, Variable.v, "s1"} );
    assertEquals( 1, results.size() );
    names = new ArrayList();
    for ( org.kie.api.runtime.rule.QueryResultsRow row : results ) {
        names.add( ((Person) row.get( "$p" )).getName() );
    }
    assertTrue( names.contains( "darth" ) );
}

// Exercises the live (open) query API: the ViewChangedEventListener must see
// rowInserted/rowDeleted/rowUpdated events as matching facts change, and
// closing the query must emit a delete event for every remaining row.
@Test
public void testOpenQuery() throws Exception {
    if( CommonTestMethodBase.phreak == RuleEngineOption.RETEOO ) {
        return; // Disabled due to phreak, as test is order specific
    }
    String str = "";
    str += "package org.drools.compiler.test \n";
    str += "import org.drools.compiler.Cheese \n";
    str += "query cheeses(String $type1, String $type2) \n";
    str += " stilton : Cheese(type == $type1, $sprice : price) \n";
    str += " cheddar : Cheese(type == $type2, $cprice : price == stilton.price) \n";
    str += "end\n";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    Cheese stilton1 = new Cheese( "stilton", 1 );
    Cheese cheddar1 = new Cheese( "cheddar", 1 );
    Cheese stilton2 = new Cheese( "stilton", 2 );
    Cheese cheddar2 = new Cheese( "cheddar", 2 );
    Cheese stilton3 = new Cheese( "stilton", 3 );
    Cheese cheddar3 = new Cheese( "cheddar", 3 );
    FactHandle s1Fh = ksession.insert( stilton1 );
    ksession.insert( stilton2 );
    ksession.insert( stilton3 );
    ksession.insert( cheddar1 );
    ksession.insert( cheddar2 );
    FactHandle c3Fh = ksession.insert( cheddar3 );
    final List<Object[]> updated = new ArrayList<Object[]>();
    final List<Object[]> removed = new ArrayList<Object[]>();
    final List<Object[]> added = new ArrayList<Object[]>();
    // records each event's row as [stilton, cheddar, $sprice, $cprice, $type1, $type2]
    ViewChangedEventListener listener = new ViewChangedEventListener() {
        public void rowUpdated( Row row ) {
            Object[] array = new Object[6];
            array[0] = row.get( "stilton" );
            array[1] = row.get( "cheddar" );
            array[2] = row.get( "$sprice" );
            array[3] = row.get( "$cprice" );
            array[4] = row.get( "$type1" );
            array[5] = row.get( "$type2" );
            updated.add( array );
        }

        public void rowDeleted( Row row ) {
            Object[] array = new Object[6];
            array[0] = row.get( "stilton" );
            array[1] = row.get( "cheddar" );
            array[2] = row.get( "$sprice" );
            array[3] = row.get( "$cprice" );
            array[4] = row.get( "$type1" );
            array[5] = row.get( "$type2" );
            removed.add( array );
        }

        public void rowInserted( Row row ) {
            Object[] array = new Object[6];
            array[0] = row.get( "stilton" );
            array[1] = row.get( "cheddar" );
            array[2] = row.get( "$sprice" );
            array[3] = row.get( "$cprice" );
            array[4] = row.get( "$type1" );
            array[5] = row.get( "$type2" );
            added.add( array );
        }
    };
    // Open the LiveQuery
    LiveQuery query = ksession.openLiveQuery( "cheeses", new Object[]{"stilton", "cheddar"}, listener );
    ksession.fireAllRules();
    // Assert that on opening we have three rows added
    assertEquals( 3, added.size() );
    assertEquals( 0, removed.size() );
    assertEquals( 0, updated.size() );
    // Assert that the identifiers where retrievable
    assertSame( stilton1, added.get( 2 )[0] );
    assertSame( cheddar1, added.get( 2 )[1] );
    assertEquals( 1, added.get( 2 )[2] );
    assertEquals( 1, added.get( 2 )[3] );
    assertEquals( "stilton", added.get( 2 )[4] );
    assertEquals( "cheddar", added.get( 2 )[5] );
    // And that we have correct values from those rows
    assertEquals( 3, added.get( 0 )[3] );
    assertEquals( 2, added.get( 1 )[3] );
    assertEquals( 1, added.get( 2 )[3] );
    // Do an update that causes a match to become untrue, thus triggering a removed
    cheddar3.setPrice( 4 );
    ksession.update( c3Fh, cheddar3 );
    ksession.fireAllRules();
    assertEquals( 3, added.size() );
    assertEquals( 1, removed.size() );
    assertEquals( 0, updated.size() );
    assertEquals( 4, removed.get( 0 )[3] );
    // Now make that partial true again, and thus another added
    cheddar3.setPrice( 3 );
    ksession.update( c3Fh, cheddar3 );
    ksession.fireAllRules();
    assertEquals( 4, added.size() );
    assertEquals( 1, removed.size() );
    assertEquals( 0, updated.size() );
    assertEquals( 3, added.get( 3 )[3] );
    // check a standard update
    cheddar3.setOldPrice( 0 );
    ksession.update( c3Fh, cheddar3 );
    ksession.fireAllRules();
    assertEquals( 4, added.size() );
    assertEquals( 1, removed.size() );
    assertEquals( 1, updated.size() );
    assertEquals( 3, updated.get( 0 )[3] );
    // Check a standard retract
    ksession.retract( s1Fh );
    ksession.fireAllRules();
    assertEquals( 4, added.size() );
    assertEquals( 2, removed.size() );
    assertEquals( 1, updated.size() );
    assertEquals( 1, removed.get( 1 )[3] );
    // Close the query, we should get removed events for each row
    query.close();
    ksession.fireAllRules();
    assertEquals( 4, added.size() );
    assertEquals( 4, removed.size() );
    assertEquals( 1, updated.size() );
    assertEquals( 2, removed.get( 3 )[3] );
    assertEquals( 3, removed.get( 2 )[3] );
    // Check that updates no longer have any impact.
    ksession.update( c3Fh, cheddar3 );
    assertEquals( 4, added.size() );
    assertEquals( 4, removed.size() );
    assertEquals( 1, updated.size() );
}

@Test
public void testStandardQueryListener() throws IOException, ClassNotFoundException {
    runQueryListenerTest( QueryListenerOption.STANDARD );
}

@Test
public void testNonCloningQueryListener() throws IOException, ClassNotFoundException {
    runQueryListenerTest( QueryListenerOption.LIGHTWEIGHT );
}

// Shared driver for the two query-listener tests: inserts 10000 cheeses (half
// "stilton") and runs the type-matching query 100 times, expecting 5000 hits
// on every run regardless of the listener option.
public void runQueryListenerTest( QueryListenerOption option ) throws IOException, ClassNotFoundException {
    String str = "";
    str += "package org.drools.compiler.integrationtests\n";
    str += "import " + Cheese.class.getCanonicalName() + " \n";
    str += "query cheeses(String $type) \n";
    str += " $cheese : Cheese(type == $type) \n";
    str += "end\n";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase, option );
    // insert some data into the session
    for ( int i = 0; i < 10000; i++ ) {
        ksession.insert( new Cheese( i % 2 == 0 ? "stilton" : "brie" ) );
    }
    // query the session
    List<Cheese> cheeses;
    for ( int i = 0; i < 100; i++ ) {
        org.kie.api.runtime.rule.QueryResults queryResults = ksession.getQueryResults( "cheeses", new Object[]{"stilton"} );
        cheeses = new ArrayList<Cheese>();
        for ( QueryResultsRow row : queryResults ) {
            cheeses.add( (Cheese) row.get( "$cheese" ) );
        }
        assertEquals( 5000, cheeses.size() );
    }
}

@Test
public void testQueryWithEval() throws IOException, ClassNotFoundException {
    // [Regression in 5.2.0.M2]: NPE during rule evaluation on MVELPredicateExpression.evaluate(MVELPredicateExpression.java:82)
    String str = "package org.drools.compiler.integrationtests\n" +
            "import " + DomainObject.class.getCanonicalName() + " \n" +
            "query queryWithEval \n" +
            " $do: DomainObject()\n" +
            " not DomainObject( id == $do.id, eval(interval.isAfter($do.getInterval())))\n" +
            "end";
    KnowledgeBase kbase = SerializationHelper.serializeObject(loadKnowledgeBaseFromString(str));
    StatefulKnowledgeSession ksession = createKnowledgeSession( kbase );
    DomainObject do1 = new DomainObject();
    do1.setId( 1 );
    do1.setInterval( new Interval( 10, 5 ) );
    DomainObject do2 = new DomainObject();
    do2.setId( 1 );
    do2.setInterval( new Interval( 20, 5 ) );
    ksession.insert( do1 );
    ksession.insert( do2 );
    // the query keeps only the DomainObject with no later interval for the same id
    org.kie.api.runtime.rule.QueryResults results = ksession.getQueryResults( "queryWithEval" );
    assertEquals( 1, results.size() );
    assertEquals( do2, results.iterator().next().get( "$do" ) );
    ksession.dispose();
}
}
package org.keycloak.testsuite.cli;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Test helper that spawns the {@code kcreg} CLI tool as an external OS process,
 * captures its stdout/stderr, optionally feeds it interactive stdin, and waits
 * for completion with a timeout.
 *
 * @author <a href="mailto:mstrukel@redhat.com">Marko Strukelj</a>
 */
public class KcRegExec {

    public static final String WORK_DIR = System.getProperty("user.dir") + "/target/containers/keycloak-client-tools";

    public static final OsArch OS_ARCH = OsUtils.determineOSAndArch();

    public static final String CMD = OS_ARCH.isWindows() ? "kcreg.bat" : "kcreg.sh";

    // maximum time in ms to wait for process exit / for expected stdout content
    private long waitTimeout = 30000;

    private Process process;

    private int exitCode = -1;

    // when true (the default) process output is echoed to System.out as it arrives
    private boolean logStreams = Boolean.valueOf(System.getProperty("cli.log.output", "true"));

    // when set, captured streams are dumped after completion (forced on failure)
    private boolean dumpStreams;

    private String workDir = WORK_DIR;

    private String env;

    private String argsLine;

    private ByteArrayOutputStream stdout = new ByteArrayOutputStream();

    private ByteArrayOutputStream stderr = new ByteArrayOutputStream();

    private InputStream stdin = new InteractiveInputStream();

    private Throwable err;

    private KcRegExec(String workDir, String argsLine, InputStream stdin) {
        this(workDir, argsLine, null, stdin);
    }

    private KcRegExec(String workDir, String argsLine, String env, InputStream stdin) {
        if (workDir != null) {
            this.workDir = workDir;
        }
        this.argsLine = argsLine;
        this.env = env;
        if (stdin != null) {
            this.stdin = stdin;
        }
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    /** Convenience: runs {@code kcreg} with the given argument line and waits for completion. */
    public static KcRegExec execute(String args) {
        return newBuilder()
                .argsLine(args)
                .execute();
    }

    /** Starts the process and, unless startup already failed, blocks until it completes. */
    public void execute() {
        executeAsync();
        if (err == null) {
            waitCompletion();
        }
    }

    /** Starts the process and the stream-pump threads without waiting for completion. */
    public void executeAsync() {
        try {
            if (OS_ARCH.isWindows()) {
                String cmd = (env != null ? "set " + env + " & " : "") + "bin\\" + CMD + " " + fixQuotes(argsLine);
                System.out.println("Executing: cmd.exe /c " + cmd);
                process = Runtime.getRuntime().exec(new String[]{"cmd.exe", "/c", cmd}, null, new File(workDir));
            } else {
                String cmd = (env != null ? env + " " : "") + "bin/" + CMD + " " + argsLine;
                System.out.println("Executing: sh -c " + cmd);
                process = Runtime.getRuntime().exec(new String[]{"sh", "-c", cmd}, null, new File(workDir));
            }

            new StreamReaderThread(process.getInputStream(), logStreams ? new LoggingOutputStream("STDOUT", stdout) : stdout)
                    .start();

            new StreamReaderThread(process.getErrorStream(), logStreams ? new LoggingOutputStream("STDERR", stderr) : stderr)
                    .start();

            new StreamReaderThread(stdin, process.getOutputStream())
                    .start();

        } catch (Throwable t) {
            err = t;
        }
    }

    // Rewrites single-quoted arguments into escaped double-quoted form for cmd.exe
    private String fixQuotes(String argsLine) {
        argsLine = argsLine + " ";
        argsLine = argsLine.replaceAll("\"", "\\\\\"");
        argsLine = argsLine.replaceAll(" '", " \"");
        argsLine = argsLine.replaceAll("' ", "\" ");
        return argsLine;
    }

    /**
     * Waits up to {@code waitTimeout} ms for the process to exit, records the exit
     * code, force-kills the process on timeout, and dumps captured streams on
     * failure when they were not already being logged live.
     */
    public void waitCompletion() {
        //if (stdin instanceof InteractiveInputStream) {
        //    ((InteractiveInputStream) stdin).close();
        //}
        try {
            if (process.waitFor(waitTimeout, TimeUnit.MILLISECONDS)) {
                exitCode = process.exitValue();
                if (exitCode != 0) {
                    dumpStreams = true;
                }
            } else {
                if (process.isAlive()) {
                    process.destroyForcibly();
                }
                throw new RuntimeException("Timeout after " + (waitTimeout / 1000) + " seconds.");
            }
        } catch (InterruptedException e) {
            dumpStreams = true;
            throw new RuntimeException("Interrupted ...", e);
        } catch (Throwable t) {
            dumpStreams = true;
            err = t;
        } finally {
            if (!logStreams && dumpStreams) {
                // best-effort diagnostic dump; errors here must not mask the real failure
                try {
                    System.out.println("STDOUT: ");
                    copyStream(new ByteArrayInputStream(stdout.toByteArray()), System.out);
                    System.out.println("STDERR: ");
                    copyStream(new ByteArrayInputStream(stderr.toByteArray()), System.out);
                } catch (Exception ignored) {
                }
            }
        }
    }

    public int exitCode() {
        return exitCode;
    }

    public Throwable error() {
        return err;
    }

    public InputStream stdout() {
        return new ByteArrayInputStream(stdout.toByteArray());
    }

    public List<String> stdoutLines() {
        return parseStreamAsLines(new ByteArrayInputStream(stdout.toByteArray()));
    }

    public String stdoutString() {
        return new String(stdout.toByteArray());
    }

    public InputStream stderr() {
        return new ByteArrayInputStream(stderr.toByteArray());
    }

    public List<String> stderrLines() {
        return parseStreamAsLines(new ByteArrayInputStream(stderr.toByteArray()));
    }

    public String stderrString() {
        return new String(stderr.toByteArray());
    }

    static List<String> parseStreamAsLines(InputStream stream) {
        List<String> lines = new ArrayList<>();
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
            String line;
            while ((line = reader.readLine()) != null) {
                lines.add(line);
            }
            return lines;
        } catch (IOException e) {
            throw new RuntimeException("Unexpected I/O error", e);
        }
    }

    /** Polls captured stdout until it contains {@code content} or {@code waitTimeout} ms elapse. */
    public void waitForStdout(String content) {
        long start = System.currentTimeMillis();
        while (System.currentTimeMillis() - start < waitTimeout) {
            if (stdoutString().indexOf(content) != -1) {
                return;
            }
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                throw new RuntimeException("Interrupted ...", e);
            }
        }
        // message fixed: previously read "Timed while waiting ..."
        throw new RuntimeException("Timed out while waiting for content to appear in stdout");
    }

    /** Pushes bytes to the process's stdin; only valid when stdin is interactive. */
    public void sendToStdin(String s) {
        if (stdin instanceof InteractiveInputStream) {
            ((InteractiveInputStream) stdin).pushBytes(s.getBytes());
        } else {
            throw new RuntimeException("Can't push to stdin - not interactive");
        }
    }

    // Pumps one stream into another on a dedicated thread, closing the target when done.
    static class StreamReaderThread extends Thread {

        private InputStream is;
        private OutputStream os;

        StreamReaderThread(InputStream is, OutputStream os) {
            this.is = is;
            this.os = os;
        }

        public void run() {
            try {
                copyStream(is, os);
            } catch (IOException e) {
                throw new RuntimeException("Unexpected I/O error", e);
            } finally {
                try {
                    os.close();
                } catch (IOException ignored) {
                    System.err.print("IGNORED: error while closing output stream: ");
                    ignored.printStackTrace();
                }
            }
        }
    }

    /** Copies and flushes {@code is} into {@code os}; closes the source, not the target. */
    static void copyStream(InputStream is, OutputStream os) throws IOException {
        byte [] buf = new byte[8192];
        try (InputStream iss = is) {
            int c;
            while ((c = iss.read(buf)) != -1) {
                os.write(buf, 0, c);
                os.flush();
            }
        }
    }

    public static class Builder {

        private String workDir;
        private String argsLine;
        private InputStream stdin;
        private String env;
        private boolean dumpStreams;

        public Builder workDir(String path) {
            this.workDir = path;
            return this;
        }

        public Builder argsLine(String cmd) {
            this.argsLine = cmd;
            return this;
        }

        public Builder stdin(InputStream is) {
            this.stdin = is;
            return this;
        }

        public Builder env(String env) {
            this.env = env;
            return this;
        }

        public Builder fullStreamDump() {
            this.dumpStreams = true;
            return this;
        }

        public KcRegExec execute() {
            KcRegExec exe = new KcRegExec(workDir, argsLine, env, stdin);
            exe.dumpStreams = dumpStreams;
            exe.execute();
            return exe;
        }

        public KcRegExec executeAsync() {
            KcRegExec exe = new KcRegExec(workDir, argsLine, env, stdin);
            exe.dumpStreams = dumpStreams;
            exe.executeAsync();
            return exe;
        }
    }

    static class NullInputStream extends InputStream {

        @Override
        public int read() throws IOException {
            return -1;
        }
    }

    /**
     * Blocking InputStream fed programmatically via {@link #pushBytes(byte[])}.
     * Readers block until bytes are pushed or the stream is closed.
     */
    static class InteractiveInputStream extends InputStream {

        private LinkedList<Byte> queue = new LinkedList<>();

        private Thread consumer;

        private boolean closed;

        @Override
        public int read(byte b[]) throws IOException {
            return read(b, 0, b.length);
        }

        @Override
        public synchronized int read(byte[] b, int off, int len) throws IOException {
            Byte current = null;
            int rc = 0;
            try {
                consumer = Thread.currentThread();
                do {
                    current = queue.poll();
                    if (current == null) {
                        if (rc > 0) {
                            // return what we already have rather than blocking mid-read
                            return rc;
                        } else {
                            do {
                                if (closed) {
                                    return -1;
                                }
                                wait();
                            } while ((current = queue.poll()) == null);
                        }
                    }
                    b[off + rc] = current;
                    rc++;
                } while (rc < len);
            } catch (InterruptedException e) {
                throw new InterruptedIOException("Signalled to exit");
            } finally {
                consumer = null;
            }
            return rc;
        }

        @Override
        public long skip(long n) throws IOException {
            return super.skip(n);
        }

        @Override
        public int available() throws IOException {
            return super.available();
        }

        @Override
        public synchronized void mark(int readlimit) {
            super.mark(readlimit);
        }

        @Override
        public synchronized void reset() throws IOException {
            super.reset();
        }

        @Override
        public boolean markSupported() {
            return super.markSupported();
        }

        @Override
        public synchronized int read() throws IOException {
            if (closed) {
                return -1;
            }
            // when input is available pass it on
            Byte current;
            try {
                consumer = Thread.currentThread();
                while ((current = queue.poll()) == null) {
                    wait();
                    if (closed) {
                        return -1;
                    }
                }
            } catch (InterruptedException e) {
                throw new InterruptedIOException("Signalled to exit");
            } finally {
                consumer = null;
            }
            return current;
        }

        @Override
        public synchronized void close() {
            // fixed: removed leftover debug 'new RuntimeException("IIS || close").printStackTrace()'
            // that printed a spurious stack trace on every close
            closed = true;
            // interrupt a blocked reader so it observes the close
            if (consumer != null) {
                consumer.interrupt();
            }
        }

        public synchronized void pushBytes(byte [] buff) {
            for (byte b : buff) {
                queue.add(b);
            }
            notify();
        }
    }

    /** Tees everything written to the delegate stream to System.out, one line at a time. */
    static class LoggingOutputStream extends FilterOutputStream {

        private ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        private String name;

        public LoggingOutputStream(String name, OutputStream os) {
            super(os);
            this.name = name;
        }

        @Override
        public void write(int b) throws IOException {
            super.write(b);
            if (b == 10) {
                // newline: flush the buffered line to the log
                log();
            } else {
                buffer.write(b);
            }
        }

        @Override
        public void write(byte[] buf) throws IOException {
            write(buf, 0, buf.length);
        }

        @Override
        public void write(byte[] buf, int offs, int len) throws IOException {
            for (int i = 0; i < len; i++) {
                write(buf[offs+i]);
            }
        }

        @Override
        public void close() throws IOException {
            super.close();
            if (buffer.size() > 0) {
                log();
            }
        }

        private void log() {
            String log = new String(buffer.toByteArray());
            buffer.reset();
            System.out.println("[" + name + "] " + log);
        }
    }
}
package hex.word2vec;

import water.DKV;
import water.Job;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.parser.BufferedString;
import hex.word2vec.Word2VecModel.*;
import water.util.ArrayUtils;
import water.util.IcedHashMap;
import water.util.IcedHashMapGeneric;
import water.util.IcedLong;

import java.util.Iterator;

/**
 * Distributed map/reduce task that trains word2vec weight matrices over chunks of
 * a text column, supporting both skip-gram and CBOW word models with hierarchical
 * softmax (Huffman binary tree codes/points supplied via {@code HBWTree}).
 */
public class WordVectorTrainer extends MRTask<WordVectorTrainer> {
  private static final int MAX_SENTENCE_LEN = 1000;
  private static final int EXP_TABLE_SIZE = 1000;
  private static final int MAX_EXP = 6;
  private static final float[] _expTable = calcExpTable();
  private static final float LEARNING_RATE_MIN_FACTOR = 0.0001F; // learning rate stops decreasing at (initLearningRate * this factor)

  // Job
  private final Job<Word2VecModel> _job;

  // Params
  private final Word2Vec.WordModel _wordModel;
  private final int _wordVecSize, _windowSize, _epochs;
  private final float _initLearningRate;
  private final float _sentSampleRate;
  private final long _vocabWordCount;

  // Model IN
  private final Key<Vocabulary> _vocabKey;
  private final Key<WordCounts> _wordCountsKey;
  private final Key<HBWTree> _treeKey;
  private final long _prevTotalProcessedWords;

  // Model IN & OUT
  // _syn0 represents the matrix of synaptic weights connecting the input layer of the NN to the hidden layer,
  // similarly _syn1 corresponds to the weight matrix of the synapses connecting the hidden layer to the output layer
  // both matrices are represented in a 1D array, where M[i,j] == array[i * VEC_SIZE + j]
  float[] _syn0, _syn1;
  long _processedWords = 0L;

  // Node-Local (Shared)
  IcedLong _nodeProcessedWords; // mutable long, approximates the total number of words processed by this node

  private transient IcedHashMapGeneric<BufferedString, Integer> _vocab;
  private transient IcedHashMap<BufferedString, IcedLong> _wordCounts;
  private transient int[][] _HBWTCode;
  private transient int[][] _HBWTPoint;

  private float _curLearningRate;
  private long _seed = System.nanoTime();

  // Copies the parameters and the current weight matrices from the model info;
  // the learning rate is resumed from where the previous training round left it.
  public WordVectorTrainer(Job<Word2VecModel> job, Word2VecModelInfo input) {
    super(null);
    _job = job;
    _treeKey = input._treeKey;
    _vocabKey = input._vocabKey;
    _wordCountsKey = input._wordCountsKey;
    // Params
    _wordModel = input.getParams()._word_model;
    _wordVecSize = input.getParams()._vec_size;
    _windowSize = input.getParams()._window_size;
    _sentSampleRate = input.getParams()._sent_sample_rate;
    _epochs = input.getParams()._epochs;
    _initLearningRate = input.getParams()._init_learning_rate;
    _vocabWordCount = input._vocabWordCount;
    _prevTotalProcessedWords = input._totalProcessedWords;
    _syn0 = input._syn0;
    _syn1 = input._syn1;
    _curLearningRate = calcLearningRate(_initLearningRate, _epochs, _prevTotalProcessedWords, _vocabWordCount);
  }

  // Fetches the shared vocabulary, word counts and Huffman tree from the DKV on each node.
  @Override protected void setupLocal() {
    _vocab = ((Vocabulary) DKV.getGet(_vocabKey))._data;
    _wordCounts = ((WordCounts) DKV.getGet(_wordCountsKey))._data;
    HBWTree t = DKV.getGet(_treeKey);
    _HBWTCode = t._code;
    _HBWTPoint = t._point;
    _nodeProcessedWords = new IcedLong(0L);
  }

  // Precompute the exp() table
  private static float[] calcExpTable() {
    float[] expTable = new float[EXP_TABLE_SIZE];
    for (int i = 0; i < EXP_TABLE_SIZE; i++) {
      expTable[i] = (float) Math.exp((i / (float) EXP_TABLE_SIZE * 2 - 1) * MAX_EXP);
      expTable[i] = expTable[i] / (expTable[i] + 1); // Precompute f(x) = x / (x + 1)
    }
    return expTable;
  }

  /**
   * Trains the weight matrices over one chunk: iterates its sentences and, for
   * each word, trains on a randomly shrunk context window (winSizeMod) using
   * either skip-gram or CBOW. The learning rate is refreshed every 10000 words.
   */
  @Override public void map(Chunk chk) {
    final int winSize = _windowSize, vecSize = _wordVecSize;
    float[] neu1 = new float[vecSize];
    float[] neu1e = new float[vecSize];
    ChunkSentenceIterator sentIter = new ChunkSentenceIterator(chk);
    int wordCount = 0;
    while (sentIter.hasNext()) {
      int sentLen = sentIter.nextLength();
      int[] sentence = sentIter.next();
      for (int sentIdx = 0; sentIdx < sentLen; sentIdx++) {
        int curWord = sentence[sentIdx];
        int bagSize = 0;
        if (_wordModel == Word2Vec.WordModel.CBOW) {
          // reset the context accumulator and error vector for this target word
          for (int j = 0; j < vecSize; j++) neu1[j] = 0;
          for (int j = 0; j < vecSize; j++) neu1e[j] = 0;
        }
        // for each item in the window (except curWord), update neu1 vals
        int winSizeMod = cheapRandInt(winSize);
        for (int winIdx = winSizeMod; winIdx < winSize * 2 + 1 - winSizeMod; winIdx++) {
          if (winIdx != winSize) { // skips curWord in sentence
            int winWordSentIdx = sentIdx - winSize + winIdx;
            if (winWordSentIdx < 0 || winWordSentIdx >= sentLen) continue;
            int winWord = sentence[winWordSentIdx];
            if (_wordModel == Word2Vec.WordModel.SkipGram)
              skipGram(curWord, winWord, neu1e);
            else { // CBOW: accumulate the context words' input vectors
              for (int j = 0; j < vecSize; j++) neu1[j] += _syn0[j + winWord * vecSize];
              bagSize++;
            }
          }
        } // end for each item in the window
        if (_wordModel == Word2Vec.WordModel.CBOW && bagSize > 0) {
          CBOW(curWord, sentence, sentIdx, sentLen, winSizeMod, bagSize, neu1, neu1e);
        }
        wordCount++;
        // update learning rate
        if (wordCount % 10000 == 0) {
          _nodeProcessedWords._val += 10000;
          long totalProcessedWordsEst = _prevTotalProcessedWords + _nodeProcessedWords._val;
          _curLearningRate = calcLearningRate(_initLearningRate, _epochs, totalProcessedWordsEst, _vocabWordCount);
        }
      } // for each item in the sentence
    } // while more sentences
    _processedWords = wordCount;
    _nodeProcessedWords._val += wordCount % 10000; // account for the remainder not yet added above
    _job.update(1);
  }

  // Merges two trainers by averaging their weight matrices, weighted by the
  // number of words each processed.
  @Override public void reduce(WordVectorTrainer other) {
    _processedWords += other._processedWords;
    if (_syn0 != other._syn0) { // other task worked on a different syn0
      float c = (float) other._processedWords / _processedWords;
      ArrayUtils.add(1.0f - c, _syn0, c, other._syn0);
      ArrayUtils.add(1.0f - c, _syn1, c, other._syn1);
      // for diagnostics only
      _nodeProcessedWords._val += other._nodeProcessedWords._val;
    }
  }

  // Skip-gram step: trains the context word's input vector against the target word.
  private void skipGram(int curWord, int winWord, float[] neu1e) {
    final int vecSize = _wordVecSize;
    final int l1 = winWord * vecSize;
    for (int i = 0; i < vecSize; i++) neu1e[i] = 0;
    hierarchicalSoftmaxSG(curWord, l1, neu1e);
    // Learned weights input -> hidden
    for (int i = 0; i < vecSize; i++) _syn0[i + l1] += neu1e[i];
  }

  // Hierarchical-softmax update for skip-gram: walks the target word's Huffman
  // code, accumulating the error into neu1e and updating _syn1 in place.
  private void hierarchicalSoftmaxSG(final int targetWord, final int l1, float[] neu1e) {
    final int vecSize = _wordVecSize, tWrdCodeLen = _HBWTCode[targetWord].length;
    final float alpha = _curLearningRate;
    for (int i = 0; i < tWrdCodeLen; i++) {
      int l2 = _HBWTPoint[targetWord][i] * vecSize;
      float f = 0;
      // Propagate hidden -> output (calc sigmoid)
      for (int j = 0; j < vecSize; j++) f += _syn0[j + l1] * _syn1[j + l2];
      // skip saturated activations (gradient is effectively zero outside [-MAX_EXP, MAX_EXP])
      if (f <= -MAX_EXP) continue;
      else if (f >= MAX_EXP) continue;
      else f = _expTable[(int) ((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))];
      float gradient = (1 - _HBWTCode[targetWord][i] - f) * alpha;
      // Propagate errors output -> hidden
      for (int j = 0; j < vecSize; j++) neu1e[j] += gradient * _syn1[j + l2];
      // Learn weights hidden -> output
      for (int j = 0; j < vecSize; j++) _syn1[j + l2] += gradient * _syn0[j + l1];
    }
  }

  // CBOW step: averages the accumulated context vectors (neu1), trains against the
  // target word, then distributes the error back onto each context word's vector.
  private void CBOW(
      int curWord, int[] sentence, int sentIdx, int sentLen,
      int winSizeMod, int bagSize, float[] neu1, float[] neu1e
  ) {
    int winWordSentIdx, winWord;
    final int vecSize = _wordVecSize, winSize = _windowSize;
    // NOTE(review): map() bounds the same window with 'winSize * 2 + 1 - winSizeMod';
    // here the bound uses '- winSize' instead — confirm this asymmetry is intentional.
    final int curWinSize = winSize * 2 + 1 - winSize;
    for (int i = 0; i < vecSize; i++) neu1[i] /= bagSize; // average of the context ("bag") vectors
    hierarchicalSoftmaxCBOW(curWord, neu1, neu1e);
    // hidden -> in
    for (int winIdx = winSizeMod; winIdx < curWinSize; winIdx++) {
      if (winIdx != winSize) {
        winWordSentIdx = sentIdx - winSize + winIdx;
        if (winWordSentIdx < 0 || winWordSentIdx >= sentLen) continue;
        winWord = sentence[winWordSentIdx];
        for (int i = 0; i < vecSize; i++) _syn0[i + winWord * vecSize] += neu1e[i];
      }
    }
  }

  // Hierarchical-softmax update for CBOW: like the SG variant, but the hidden
  // layer activation is the averaged context vector neu1 rather than a single word.
  private void hierarchicalSoftmaxCBOW(final int targetWord, float[] neu1, float[] neu1e) {
    final int vecSize = _wordVecSize, tWrdCodeLen = _HBWTCode[targetWord].length;
    final float alpha = _curLearningRate;
    float gradient, f = 0;
    int l2;
    for (int i = 0; i < tWrdCodeLen; i++, f = 0) {
      l2 = _HBWTPoint[targetWord][i] * vecSize;
      // Propagate hidden -> output (calc sigmoid)
      for (int j = 0; j < vecSize; j++) f += neu1[j] * _syn1[j + l2];
      if (f <= -MAX_EXP) continue;
      else if (f >= MAX_EXP) continue;
      else f = _expTable[(int) ((f + MAX_EXP) * (EXP_TABLE_SIZE / MAX_EXP / 2))];
      gradient = (1 - _HBWTCode[targetWord][i] - f) * alpha;
      // Propagate errors output -> hidden
      for (int j = 0; j < vecSize; j++) neu1e[j] += gradient * _syn1[j + l2];
      // Learn weights hidden -> output
      for (int j = 0; j < vecSize; j++) _syn1[j + l2] += gradient * neu1[j];
    }
  }

  /**
   * Calculates a new global learning rate for the next round
   * of map/reduce calls.
   * The learning rate is a coefficient that controls the amount that
   * newly learned information affects current learned information.
   */
  private static float calcLearningRate(float initLearningRate, int epochs, long totalProcessed, long vocabWordCount) {
    float rate = initLearningRate * (1 - totalProcessed / (float) (epochs * vocabWordCount + 1));
    if (rate < initLearningRate * LEARNING_RATE_MIN_FACTOR)
      rate = initLearningRate * LEARNING_RATE_MIN_FACTOR;
    return rate;
  }

  // Writes the trained weights and the updated processed-word count back to the model info.
  public void updateModelInfo(Word2VecModelInfo modelInfo) {
    modelInfo._syn0 = _syn0;
    modelInfo._syn1 = _syn1;
    modelInfo._totalProcessedWords += _processedWords;
  }

  /**
   * This is cheap and moderate in quality.
   *
   * @param max - Upper range limit.
   * @return int between 0-(max-1).
   */
  private int cheapRandInt(int max) {
    // xorshift-style PRNG state update
    _seed ^= ( _seed << 21);
    _seed ^= ( _seed >>> 35);
    _seed ^= ( _seed << 4);
    int r = (int) _seed % max;
    return r > 0 ? r : -r;
  }

  /**
   * Iterates the chunk as sentences of vocabulary-word indices: a sentence ends
   * at an NA row or after MAX_SENTENCE_LEN words, and is terminated by -1.
   * Optionally sub-samples frequent words while building the sentence.
   * NOTE: the same int[] buffer is returned by every next() call, and next()
   * returns null (rather than throwing) when exhausted.
   */
  private class ChunkSentenceIterator implements Iterator<int[]> {

    private Chunk _chk;

    private int _pos = 0;
    private int _len = -1;
    private int[] _sent = new int[MAX_SENTENCE_LEN + 1];

    private ChunkSentenceIterator(Chunk chk) { _chk = chk; }

    @Override public boolean hasNext() { return nextLength() >= 0; }

    // Builds the next sentence lazily and caches its length until next() consumes it.
    private int nextLength() {
      if (_len >= 0) return _len;
      if (_pos >= _chk._len) return -1;
      _len = 0;
      BufferedString tmp = new BufferedString();
      for (; _pos < _chk._len && ! _chk.isNA(_pos) && _len < MAX_SENTENCE_LEN; _pos++) {
        BufferedString str = _chk.atStr(tmp, _pos);
        if (! _vocab.containsKey(str)) continue; // not in the vocab, skip
        if (_sentSampleRate > 0) { // sub-sampling while creating a sentence
          long count = _wordCounts.get(str)._val;
          float ran = (float) ((Math.sqrt(count / (_sentSampleRate * _vocabWordCount)) + 1) * (_sentSampleRate * _vocabWordCount) / count);
          if (ran * 65536 < cheapRandInt(0xFFFF)) continue; // randomly drop frequent words
        }
        _sent[_len++] = _vocab.get(tmp);
      }
      _sent[_len] = -1; // sentinel terminator
      _pos++;
      return _len;
    }

    @Override public int[] next() {
      if (hasNext()) {
        _len = -1; // invalidate the cache so the following call builds a new sentence
        return _sent;
      } else
        return null;
    }

    @Override public void remove() { throw new UnsupportedOperationException("Remove is not supported"); } // should never be called
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.aggregation.last;

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.query.aggregation.AggregateCombiner;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.SerializablePairLongString;
import org.apache.druid.query.aggregation.TestLongColumnSelector;
import org.apache.druid.query.aggregation.TestObjectColumnSelector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.column.ColumnCapabilitiesImpl;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ValueType;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.nio.ByteBuffer;

/**
 * Tests for the "string last" aggregator family: the heap aggregator, the
 * buffer aggregator, the combining factory and the aggregate combiner.
 * Each test drives the selectors through the first four rows of the fixtures.
 */
public class StringLastAggregationTest
{
  // A true compile-time constant — was previously a non-static boxed Integer field.
  private static final int MAX_STRING_SIZE = 1024;

  private AggregatorFactory stringLastAggFactory;
  private AggregatorFactory combiningAggFactory;
  private ColumnSelectorFactory colSelectorFactory;
  private TestLongColumnSelector timeSelector;
  private TestObjectColumnSelector<String> valueSelector;
  // Parameterized (was a raw type) — it is always constructed over pairs.
  private TestObjectColumnSelector<SerializablePairLongString> objectSelector;

  // Row fixtures. Only the first four rows are consumed by each test
  // (aggregate(...) is invoked four times); within those, row 0 carries the
  // largest timestamp (8224), so "last" resolves to strings[0] / times[0].
  private String[] strings = {"1111", "2222", "3333", null, "4444"};
  private long[] times = {8224, 6879, 2436, 3546, 7888};
  // For the combining tests, pairs[2] carries the largest timestamp (69134L)
  // among the first four pairs.
  private SerializablePairLongString[] pairs = {
      new SerializablePairLongString(52782L, "AAAA"),
      new SerializablePairLongString(65492L, "BBBB"),
      new SerializablePairLongString(69134L, "CCCC"),
      new SerializablePairLongString(11111L, "DDDD"),
      new SerializablePairLongString(51223L, null)
  };

  @Before
  public void setup()
  {
    NullHandling.initializeForTests();
    stringLastAggFactory = new StringLastAggregatorFactory("billy", "nilly", MAX_STRING_SIZE);
    combiningAggFactory = stringLastAggFactory.getCombiningFactory();
    timeSelector = new TestLongColumnSelector(times);
    valueSelector = new TestObjectColumnSelector<>(strings);
    objectSelector = new TestObjectColumnSelector<>(pairs);
    colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class);
    EasyMock.expect(colSelectorFactory.makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME)).andReturn(timeSelector);
    EasyMock.expect(colSelectorFactory.makeColumnValueSelector("nilly")).andReturn(valueSelector);
    EasyMock.expect(colSelectorFactory.makeColumnValueSelector("billy")).andReturn(objectSelector);
    EasyMock.expect(colSelectorFactory.getColumnCapabilities("nilly"))
            .andReturn(new ColumnCapabilitiesImpl().setType(ValueType.STRING));
    EasyMock.expect(colSelectorFactory.getColumnCapabilities("billy")).andReturn(null);
    EasyMock.replay(colSelectorFactory);
  }

  @Test
  public void testStringLastAggregator()
  {
    Aggregator agg = stringLastAggFactory.factorize(colSelectorFactory);

    aggregate(agg);
    aggregate(agg);
    aggregate(agg);
    aggregate(agg);

    Pair<Long, String> result = (Pair<Long, String>) agg.get();

    // Row 0 has the max timestamp among the four aggregated rows.
    Assert.assertEquals(strings[0], result.rhs);
  }

  @Test
  public void testStringLastBufferAggregator()
  {
    BufferAggregator agg = stringLastAggFactory.factorizeBuffered(colSelectorFactory);

    ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
    agg.init(buffer, 0);

    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);

    Pair<Long, String> result = (Pair<Long, String>) agg.get(buffer, 0);

    Assert.assertEquals(strings[0], result.rhs);
  }

  @Test
  public void testCombine()
  {
    SerializablePairLongString pair1 = new SerializablePairLongString(1467225000L, "AAAA");
    SerializablePairLongString pair2 = new SerializablePairLongString(1467240000L, "BBBB");
    // combine() keeps the pair with the later timestamp.
    Assert.assertEquals(pair2, stringLastAggFactory.combine(pair1, pair2));
  }

  @Test
  public void testStringLastCombiningAggregator()
  {
    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);

    aggregate(agg);
    aggregate(agg);
    aggregate(agg);
    aggregate(agg);

    Pair<Long, String> result = (Pair<Long, String>) agg.get();
    Pair<Long, String> expected = pairs[2]; // latest timestamp among pairs[0..3]

    Assert.assertEquals(expected.lhs, result.lhs);
    Assert.assertEquals(expected.rhs, result.rhs);
  }

  @Test
  public void testStringLastCombiningBufferAggregator()
  {
    BufferAggregator agg = combiningAggFactory.factorizeBuffered(colSelectorFactory);

    ByteBuffer buffer = ByteBuffer.wrap(new byte[stringLastAggFactory.getMaxIntermediateSize()]);
    agg.init(buffer, 0);

    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);
    aggregate(agg, buffer, 0);

    Pair<Long, String> result = (Pair<Long, String>) agg.get(buffer, 0);
    Pair<Long, String> expected = pairs[2];

    Assert.assertEquals(expected.lhs, result.lhs);
    Assert.assertEquals(expected.rhs, result.rhs);
  }

  @Test
  public void testStringLastAggregateCombiner()
  {
    // Parameterized selector and consistently "Last"-named locals
    // (previously named stringFirstAggregateCombiner in a StringLast test).
    TestObjectColumnSelector<SerializablePairLongString> columnSelector = new TestObjectColumnSelector<>(pairs);

    AggregateCombiner stringLastAggregateCombiner = combiningAggFactory.makeAggregateCombiner();

    stringLastAggregateCombiner.reset(columnSelector);
    Assert.assertEquals(pairs[0], stringLastAggregateCombiner.getObject());

    columnSelector.increment();
    stringLastAggregateCombiner.fold(columnSelector);
    // pairs[1] has a later timestamp than pairs[0], so the fold replaces it.
    Assert.assertEquals(pairs[1], stringLastAggregateCombiner.getObject());

    stringLastAggregateCombiner.reset(columnSelector);
    Assert.assertEquals(pairs[1], stringLastAggregateCombiner.getObject());
  }

  /** Advances one row through the heap aggregator and all selectors. */
  private void aggregate(Aggregator agg)
  {
    agg.aggregate();
    timeSelector.increment();
    valueSelector.increment();
    objectSelector.increment();
  }

  /** Advances one row through the buffer aggregator and all selectors. */
  private void aggregate(BufferAggregator agg, ByteBuffer buff, int position)
  {
    agg.aggregate(buff, position);
    timeSelector.increment();
    valueSelector.increment();
    objectSelector.increment();
  }
}
package elasta.orm.event.builder.impl;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import elasta.orm.entity.EntityMappingHelper;
import elasta.orm.event.*;
import elasta.orm.event.builder.*;
import elasta.orm.event.impl.EntityToEventDispatcherMapImpl;
import elasta.orm.event.impl.EventProcessorImpl;

import java.util.*;

/**
 * Created by sohan on 3/30/2017.
 * <p>
 * Collects per-entity event handlers for the delete / upsert / delete-relation
 * operations and assembles them into an {@link EventProcessor} backed by one
 * {@link EventDispatcher} per entity and operation type.
 */
public final class EventProcessorBuilderImpl implements EventProcessorBuilder {
    final EntityMappingHelper helper;
    final Map<String, List<EventHandler>> deleteEventHandlersMap;
    final Map<String, List<EventHandler>> upsertEventHandlersMap;
    final Map<String, List<EventHandler>> deleteRelationEventHandlersMap;

    public EventProcessorBuilderImpl(EntityMappingHelper helper) {
        this(helper, new LinkedHashMap<>(), new LinkedHashMap<>(), new LinkedHashMap<>());
    }

    /**
     * @param helper                         mapping metadata used when building dispatchers
     * @param deleteEventHandlersMap         entity -> handlers invoked on delete
     * @param upsertEventHandlersMap         entity -> handlers invoked on upsert
     * @param deleteRelationEventHandlersMap entity -> handlers invoked on relation delete
     * @throws NullPointerException if any argument is null
     */
    public EventProcessorBuilderImpl(EntityMappingHelper helper,
                                     Map<String, List<EventHandler>> deleteEventHandlersMap,
                                     Map<String, List<EventHandler>> upsertEventHandlersMap,
                                     Map<String, List<EventHandler>> deleteRelationEventHandlersMap) {
        Objects.requireNonNull(helper);
        Objects.requireNonNull(deleteEventHandlersMap);
        Objects.requireNonNull(upsertEventHandlersMap);
        Objects.requireNonNull(deleteRelationEventHandlersMap);
        this.helper = helper;
        this.deleteEventHandlersMap = deleteEventHandlersMap;
        this.upsertEventHandlersMap = upsertEventHandlersMap;
        this.deleteRelationEventHandlersMap = deleteRelationEventHandlersMap;
    }

    @Override
    public EventProcessorBuilder onDelete(String entity, EventHandler eventHandler) {
        addDeleteEventHandler(entity, eventHandler);
        return this;
    }

    @Override
    public OnDeleteHandlersBuilder onDelete(String entity) {
        // Fluent sub-builder bound to a single entity.
        return new OnDeleteHandlersBuilder() {
            @Override
            public OnDeleteHandlersBuilder add(EventHandler eventHandler) {
                addDeleteEventHandler(entity, eventHandler);
                return this;
            }

            @Override
            public OnDeleteHandlersBuilder addAll(Collection<EventHandler> eventHandlers) {
                eventHandlers.forEach(eventHandler -> addDeleteEventHandler(entity, eventHandler));
                return this;
            }
        };
    }

    @Override
    public EventProcessorBuilder onUpsert(String entity, EventHandler eventHandler) {
        addUpsertEventHandler(entity, eventHandler);
        return this;
    }

    @Override
    public OnUpsertHandlersBuilder onUpsert(String entity) {
        return new OnUpsertHandlersBuilder() {
            @Override
            public OnUpsertHandlersBuilder add(EventHandler eventHandler) {
                addUpsertEventHandler(entity, eventHandler);
                return this;
            }

            @Override
            public OnUpsertHandlersBuilder addAll(Collection<EventHandler> eventHandlers) {
                eventHandlers.forEach(eventHandler -> addUpsertEventHandler(entity, eventHandler));
                return this;
            }
        };
    }

    @Override
    public EventProcessorBuilder onDeleteRelation(String entity, EventHandler eventHandler) {
        addDeleteRelationEventHandler(entity, eventHandler);
        return this;
    }

    @Override
    public OnDeleteRelationHandlersBuilder onDeleteRelation(String entity) {
        return new OnDeleteRelationHandlersBuilder() {
            @Override
            public OnDeleteRelationHandlersBuilder add(EventHandler eventHandler) {
                addDeleteRelationEventHandler(entity, eventHandler);
                return this;
            }

            @Override
            public OnDeleteRelationHandlersBuilder addAll(Collection<EventHandler> eventHandlers) {
                eventHandlers.forEach(eventHandler -> addDeleteRelationEventHandler(entity, eventHandler));
                return this;
            }
        };
    }

    @Override
    public EventProcessorBuilder on(String entity, OperationType operationType, EventHandler eventHandler) {
        // Dispatch to the matching registration method.
        switch (operationType) {
            case DELETE:
                addDeleteEventHandler(entity, eventHandler);
                break;
            case UPSERT:
                addUpsertEventHandler(entity, eventHandler);
                break;
            case DELETE_RELATION:
                addDeleteRelationEventHandler(entity, eventHandler);
                break;
        }
        return this;
    }

    @Override
    public EventProcessor build() {
        return new EventProcessorImpl(
            entityToEventDispatcherMap()
        );
    }

    /**
     * Builds one {@link EntityToEventDispatcherMapImpl.EventHandlerTpl} per
     * entity that has at least one handler registered for any operation type.
     */
    private EntityToEventDispatcherMap entityToEventDispatcherMap() {

        ImmutableMap.Builder<String, EntityToEventDispatcherMapImpl.EventHandlerTpl> mapBuilder = ImmutableMap.builder();

        // Union of all entities that appear in any of the three handler maps.
        ImmutableSet<String> keySet = ImmutableSet.<String>builder()
            .addAll(deleteEventHandlersMap.keySet())
            .addAll(upsertEventHandlersMap.keySet())
            .addAll(deleteRelationEventHandlersMap.keySet())
            .build();

        // One shared context per operation type, reused across entities.
        BuilderContextImpl<EventDispatcher> builderContextDelete = new BuilderContextImpl<>(new LinkedHashMap<>());
        BuilderContextImpl<EventDispatcher> builderContextUpsert = new BuilderContextImpl<>(new LinkedHashMap<>());
        BuilderContextImpl<EventDispatcher> builderContextDeleteRelation = new BuilderContextImpl<>(new LinkedHashMap<>());

        EventDispatcherBuilderImpl deleteEventDispatcherBuilder = new EventDispatcherBuilderImpl(
            new EntityToEventHandlerMapImpl(
                deleteEventHandlersMap
            ),
            new ListChildsForDeleteEventHandlerFunctionImpl(helper)
        );

        EventDispatcherBuilderImpl upsertEventDispatcherBuilder = new EventDispatcherBuilderImpl(
            new EntityToEventHandlerMapImpl(
                upsertEventHandlersMap
            ),
            new ListChildsForUpsertEventHandlerFunctionImpl(helper)
        );

        EventDispatcherBuilderImpl deleteRelationEventDispatcherBuilder = new EventDispatcherBuilderImpl(
            new EntityToEventHandlerMapImpl(
                deleteRelationEventHandlersMap
            ),
            new ListChildsForDeleteRelationEventHandlerFunctionImpl(helper)
        );

        keySet.forEach(entity -> {

            mapBuilder.put(
                entity,
                new EntityToEventDispatcherMapImpl.EventHandlerTpl(
                    deleteEventDispatcherBuilder.build(
                        entity, builderContextDelete
                    ),
                    upsertEventDispatcherBuilder.build(
                        entity, builderContextUpsert
                    ),
                    deleteRelationEventDispatcherBuilder.build(
                        entity, builderContextDeleteRelation
                    )
                )
            );
        });

        return new EntityToEventDispatcherMapImpl(
            mapBuilder.build()
        );
    }

    // The three registration helpers below use computeIfAbsent instead of the
    // previous get/null-check/put sequence — same behavior, one map lookup.

    private void addDeleteEventHandler(String entity, EventHandler eventHandler) {
        deleteEventHandlersMap.computeIfAbsent(entity, key -> new ArrayList<>()).add(eventHandler);
    }

    private void addUpsertEventHandler(String entity, EventHandler eventHandler) {
        upsertEventHandlersMap.computeIfAbsent(entity, key -> new ArrayList<>()).add(eventHandler);
    }

    private void addDeleteRelationEventHandler(String entity, EventHandler eventHandler) {
        deleteRelationEventHandlersMap.computeIfAbsent(entity, key -> new ArrayList<>()).add(eventHandler);
    }

    // TODO(review): leftover scratch entry point — consider removing.
    public static void main(String[] args) {
    }
}
/*
Copyright 2007-2009 Selenium committers

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
// Replaces the previous wildcard static import: only QTWEBKIT was supplied by it.
import static org.openqa.selenium.testing.Ignore.Driver.QTWEBKIT;

import org.junit.Test;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.TestUtilities;

import java.util.Arrays;
import java.util.List;

// NOTE(review): @NeedsFreshDriver is used below but not imported in this
// excerpt — presumably resolved from elsewhere; verify against the full file.
public class I18nTest extends JUnit4TestBase {

  /**
   * The Hebrew word shalom (peace) encoded in order Shin (sh) Lamed (L) Vav (O) final-Mem (M).
   */
  private static final String shalom = "\u05E9\u05DC\u05D5\u05DD";

  /**
   * The Hebrew word tmunot (images) encoded in order Taf (t) Mem (m) Vav (u) Nun (n) Vav (o) Taf
   * (t).
   */
  private static final String tmunot = "\u05EA\u05DE\u05D5\u05E0\u05D5\u05EA";

  /**
   * Japanese for "Tokyo"
   */
  private static final String tokyo = "\u6771\u4EAC";

  @Ignore({IPHONE, MARIONETTE})
  @Test
  public void testCn() {
    driver.get(pages.chinesePage);
    driver.findElement(By.linkText(Messages.getString("I18nTest.link1"))).click();
  }

  @Ignore({ANDROID, MARIONETTE})
  @Test
  public void testEnteringHebrewTextFromLeftToRight() {
    driver.get(pages.chinesePage);
    WebElement input = driver.findElement(By.name("i18n"));

    input.sendKeys(shalom);

    assertEquals(shalom, input.getAttribute("value"));
  }

  @Ignore({ANDROID, MARIONETTE})
  @Test
  public void testEnteringHebrewTextFromRightToLeft() {
    driver.get(pages.chinesePage);
    WebElement input = driver.findElement(By.name("i18n"));

    input.sendKeys(tmunot);

    assertEquals(tmunot, input.getAttribute("value"));
  }

  @Test
  @Ignore(
      value = {MARIONETTE, CHROME, OPERA, QTWEBKIT},
      // Fixed typo ("MAIONETTE") and restored the separators that were missing
      // between the concatenated reason fragments.
      reason = "MARIONETTE: not checked, "
               + "CHROME: ChromeDriver only supports characters in the BMP, "
               + "OPERA: doesn't work - see issue 5069, "
               + "QTWEBKIT: WebDriver only supports characters in the BMP"
  )
  public void testEnteringSupplementaryCharacters() {
    assumeFalse("IE: versions less than 10 have issue 5069",
                TestUtilities.isInternetExplorer(driver) &&
                TestUtilities.getIEVersion(driver) < 10);
    assumeFalse("FF: native events at linux broke it - see issue 5069",
                TestUtilities.isFirefox(driver) &&
                TestUtilities.isNativeEventsEnabled(driver) &&
                TestUtilities.getEffectivePlatform().is(Platform.LINUX));
    driver.get(pages.chinesePage);

    // Five CJK Extension characters outside the Basic Multilingual Plane.
    String input = "";
    input += new String(Character.toChars(0x20000));
    input += new String(Character.toChars(0x2070E));
    input += new String(Character.toChars(0x2000B));
    input += new String(Character.toChars(0x2A190));
    input += new String(Character.toChars(0x2A6B2));

    WebElement el = driver.findElement(By.name("i18n"));
    el.sendKeys(input);

    assertEquals(input, el.getAttribute("value"));
  }

  @NeedsFreshDriver
  @Test
  @Ignore(MARIONETTE)
  public void testShouldBeAbleToReturnTheTextInAPage() {
    String url = GlobalTestEnvironment.get()
        .getAppServer()
        .whereIs("encoding");
    driver.get(url);

    String text = driver.findElement(By.tagName("body")).getText();

    assertEquals(shalom, text);
  }

  @NeedsFreshDriver
  @Ignore(value = {IE, CHROME, HTMLUNIT, FIREFOX, OPERA, ANDROID, IPHONE, QTWEBKIT},
          // Added the space that was missing between the concatenated fragments.
          reason = "Not implemented on anything other than "
                   + "Firefox/Linux at the moment.")
  @Test
  public void testShouldBeAbleToActivateIMEEngine() throws InterruptedException {
    assumeTrue("IME is supported on Linux only.",
               TestUtilities.getEffectivePlatform().is(Platform.LINUX));
    assumeTrue("Native events are disabled, IME will not work.",
               TestUtilities.isNativeEventsEnabled(driver));

    driver.get(pages.formPage);

    WebElement input = driver.findElement(By.id("working"));

    // Activate IME. By default, this keycode activates IBus input for Japanese.
    WebDriver.ImeHandler ime = driver.manage().ime();

    List<String> engines = ime.getAvailableEngines();
    String desiredEngine = "anthy";

    if (!engines.contains(desiredEngine)) {
      System.out.println("Desired engine " + desiredEngine + " not available, skipping test.");
      return;
    }

    ime.activateEngine(desiredEngine);

    int totalWaits = 0;
    while (!ime.isActivated() && (totalWaits < 10)) {
      Thread.sleep(500);
      totalWaits++;
    }
    assertTrue("IME Engine should be activated.", ime.isActivated());
    assertEquals(desiredEngine, ime.getActiveEngine());

    // Send the Romaji for "Tokyo". The space at the end instructs the IME to convert the word.
    input.sendKeys("toukyou ");
    input.sendKeys(Keys.ENTER);

    String elementValue = input.getAttribute("value");

    ime.deactivate();
    assertFalse("IME engine should be off.", ime.isActivated());

    // IME is not present. Don't fail because of that. But it should have the Romaji value
    // instead.
    // NOTE(review): despite the message, only the converted form is accepted
    // here (compare with testShouldBeAbleToInputJapanese); the comparison is
    // written constant-first so a null elementValue fails rather than NPEs.
    assertTrue("The element's value should either remain in Romaji or be converted properly."
               + " It was:" + elementValue, tokyo.equals(elementValue));
  }

  @Ignore(value = {IE, CHROME, HTMLUNIT, OPERA, ANDROID, IPHONE},
          reason = "Not implemented on anything other than "
                   + "Firefox/Linux at the moment.")
  @Test
  public void testShouldBeAbleToInputJapanese() {
    assumeTrue("IME is supported on Linux only.",
               TestUtilities.getEffectivePlatform().is(Platform.LINUX));
    assumeTrue("Native events are disabled, IME will not work.",
               TestUtilities.isNativeEventsEnabled(driver));

    driver.get(pages.formPage);

    WebElement input = driver.findElement(By.id("working"));

    // Activate IME. By default, this keycode activates IBus input for Japanese.
    input.sendKeys(Keys.ZENKAKU_HANKAKU);

    // Send the Romaji for "Tokyo". The space at the end instructs the IME to convert the word.
    input.sendKeys("toukyou ");

    String elementValue = input.getAttribute("value");
    // Turn OFF IME input first.
    input.sendKeys(Keys.ZENKAKU_HANKAKU);

    // IME is not present. Don't fail because of that. But it should have the Romaji value
    // instead.
    String[] possibleValues = {tokyo, "\uE040" + "toukyou ", "toukyou "};
    assertTrue("The element's value should either remain in Romaji or be converted properly."
               + " It was: -" + elementValue + "-",
               Arrays.asList(possibleValues).contains(elementValue));
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.join;

import java.util.Collection;

import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FairSchedulerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FairSchedulerLeafQueueInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FairSchedulerQueueInfo;
import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;

import com.google.inject.Inject;
import com.google.inject.servlet.RequestScoped;

/**
 * RM web UI page that renders the Fair Scheduler queue hierarchy as a nested,
 * jstree-driven list with capacity/fair-share/usage bars per queue.
 * The hamlet call chains below emit HTML in strict order — do not reorder.
 */
public class FairSchedulerPage extends RmView {
  // CSS class applied to each queue bar.
  static final String _Q = ".ui-state-default.ui-corner-all";
  // Queue bars occupy at most this fraction of the page width.
  static final float Q_MAX_WIDTH = 0.8f;
  // Usage statistics are positioned just to the right of the widest bar.
  static final float Q_STATS_POS = Q_MAX_WIDTH + 0.05f;
  static final String Q_END = "left:101%";
  static final String Q_GIVEN = "left:0%;background:none;border:1px dashed rgba(0,0,0,0.25)";
  static final String Q_OVER = "background:rgba(255, 140, 0, 0.8)";
  static final String Q_UNDER = "background:rgba(50, 205, 50, 0.8)";

  // Request-scoped holder carrying the queue currently being rendered between
  // the nested blocks (QueueBlock mutates qinfo as it recurses).
  @RequestScoped
  static class FSQInfo {
    FairSchedulerQueueInfo qinfo;
  }

  /** Renders the detail table shown under a leaf queue. */
  static class LeafQueueBlock extends HtmlBlock {
    final FairSchedulerLeafQueueInfo qinfo;

    @Inject LeafQueueBlock(ViewContext ctx, FSQInfo info) {
      super(ctx);
      qinfo = (FairSchedulerLeafQueueInfo)info.qinfo;
    }

    @Override
    protected void render(Block html) {
      ResponseInfo ri = info("\'" + qinfo.getQueueName() + "\' Queue Status").
          _("Used Resources:", qinfo.getUsedResources().toString()).
          _("Num Active Applications:", qinfo.getNumActiveApplications()).
          _("Num Pending Applications:", qinfo.getNumPendingApplications()).
          _("Min Resources:", qinfo.getMinResources().toString()).
          _("Max Resources:", qinfo.getMaxResources().toString());
      // Only show the cap when one is actually configured.
      int maxApps = qinfo.getMaxApplications();
      if (maxApps < Integer.MAX_VALUE) {
        ri._("Max Running Applications:", qinfo.getMaxApplications());
      }
      ri._("Fair Share:", qinfo.getFairShare().toString());
      html._(InfoBlock.class);

      // clear the info contents so this queue's info doesn't accumulate into another queue's info
      ri.clear();
    }
  }

  /** Recursively renders one <li> bar per child queue of the current queue. */
  static class QueueBlock extends HtmlBlock {
    final FSQInfo fsqinfo;

    @Inject QueueBlock(FSQInfo info) {
      fsqinfo = info;
    }

    @Override
    public void render(Block html) {
      Collection<FairSchedulerQueueInfo> subQueues = fsqinfo.qinfo.getChildQueues();
      UL<Hamlet> ul = html.ul("#pq");
      for (FairSchedulerQueueInfo info : subQueues) {
        float capacity = info.getMaxResourcesFraction();
        float fairShare = info.getFairShareMemoryFraction();
        float used = info.getUsedMemoryFraction();
        // Bar layout: outer <a> sized by capacity; inner spans show fair share
        // (dashed outline) and usage (green under / orange over fair share).
        LI<UL<Hamlet>> li = ul.
          li().
            a(_Q).$style(width(capacity * Q_MAX_WIDTH)).
              $title(join("Fair Share:", percent(fairShare))).
              span().$style(join(Q_GIVEN, ";font-size:1px;", width(fairShare/capacity))).
                _('.')._().
              span().$style(join(width(used/capacity), ";font-size:1px;left:0%;", used > fairShare ? Q_OVER : Q_UNDER)).
                _('.')._().
              span(".q", info.getQueueName())._().
            span().$class("qstats").$style(left(Q_STATS_POS)).
              _(join(percent(used), " used"))._();
        // Point the shared holder at this child before recursing/embedding.
        fsqinfo.qinfo = info;
        if (info instanceof FairSchedulerLeafQueueInfo) {
          li.ul("#lq").li()._(LeafQueueBlock.class)._()._();
        } else {
          li._(QueueBlock.class);
        }
        li._();
      }
      ul._();
    }
  }

  /** Top-level block: legend, root queue bar, queue tree and the apps table. */
  static class QueuesBlock extends HtmlBlock {
    final FairScheduler fs;
    final FSQInfo fsqinfo;

    @Inject QueuesBlock(ResourceManager rm, FSQInfo info) {
      fs = (FairScheduler)rm.getResourceScheduler();
      fsqinfo = info;
    }

    @Override
    public void render(Block html) {
      html._(MetricsOverviewTable.class);
      UL<DIV<DIV<Hamlet>>> ul = html.
        div("#cs-wrapper.ui-widget").
          div(".ui-widget-header.ui-corner-top").
            _("Application Queues")._().
          div("#cs.ui-widget-content.ui-corner-bottom").
            ul();
      if (fs == null) {
        // No Fair Scheduler configured: render a single placeholder queue.
        ul.
          li().
            a(_Q).$style(width(Q_MAX_WIDTH)).
              span().$style(Q_END)._("100% ")._().
              span(".q", "default")._()._();
      } else {
        FairSchedulerInfo sinfo = new FairSchedulerInfo(fs);
        fsqinfo.qinfo = sinfo.getRootQueueInfo();
        float used = fsqinfo.qinfo.getUsedMemoryFraction();
        ul.
          li().$style("margin-bottom: 1em").
            span().$style("font-weight: bold")._("Legend:")._().
            span().$class("qlegend ui-corner-all").$style(Q_GIVEN).
              _("Fair Share")._().
            span().$class("qlegend ui-corner-all").$style(Q_UNDER).
              _("Used")._().
            span().$class("qlegend ui-corner-all").$style(Q_OVER).
              _("Used (over fair share)")._().
            span().$class("qlegend ui-corner-all ui-state-default").
              _("Max Capacity")._().
          _().
          li().
            a(_Q).$style(width(Q_MAX_WIDTH)).
              span().$style(join(width(used), ";left:0%;", used > 1 ? Q_OVER : Q_UNDER))._(".")._().
              span(".q", "root")._().
              span().$class("qstats").$style(left(Q_STATS_POS)).
                _(join(percent(used), " used"))._().
            _(QueueBlock.class)._();
      }
      // Hide the tree until jstree has initialised it (see postHead script).
      ul._()._().
      script().$type("text/javascript").
          _("$('#cs').hide();")._()._().
      _(FairSchedulerAppsBlock.class);
    }
  }

  /** Injects page CSS and the jstree setup/filter script into the head. */
  @Override protected void postHead(Page.HTML<_> html) {
    html.
      style().$type("text/css").
        _("#cs { padding: 0.5em 0 1em 0; margin-bottom: 1em; position: relative }",
          "#cs ul { list-style: none }",
          "#cs a { font-weight: normal; margin: 2px; position: relative }",
          "#cs a span { font-weight: normal; font-size: 80% }",
          "#cs-wrapper .ui-widget-header { padding: 0.2em 0.5em }",
          ".qstats { font-weight: normal; font-size: 80%; position: absolute }",
          ".qlegend { font-weight: normal; padding: 0 1em; margin: 1em }",
          "table.info tr th {width: 50%}")._(). // to center info table
      script("/static/jt/jquery.jstree.js").
      script().$type("text/javascript").
        _("$(function() {",
          " $('#cs a span').addClass('ui-corner-all').css('position', 'absolute');",
          " $('#cs').bind('loaded.jstree', function (e, data) {",
          " var callback = { call:reopenQueryNodes }",
          " data.inst.open_node('#pq', callback);",
          " }).",
          " jstree({",
          " core: { animation: 188, html_titles: true },",
          " plugins: ['themeroller', 'html_data', 'ui'],",
          " themeroller: { item_open: 'ui-icon-minus',",
          " item_clsd: 'ui-icon-plus', item_leaf: 'ui-icon-gear'",
          " }",
          " });",
          " $('#cs').bind('select_node.jstree', function(e, data) {",
          " var q = $('.q', data.rslt.obj).first().text();",
          " if (q == 'root') q = '';",
          " else q = '^' + q.substr(q.lastIndexOf('.') + 1) + '$';",
          " $('#apps').dataTable().fnFilter(q, 3, true);",
          " });",
          " $('#cs').show();",
          "});")._().
      _(SchedulerPageUtil.QueueBlockUtil.class);
  }

  @Override protected Class<? extends SubView> content() {
    return QueuesBlock.class;
  }

  // Formatting helpers for inline CSS values (input is a fraction in [0,1]).
  static String percent(float f) {
    return String.format("%.1f%%", f * 100);
  }

  static String width(float f) {
    return String.format("width:%.1f%%", f * 100);
  }

  static String left(float f) {
    return String.format("left:%.1f%%", f * 100);
  }

  /** Column definitions for the apps DataTable (ids, dates, progress). */
  @Override
  protected String getAppsTableColumnDefs() {
    StringBuilder sb = new StringBuilder();
    return sb
      .append("[\n")
      .append("{'sType':'numeric', 'aTargets': [0]")
      .append(", 'mRender': parseHadoopID }")
      .append("\n, {'sType':'numeric', 'aTargets': [5, 6]")
      .append(", 'mRender': renderHadoopDate }")
      .append("\n, {'sType':'numeric', bSearchable:false, 'aTargets': [9]")
      .append(", 'mRender': parseHadoopProgress }]").toString();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.connectors.fs; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.OperatorStateStore; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.InputTypeConfigurable; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.state.CheckpointListener; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink; import org.apache.flink.util.Preconditions; import org.apache.commons.lang3.time.StopWatch; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.IOException; import java.io.Serializable; 
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; /** * Sink that emits its input elements to rolling {@link org.apache.hadoop.fs.FileSystem} files. This * is integrated with the checkpointing mechanism to provide exactly once semantics. * * * <p>When creating the sink a {@code basePath} must be specified. The base directory contains * one directory for every bucket. The bucket directories themselves contain several part files. * These contain the actual written data. * * * <p>The sink uses a {@link Bucketer} to determine the name of bucket directories inside the * base directory. Whenever the {@code Bucketer} returns a different directory name than * it returned before the sink will close the current part files inside that bucket * and start the new bucket directory. The default bucketer is a {@link DateTimeBucketer} with * date format string {@code ""yyyy-MM-dd--HH"}. You can specify a custom {@code Bucketer} * using {@link #setBucketer(Bucketer)}. For example, use * {@link NonRollingBucketer} if you don't want to have * buckets but still write part files in a fault-tolerant way. * * * <p>The filenames of the part files contain the part prefix, the parallel subtask index of the sink * and a rolling counter, for example {@code "part-1-17"}. Per default the part prefix is * {@code "part"} but this can be * configured using {@link #setPartPrefix(String)}. When a part file becomes bigger * than the batch size the current part file is closed, the part counter is increased and * a new part file is created. The batch size defaults to {@code 384MB}, this can be configured * using {@link #setBatchSize(long)}. * * * <p>Part files can be in one of three states: in-progress, pending or finished. 
The reason for this * is how the sink works together with the checkpointing mechanism to provide exactly-once semantics * and fault-tolerance. The part file that is currently being written to is in-progress. Once * a part file is closed for writing it becomes pending. When a checkpoint is successful the * currently pending files will be moved to finished. If a failure occurs the pending files * will be deleted to reset state to the last checkpoint. The data in in-progress files will * also have to be rolled back. If the {@code FileSystem} supports the {@code truncate} call * this will be used to reset the file back to a previous state. If not, a special file * with the same name as the part file and the suffix {@code ".valid-length"} will be written * that contains the length up to which the file contains valid data. When reading the file * it must be ensured that it is only read up to that point. The prefixes and suffixes for * the different file states and valid-length files can be configured, for example with * {@link #setPendingSuffix(String)}. * * * <p>Note: If checkpointing is not enabled the pending files will never be moved to the finished state. * In that case, the pending suffix/prefix can be set to {@code ""} to make the sink work * in a non-fault-tolerant way but still provide output without prefixes and suffixes. * * * <p>The part files are written using an instance of {@link Writer}. By default * {@link org.apache.flink.streaming.connectors.fs.StringWriter} is used, which writes the result * of {@code toString()} for every element. Separated by newlines. You can configure the writer * using {@link #setWriter(Writer)}. For example, * {@link org.apache.flink.streaming.connectors.fs.SequenceFileWriter} can be used to write * Hadoop {@code SequenceFiles}. 
* * * <p>Example: * * <pre>{@code * new RollingSink<Tuple2<IntWritable, Text>>(outPath) * .setWriter(new SequenceFileWriter<IntWritable, Text>()) * .setBucketer(new DateTimeBucketer("yyyy-MM-dd--HHmm") * }</pre> * * <p>This will create a sink that writes to {@code SequenceFiles} and rolls every minute. * * @see DateTimeBucketer * @see StringWriter * @see SequenceFileWriter * * @param <T> Type of the elements emitted by this sink * * @deprecated use {@link BucketingSink} instead. */ @Deprecated public class RollingSink<T> extends RichSinkFunction<T> implements InputTypeConfigurable, CheckpointedFunction, CheckpointListener { private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger(RollingSink.class); // -------------------------------------------------------------------------------------------- // User configuration values // -------------------------------------------------------------------------------------------- // These are initialized with some defaults but are meant to be changeable by the user /** * The default maximum size of part files (currently {@code 384 MB}). */ private static final long DEFAULT_BATCH_SIZE = 1024L * 1024L * 384L; /** * This is used for part files that we are writing to but which where not yet confirmed * by a checkpoint. */ private static final String DEFAULT_IN_PROGRESS_SUFFIX = ".in-progress"; /** * See above, but for prefix. */ private static final String DEFAULT_IN_PROGRESS_PREFIX = "_"; /** * This is used for part files that we are not writing to but which are not yet confirmed by * checkpoint. */ private static final String DEFAULT_PENDING_SUFFIX = ".pending"; /** * See above, but for prefix. */ private static final String DEFAULT_PENDING_PREFIX = "_"; /** * When truncate() is not supported on the used FileSystem we instead write a * file along the part file with this ending that contains the length up to which * the part file is valid. 
*/ private static final String DEFAULT_VALID_SUFFIX = ".valid-length"; /** * See above, but for prefix. */ private static final String DEFAULT_VALID_PREFIX = "_"; /** * The default prefix for part files. */ private static final String DEFAULT_PART_REFIX = "part"; /** * The default timeout for asynchronous operations such as recoverLease and truncate. In * milliseconds. */ private static final long DEFAULT_ASYNC_TIMEOUT_MS = 60 * 1000; /** * The base {@code Path} that stores all bucket directories. */ private final String basePath; /** * The {@code Bucketer} that is used to determine the path of bucket directories. */ private Bucketer bucketer; /** * We have a template and call duplicate() for each parallel writer in open() to get the actual * writer that is used for the part files. */ private Writer<T> writerTemplate; /** * The actual writer that we user for writing the part files. */ private Writer<T> writer; /** * Maximum size of part files. If files exceed this we close and create a new one in the same * bucket directory. */ private long batchSize; // These are the actually configured prefixes/suffixes private String inProgressSuffix = DEFAULT_IN_PROGRESS_SUFFIX; private String inProgressPrefix = DEFAULT_IN_PROGRESS_PREFIX; private String pendingSuffix = DEFAULT_PENDING_SUFFIX; private String pendingPrefix = DEFAULT_PENDING_PREFIX; private String validLengthSuffix = DEFAULT_VALID_SUFFIX; private String validLengthPrefix = DEFAULT_VALID_PREFIX; private String partPrefix = DEFAULT_PART_REFIX; /** * The timeout for asynchronous operations such as recoverLease and truncate. In * milliseconds. */ private long asyncTimeout = DEFAULT_ASYNC_TIMEOUT_MS; // -------------------------------------------------------------------------------------------- // Internal fields (not configurable by user) // -------------------------------------------------------------------------------------------- /** * The part file that we are currently writing to. 
*/ private transient Path currentPartPath; /** * The bucket directory that we are currently filling. */ private transient Path currentBucketDirectory; /** * For counting the part files inside a bucket directory. Part files follow the patter * {@code "{part-prefix}-{subtask}-{count}"}. When creating new part files we increase the counter. */ private transient int partCounter; /** * Tracks if the writer is currently opened or closed. */ private transient boolean isWriterOpen; /** * We use reflection to get the .truncate() method, this is only available starting with * Hadoop 2.7. */ private transient Method refTruncate; /** * The state object that is handled by flink from snapshot/restore. In there we store the * current part file path, the valid length of the in-progress files and pending part files. */ private transient BucketState bucketState; private transient ListState<BucketState> restoredBucketStates; /** * User-defined FileSystem parameters. */ @Nullable private Configuration fsConfig; /** * The FileSystem reference. */ private transient FileSystem fs; /** * Creates a new {@code RollingSink} that writes files to the given base directory. * * * <p>This uses a{@link DateTimeBucketer} as bucketer and a {@link StringWriter} has writer. * The maximum bucket size is set to 384 MB. * * @param basePath The directory to which to write the bucket files. */ public RollingSink(String basePath) { this.basePath = basePath; this.bucketer = new DateTimeBucketer(); this.batchSize = DEFAULT_BATCH_SIZE; this.writerTemplate = new StringWriter<>(); } /** * Specify a custom {@code Configuration} that will be used when creating * the {@link FileSystem} for writing. */ public RollingSink<T> setFSConfig(Configuration config) { this.fsConfig = new Configuration(); fsConfig.addAll(config); return this; } /** * Specify a custom {@code Configuration} that will be used when creating * the {@link FileSystem} for writing. 
*/ public RollingSink<T> setFSConfig(org.apache.hadoop.conf.Configuration config) { this.fsConfig = new Configuration(); for (Map.Entry<String, String> entry : config) { fsConfig.setString(entry.getKey(), entry.getValue()); } return this; } @Override @SuppressWarnings("unchecked") public void setInputType(TypeInformation<?> type, ExecutionConfig executionConfig) { if (this.writerTemplate instanceof InputTypeConfigurable) { ((InputTypeConfigurable) writerTemplate).setInputType(type, executionConfig); } } @Override public void initializeState(FunctionInitializationContext context) throws Exception { Preconditions.checkArgument(this.restoredBucketStates == null, "The " + getClass().getSimpleName() + " has already been initialized."); try { initFileSystem(); } catch (IOException e) { LOG.error("Error while creating FileSystem when initializing the state of the RollingSink.", e); throw new RuntimeException("Error while creating FileSystem when initializing the state of the RollingSink.", e); } if (this.refTruncate == null) { this.refTruncate = reflectTruncate(fs); } OperatorStateStore stateStore = context.getOperatorStateStore(); restoredBucketStates = stateStore.getSerializableListState("rolling-states"); int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask(); if (context.isRestored()) { LOG.info("Restoring state for the {} (taskIdx={}).", getClass().getSimpleName(), subtaskIndex); for (BucketState bucketState : restoredBucketStates.get()) { handleRestoredBucketState(bucketState); } if (LOG.isDebugEnabled()) { LOG.debug("{} (taskIdx= {}) restored {}", getClass().getSimpleName(), subtaskIndex, bucketState); } } else { LOG.info("No state to restore for the {} (taskIdx= {}).", getClass().getSimpleName(), subtaskIndex); } } @Override public void open(Configuration parameters) throws Exception { super.open(parameters); partCounter = 0; this.writer = writerTemplate.duplicate(); bucketState = new BucketState(); } /** * Create a file system with the user-defined hdfs 
config. * @throws IOException */ private void initFileSystem() throws IOException { if (fs == null) { Path path = new Path(basePath); fs = BucketingSink.createHadoopFileSystem(path, fsConfig); } } @Override public void close() throws Exception { closeCurrentPartFile(); } @Override public void invoke(T value) throws Exception { if (shouldRoll()) { openNewPartFile(); } writer.write(value); } /** * Determines whether we should change the bucket file we are writing to. * * * <p>This will roll if no file was created yet, if the file size is larger than the specified size * or if the {@code Bucketer} determines that we should roll. */ private boolean shouldRoll() throws IOException { boolean shouldRoll = false; int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask(); if (!isWriterOpen) { shouldRoll = true; LOG.debug("RollingSink {} starting new initial bucket. ", subtaskIndex); } if (bucketer.shouldStartNewBucket(new Path(basePath), currentBucketDirectory)) { shouldRoll = true; LOG.debug("RollingSink {} starting new bucket because {} said we should. ", subtaskIndex, bucketer); // we will retrieve a new bucket base path in openNewPartFile so reset the part counter partCounter = 0; } if (isWriterOpen) { long writePosition = writer.getPos(); if (isWriterOpen && writePosition > batchSize) { shouldRoll = true; LOG.debug( "RollingSink {} starting new bucket because file position {} is above batch size {}.", subtaskIndex, writePosition, batchSize); } } return shouldRoll; } /** * Opens a new part file. * * * <p>This closes the old bucket file and retrieves a new bucket path from the {@code Bucketer}. 
*/ private void openNewPartFile() throws Exception { closeCurrentPartFile(); Path newBucketDirectory = bucketer.getNextBucketPath(new Path(basePath)); if (!newBucketDirectory.equals(currentBucketDirectory)) { currentBucketDirectory = newBucketDirectory; try { if (fs.mkdirs(currentBucketDirectory)) { LOG.debug("Created new bucket directory: {}", currentBucketDirectory); } } catch (IOException e) { throw new RuntimeException("Could not create base path for new rolling file.", e); } } int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask(); currentPartPath = new Path(currentBucketDirectory, partPrefix + "-" + subtaskIndex + "-" + partCounter); // This should work since there is only one parallel subtask that tries names with // our subtask id. Otherwise we would run into concurrency issues here. while (fs.exists(currentPartPath) || fs.exists(getPendingPathFor(currentPartPath)) || fs.exists(getInProgressPathFor(currentPartPath))) { partCounter++; currentPartPath = new Path(currentBucketDirectory, partPrefix + "-" + subtaskIndex + "-" + partCounter); } // increase, so we don't have to check for this name next time partCounter++; LOG.debug("Next part path is {}", currentPartPath.toString()); Path inProgressPath = getInProgressPathFor(currentPartPath); writer.open(fs, inProgressPath); isWriterOpen = true; } private Path getPendingPathFor(Path path) { return new Path(path.getParent(), pendingPrefix + path.getName()).suffix(pendingSuffix); } private Path getInProgressPathFor(Path path) { return new Path(path.getParent(), inProgressPrefix + path.getName()).suffix(inProgressSuffix); } private Path getValidLengthPathFor(Path path) { return new Path(path.getParent(), validLengthPrefix + path.getName()).suffix(validLengthSuffix); } /** * Closes the current part file. * * * <p>This moves the current in-progress part file to a pending file and adds it to the list * of pending files in our bucket state. 
*/ private void closeCurrentPartFile() throws Exception { if (isWriterOpen) { writer.close(); isWriterOpen = false; } if (currentPartPath != null) { Path inProgressPath = getInProgressPathFor(currentPartPath); Path pendingPath = getPendingPathFor(currentPartPath); fs.rename(inProgressPath, pendingPath); LOG.debug("Moving in-progress bucket {} to pending file {}", inProgressPath, pendingPath); this.bucketState.pendingFiles.add(currentPartPath.toString()); } } /** * Gets the truncate() call using reflection. * * <p><b>NOTE: </b>This code comes from Flume */ private Method reflectTruncate(FileSystem fs) { Method m = null; if (fs != null) { Class<?> fsClass = fs.getClass(); try { m = fsClass.getMethod("truncate", Path.class, long.class); } catch (NoSuchMethodException ex) { LOG.debug("Truncate not found. Will write a file with suffix '{}' " + " and prefix '{}' to specify how many bytes in a bucket are valid.", validLengthSuffix, validLengthPrefix); return null; } // verify that truncate actually works FSDataOutputStream outputStream; Path testPath = new Path(UUID.randomUUID().toString()); try { outputStream = fs.create(testPath); outputStream.writeUTF("hello"); outputStream.close(); } catch (IOException e) { LOG.error("Could not create file for checking if truncate works.", e); throw new RuntimeException("Could not create file for checking if truncate works.", e); } try { m.invoke(fs, testPath, 2); } catch (IllegalAccessException | InvocationTargetException e) { LOG.debug("Truncate is not supported.", e); m = null; } try { fs.delete(testPath, false); } catch (IOException e) { LOG.error("Could not delete truncate test file.", e); throw new RuntimeException("Could not delete truncate test file.", e); } } return m; } @Override public void notifyCheckpointComplete(long checkpointId) throws Exception { synchronized (bucketState.pendingFilesPerCheckpoint) { Iterator<Map.Entry<Long, List<String>>> pendingCheckpointsIt = 
bucketState.pendingFilesPerCheckpoint.entrySet().iterator(); while (pendingCheckpointsIt.hasNext()) { Map.Entry<Long, List<String>> entry = pendingCheckpointsIt.next(); Long pastCheckpointId = entry.getKey(); if (pastCheckpointId <= checkpointId) { LOG.debug("Moving pending files to final location for checkpoint {}", pastCheckpointId); // All the pending files are buckets that have been completed but are waiting to be renamed // to their final name for (String filename : entry.getValue()) { Path finalPath = new Path(filename); Path pendingPath = getPendingPathFor(finalPath); fs.rename(pendingPath, finalPath); LOG.debug("Moving pending file {} to final location after complete checkpoint {}.", pendingPath, pastCheckpointId); } pendingCheckpointsIt.remove(); } } } } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { Preconditions.checkNotNull(restoredBucketStates, "The " + getClass().getSimpleName() + " has not been properly initialized."); int subtaskIdx = getRuntimeContext().getIndexOfThisSubtask(); if (isWriterOpen) { bucketState.currentFile = currentPartPath.toString(); bucketState.currentFileValidLength = writer.flush(); } synchronized (bucketState.pendingFilesPerCheckpoint) { bucketState.pendingFilesPerCheckpoint.put(context.getCheckpointId(), bucketState.pendingFiles); } bucketState.pendingFiles = new ArrayList<>(); restoredBucketStates.clear(); restoredBucketStates.add(bucketState); if (LOG.isDebugEnabled()) { LOG.debug("{} (taskIdx={}) checkpointed {}.", getClass().getSimpleName(), subtaskIdx, bucketState); } } private void handleRestoredBucketState(BucketState bucketState) { // we can clean all the pending files since they were renamed to // final files after this checkpoint was successful // (we re-start from the last **successful** checkpoint) bucketState.pendingFiles.clear(); if (bucketState.currentFile != null) { // We were writing to a file when the last checkpoint occurred. 
This file can either // be still in-progress or became a pending file at some point after the checkpoint. // Either way, we have to truncate it back to a valid state (or write a .valid-length // file that specifies up to which length it is valid) and rename it to the final name // before starting a new bucket file. Path partPath = new Path(bucketState.currentFile); try { Path partPendingPath = getPendingPathFor(partPath); Path partInProgressPath = getInProgressPathFor(partPath); if (fs.exists(partPendingPath)) { LOG.debug("In-progress file {} has been moved to pending after checkpoint, moving to final location.", partPath); // has been moved to pending in the mean time, rename to final location fs.rename(partPendingPath, partPath); } else if (fs.exists(partInProgressPath)) { LOG.debug("In-progress file {} is still in-progress, moving to final location.", partPath); // it was still in progress, rename to final path fs.rename(partInProgressPath, partPath); } else if (fs.exists(partPath)) { LOG.debug("In-Progress file {} was already moved to final location {}.", bucketState.currentFile, partPath); } else { LOG.debug("In-Progress file {} was neither moved to pending nor is still in progress. Possibly, " + "it was moved to final location by a previous snapshot restore", bucketState.currentFile); } if (this.refTruncate == null) { this.refTruncate = reflectTruncate(fs); } // truncate it or write a ".valid-length" file to specify up to which point it is valid if (refTruncate != null) { LOG.debug("Truncating {} to valid length {}", partPath, bucketState.currentFileValidLength); // some-one else might still hold the lease from a previous try, we are // recovering, after all ... 
if (fs instanceof DistributedFileSystem) { DistributedFileSystem dfs = (DistributedFileSystem) fs; LOG.debug("Trying to recover file lease {}", partPath); dfs.recoverLease(partPath); boolean isclosed = dfs.isFileClosed(partPath); StopWatch sw = new StopWatch(); sw.start(); while (!isclosed) { if (sw.getTime() > asyncTimeout) { break; } try { Thread.sleep(500); } catch (InterruptedException e1) { // ignore it } isclosed = dfs.isFileClosed(partPath); } } Boolean truncated = (Boolean) refTruncate.invoke(fs, partPath, bucketState.currentFileValidLength); if (!truncated) { LOG.debug("Truncate did not immediately complete for {}, waiting...", partPath); // we must wait for the asynchronous truncate operation to complete StopWatch sw = new StopWatch(); sw.start(); long newLen = fs.getFileStatus(partPath).getLen(); while (newLen != bucketState.currentFileValidLength) { if (sw.getTime() > asyncTimeout) { break; } try { Thread.sleep(500); } catch (InterruptedException e1) { // ignore it } newLen = fs.getFileStatus(partPath).getLen(); } if (newLen != bucketState.currentFileValidLength) { throw new RuntimeException("Truncate did not truncate to right length. 
Should be " + bucketState.currentFileValidLength + " is " + newLen + "."); } } } else { LOG.debug("Writing valid-length file for {} to specify valid length {}", partPath, bucketState.currentFileValidLength); Path validLengthFilePath = getValidLengthPathFor(partPath); if (!fs.exists(validLengthFilePath) && fs.exists(partPath)) { FSDataOutputStream lengthFileOut = fs.create(validLengthFilePath); lengthFileOut.writeUTF(Long.toString(bucketState.currentFileValidLength)); lengthFileOut.close(); } } // invalidate in the state object bucketState.currentFile = null; bucketState.currentFileValidLength = -1; isWriterOpen = false; } catch (IOException e) { LOG.error("Error while restoring RollingSink state.", e); throw new RuntimeException("Error while restoring RollingSink state.", e); } catch (InvocationTargetException | IllegalAccessException e) { LOG.error("Could not invoke truncate.", e); throw new RuntimeException("Could not invoke truncate.", e); } } // Move files that are confirmed by a checkpoint but did not get moved to final location // because the checkpoint notification did not happen before a failure Set<Long> pastCheckpointIds = bucketState.pendingFilesPerCheckpoint.keySet(); LOG.debug("Moving pending files to final location on restore."); for (Long pastCheckpointId : pastCheckpointIds) { // All the pending files are buckets that have been completed but are waiting to be renamed // to their final name for (String filename : bucketState.pendingFilesPerCheckpoint.get(pastCheckpointId)) { Path finalPath = new Path(filename); Path pendingPath = getPendingPathFor(finalPath); try { if (fs.exists(pendingPath)) { LOG.debug("(RESTORE) Moving pending file {} to final location after complete checkpoint {}.", pendingPath, pastCheckpointId); fs.rename(pendingPath, finalPath); } } catch (IOException e) { LOG.error("(RESTORE) Error while renaming pending file {} to final path {}: {}", pendingPath, finalPath, e); throw new RuntimeException("Error while renaming pending file " 
+ pendingPath + " to final path " + finalPath, e); } } } synchronized (bucketState.pendingFilesPerCheckpoint) { bucketState.pendingFilesPerCheckpoint.clear(); } } // -------------------------------------------------------------------------------------------- // Setters for User configuration values // -------------------------------------------------------------------------------------------- /** * Sets the maximum bucket size in bytes. * * * <p>When a bucket part file becomes larger than this size a new bucket part file is started and * the old one is closed. The name of the bucket files depends on the {@link Bucketer}. * * @param batchSize The bucket part file size in bytes. */ public RollingSink<T> setBatchSize(long batchSize) { this.batchSize = batchSize; return this; } /** * Sets the {@link Bucketer} to use for determining the bucket files to write to. * * @param bucketer The bucketer to use. */ public RollingSink<T> setBucketer(Bucketer bucketer) { this.bucketer = bucketer; return this; } /** * Sets the {@link Writer} to be used for writing the incoming elements to bucket files. * * @param writer The {@code Writer} to use. */ public RollingSink<T> setWriter(Writer<T> writer) { this.writerTemplate = writer; return this; } /** * Sets the suffix of in-progress part files. The default is {@code "in-progress"}. */ public RollingSink<T> setInProgressSuffix(String inProgressSuffix) { this.inProgressSuffix = inProgressSuffix; return this; } /** * Sets the prefix of in-progress part files. The default is {@code "_"}. */ public RollingSink<T> setInProgressPrefix(String inProgressPrefix) { this.inProgressPrefix = inProgressPrefix; return this; } /** * Sets the suffix of pending part files. The default is {@code ".pending"}. */ public RollingSink<T> setPendingSuffix(String pendingSuffix) { this.pendingSuffix = pendingSuffix; return this; } /** * Sets the prefix of pending part files. The default is {@code "_"}. 
*/ public RollingSink<T> setPendingPrefix(String pendingPrefix) { this.pendingPrefix = pendingPrefix; return this; } /** * Sets the suffix of valid-length files. The default is {@code ".valid-length"}. */ public RollingSink<T> setValidLengthSuffix(String validLengthSuffix) { this.validLengthSuffix = validLengthSuffix; return this; } /** * Sets the prefix of valid-length files. The default is {@code "_"}. */ public RollingSink<T> setValidLengthPrefix(String validLengthPrefix) { this.validLengthPrefix = validLengthPrefix; return this; } /** * Sets the prefix of part files. The default is {@code "part"}. */ public RollingSink<T> setPartPrefix(String partPrefix) { this.partPrefix = partPrefix; return this; } /** * Disable cleanup of leftover in-progress/pending files when the sink is opened. * * * <p>This should only be disabled if using the sink without checkpoints, to not remove * the files already in the directory. * * @deprecated This option is deprecated and remains only for backwards compatibility. * We do not clean up lingering files anymore. */ @Deprecated public RollingSink<T> disableCleanupOnOpen() { return this; } /** * Sets the default timeout for asynchronous operations such as recoverLease and truncate. * * @param timeout The timeout, in milliseconds. */ public RollingSink<T> setAsyncTimeout(long timeout) { this.asyncTimeout = timeout; return this; } // -------------------------------------------------------------------------------------------- // Internal Classes // -------------------------------------------------------------------------------------------- /** * This is used for keeping track of the current in-progress files and files that we mark * for moving from pending to final location after we get a checkpoint-complete notification. */ public static final class BucketState implements Serializable { private static final long serialVersionUID = 1L; /** * The file that was in-progress when the last checkpoint occurred. 
*/ public String currentFile; /** * The valid length of the in-progress file at the time of the last checkpoint. */ public long currentFileValidLength = -1; /** * Pending files that accumulated since the last checkpoint. */ public List<String> pendingFiles = new ArrayList<>(); /** * When doing a checkpoint we move the pending files since the last checkpoint to this map * with the id of the checkpoint. When we get the checkpoint-complete notification we move * pending files of completed checkpoints to their final location. */ public final Map<Long, List<String>> pendingFilesPerCheckpoint = new HashMap<>(); @Override public String toString() { return "In-progress=" + currentFile + " validLength=" + currentFileValidLength + " pendingForNextCheckpoint=" + pendingFiles + " pendingForPrevCheckpoints=" + pendingFilesPerCheckpoint; } } }
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.bookkeeper.client; import java.io.IOException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.bookkeeper.client.AsyncCallback.CreateCallback; import org.apache.bookkeeper.client.AsyncCallback.DeleteCallback; import org.apache.bookkeeper.client.AsyncCallback.OpenCallback; import org.apache.bookkeeper.client.AsyncCallback.IsClosedCallback; import org.apache.bookkeeper.client.BKException.Code; import org.apache.bookkeeper.conf.ClientConfiguration; import org.apache.bookkeeper.meta.LedgerManager; import org.apache.bookkeeper.meta.LedgerManagerFactory; import org.apache.bookkeeper.proto.BookieClient; import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback; import org.apache.bookkeeper.util.OrderedSafeExecutor; import org.apache.bookkeeper.util.ReflectionUtils; import org.apache.bookkeeper.util.ZkUtils; import org.apache.bookkeeper.zookeeper.ZooKeeperWatcherBase; import org.apache.commons.configuration.ConfigurationException; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooKeeper; 
import org.jboss.netty.channel.socket.ClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * BookKeeper client. We assume there is one single writer to a ledger at any
 * time.
 *
 * There are four possible operations: start a new ledger, write to a ledger,
 * read from a ledger and delete a ledger.
 *
 * The exceptions resulting from synchronous calls and error code resulting from
 * asynchronous calls can be found in the class {@link BKException}.
 */
public class BookKeeper {

    static final Logger LOG = LoggerFactory.getLogger(BookKeeper.class);

    // ZooKeeper handle used for ledger metadata and bookie discovery.
    final ZooKeeper zk;

    final CountDownLatch connectLatch = new CountDownLatch(1);
    final static int zkConnectTimeoutMs = 5000;

    // Netty factory used to open connections to bookies.
    final ClientSocketChannelFactory channelFactory;

    // whether the socket factory is one we created, or is owned by whoever
    // instantiated us; only resources we own are released in close().
    boolean ownChannelFactory = false;
    // whether the zk handle is one we created, or is owned by whoever
    // instantiated us; only a handle we own is closed in close().
    boolean ownZKHandle = false;

    final BookieClient bookieClient;
    final BookieWatcher bookieWatcher;

    final OrderedSafeExecutor mainWorkerPool;
    final ScheduledExecutorService scheduler;

    // Ledger manager responsible for how to store ledger meta data
    final LedgerManagerFactory ledgerManagerFactory;
    final LedgerManager ledgerManager;

    // Ensemble Placement Policy: decides which bookies form a ledger's ensemble.
    final EnsemblePlacementPolicy placementPolicy;

    final ClientConfiguration conf;

    interface ZKConnectCallback {
        public void connected();
        public void connectionFailed(int code);
    }

    /**
     * Create a bookkeeper client. A zookeeper client and a client socket factory
     * will be instantiated as part of this constructor.
     *
     * @param servers
     *          A list of one of more servers on which zookeeper is running. The
     *          client assumes that the running bookies have been registered with
     *          zookeeper under the path
     *          {@link BookieWatcher#bookieRegistrationPath}
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException
     */
    public BookKeeper(String servers) throws IOException, InterruptedException,
        KeeperException {
        this(new ClientConfiguration().setZkServers(servers));
    }

    /**
     * Create a bookkeeper client using a configuration object.
     * A zookeeper client and a client socket factory will be
     * instantiated as part of this constructor; both are owned by this
     * instance and released in {@link #close()}.
     *
     * @param conf
     *          Client Configuration object
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException
     */
    public BookKeeper(final ClientConfiguration conf)
            throws IOException, InterruptedException, KeeperException {
        this.conf = conf;

        ZooKeeperWatcherBase w = new ZooKeeperWatcherBase(conf.getZkTimeout());
        // Blocks until the ZooKeeper session is established.
        this.zk = ZkUtils
                .createConnectedZookeeperClient(conf.getZkServers(), w);

        this.channelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(),
                                                                Executors.newCachedThreadPool());
        this.scheduler = Executors.newSingleThreadScheduledExecutor();

        // initialize the ensemble placement
        this.placementPolicy = initializeEnsemblePlacementPolicy(conf);

        mainWorkerPool = new OrderedSafeExecutor(conf.getNumWorkerThreads());
        bookieClient = new BookieClient(conf, channelFactory, mainWorkerPool);
        bookieWatcher = new BookieWatcher(conf, scheduler, placementPolicy, this);
        // Read the available bookies before serving any ledger operations.
        bookieWatcher.readBookiesBlocking();

        ledgerManagerFactory = LedgerManagerFactory.newLedgerManagerFactory(conf, zk);
        ledgerManager = ledgerManagerFactory.newLedgerManager();

        // We created both the channel factory and zk handle, so we must
        // release them on close().
        ownChannelFactory = true;
        ownZKHandle = true;
    }

    /**
     * Create a bookkeeper client but use the passed in zookeeper client instead
     * of instantiating one.
     *
     * @param conf
     *          Client Configuration object
     *          {@link ClientConfiguration}
     * @param zk
     *          Zookeeper client instance connected to the zookeeper with which
     *          the bookies have registered
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException
     */
    public BookKeeper(ClientConfiguration conf, ZooKeeper zk)
            throws IOException, InterruptedException, KeeperException {
        this(conf, zk, new NioClientSocketChannelFactory(Executors.newCachedThreadPool(),
                                                         Executors.newCachedThreadPool()));
        // The channel factory above was created here, so we own it; the zk
        // handle was passed in, so ownZKHandle stays false.
        ownChannelFactory = true;
    }

    /**
     * Create a bookkeeper client but use the passed in zookeeper client and
     * client socket channel factory instead of instantiating those.
     *
     * @param conf
     *          Client Configuration Object
     *          {@link ClientConfiguration}
     * @param zk
     *          Zookeeper client instance connected to the zookeeper with which
     *          the bookies have registered. The ZooKeeper client must be connected
     *          before it is passed to BookKeeper. Otherwise a KeeperException is thrown.
     * @param channelFactory
     *          A factory that will be used to create connections to the bookies
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException if the passed zk handle is not connected
     */
    public BookKeeper(ClientConfiguration conf, ZooKeeper zk, ClientSocketChannelFactory channelFactory)
            throws IOException, InterruptedException, KeeperException {
        if (zk == null || channelFactory == null) {
            throw new NullPointerException();
        }
        // Fail fast on an unconnected handle rather than hanging on the first
        // metadata operation.
        if (!zk.getState().isConnected()) {
            LOG.error("Unconnected zookeeper handle passed to bookkeeper");
            throw KeeperException.create(KeeperException.Code.CONNECTIONLOSS);
        }

        this.conf = conf;
        this.zk = zk;
        this.channelFactory = channelFactory;
        this.scheduler = Executors.newSingleThreadScheduledExecutor();

        // initialize the ensemble placement
        this.placementPolicy = initializeEnsemblePlacementPolicy(conf);

        mainWorkerPool = new OrderedSafeExecutor(conf.getNumWorkerThreads());
        bookieClient = new BookieClient(conf, channelFactory, mainWorkerPool);
        bookieWatcher = new BookieWatcher(conf, scheduler, placementPolicy, this);
        bookieWatcher.readBookiesBlocking();

        ledgerManagerFactory = LedgerManagerFactory.newLedgerManagerFactory(conf, zk);
        ledgerManager = ledgerManagerFactory.newLedgerManager();
        // Both zk handle and channel factory are caller-owned here; neither
        // ownership flag is set, so close() will not release them.
    }

    /**
     * Instantiate and initialize the ensemble placement policy configured by
     * the client configuration.
     *
     * @param conf client configuration naming the policy class
     * @return an initialized placement policy instance
     * @throws IOException if the configured policy cannot be constructed
     */
    private EnsemblePlacementPolicy initializeEnsemblePlacementPolicy(ClientConfiguration conf)
            throws IOException {
        try {
            Class<? extends EnsemblePlacementPolicy> policyCls = conf.getEnsemblePlacementPolicy();
            return ReflectionUtils.newInstance(policyCls).initialize(conf);
        } catch (ConfigurationException e) {
            throw new IOException("Failed to initialize ensemble placement policy : ", e);
        }
    }

    LedgerManager getLedgerManager() {
        return ledgerManager;
    }

    /**
     * There are 2 digest types that can be used for verification. The CRC32 is
     * cheap to compute but does not protect against byzantine bookies (i.e., a
     * bookie might report fake bytes and a matching CRC32). The MAC code is more
     * expensive to compute, but is protected by a password, i.e., a bookie can't
     * report fake bytes with a matching MAC unless it knows the password.
     */
    public enum DigestType {
        MAC, CRC32
    };

    ZooKeeper getZkHandle() {
        return zk;
    }

    protected ClientConfiguration getConf() {
        return conf;
    }

    /**
     * Get the BookieClient, currently used for doing bookie recovery.
     *
     * @return BookieClient for the BookKeeper instance.
     */
    BookieClient getBookieClient() {
        return bookieClient;
    }

    /**
     * Creates a new ledger asynchronously. To create a ledger, we need to specify
     * the ensemble size, the quorum size, the digest type, a password, a callback
     * implementation, and an optional control object. The ensemble size is how
     * many bookies the entries should be striped among and the quorum size is the
     * degree of replication of each entry. The digest type is either a MAC or a
     * CRC. Note that the CRC option is not able to protect a client against a
     * bookie that replaces an entry. The password is used not only to
     * authenticate access to a ledger, but also to verify entries in ledgers.
     *
     * @param ensSize
     *          number of bookies over which to stripe entries
     * @param writeQuorumSize
     *          number of bookies each entry will be written to. each of these bookies
     *          must acknowledge the entry before the call is completed.
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @param cb
     *          createCallback implementation
     * @param ctx
     *          optional control object
     */
    public void asyncCreateLedger(final int ensSize, final int writeQuorumSize,
                                  final DigestType digestType,
                                  final byte[] passwd, final CreateCallback cb, final Object ctx) {
        // With no separate ack quorum given, the ack quorum equals the write
        // quorum (every written bookie must acknowledge).
        asyncCreateLedger(ensSize, writeQuorumSize, writeQuorumSize, digestType, passwd, cb, ctx);
    }

    /**
     * Creates a new ledger asynchronously. Ledgers created with this call have
     * a separate write quorum and ack quorum size. The write quorum must be larger than
     * the ack quorum.
     *
     * Separating the write and the ack quorum allows the BookKeeper client to continue
     * writing when a bookie has failed but the failure has not yet been detected. Detecting
     * a bookie has failed can take a number of seconds, as configured by the read timeout
     * {@link ClientConfiguration#getReadTimeout()}. Once the bookie failure is detected,
     * that bookie will be removed from the ensemble.
     *
     * The other parameters match those of {@link #asyncCreateLedger(int, int, DigestType, byte[],
     * AsyncCallback.CreateCallback, Object)}
     *
     * @param ensSize
     *          number of bookies over which to stripe entries
     * @param writeQuorumSize
     *          number of bookies each entry will be written to
     * @param ackQuorumSize
     *          number of bookies which must acknowledge an entry before the call is completed
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @param cb
     *          createCallback implementation
     * @param ctx
     *          optional control object
     */
    public void asyncCreateLedger(final int ensSize, final int writeQuorumSize, final int ackQuorumSize,
                                  final DigestType digestType,
                                  final byte[] passwd, final CreateCallback cb, final Object ctx) {
        if (writeQuorumSize < ackQuorumSize) {
            throw new IllegalArgumentException("Write quorum must be larger than ack quorum");
        }
        new LedgerCreateOp(BookKeeper.this, ensSize, writeQuorumSize, ackQuorumSize,
                           digestType, passwd, cb, ctx)
            .initiate();
    }

    /**
     * Creates a new ledger. Default of 3 servers, and quorum of 2 servers.
     *
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @return a handle to the newly created ledger
     * @throws InterruptedException
     * @throws BKException
     */
    public LedgerHandle createLedger(DigestType digestType, byte passwd[])
            throws BKException, InterruptedException {
        return createLedger(3, 2, digestType, passwd);
    }

    /**
     * Synchronous call to create ledger. Parameters match those of
     * {@link #asyncCreateLedger(int, int, DigestType, byte[],
     * AsyncCallback.CreateCallback, Object)}
     *
     * @param ensSize
     * @param qSize
     * @param digestType
     * @param passwd
     * @return a handle to the newly created ledger
     * @throws InterruptedException
     * @throws BKException
     */
    public LedgerHandle createLedger(int ensSize, int qSize,
                                     DigestType digestType, byte passwd[])
            throws InterruptedException, BKException {
        return createLedger(ensSize, qSize, qSize, digestType, passwd);
    }

    /**
     * Synchronous call to create ledger. Parameters match those of
     * {@link #asyncCreateLedger(int, int, int, DigestType, byte[],
     * AsyncCallback.CreateCallback, Object)}
     *
     * @param ensSize
     * @param writeQuorumSize
     * @param ackQuorumSize
     * @param digestType
     * @param passwd
     * @return a handle to the newly created ledger
     * @throws InterruptedException
     * @throws BKException
     */
    public LedgerHandle createLedger(int ensSize, int writeQuorumSize, int ackQuorumSize,
                                     DigestType digestType, byte passwd[])
            throws InterruptedException, BKException {
        SyncCounter counter = new SyncCounter();
        counter.inc();
        /*
         * Calls asynchronous version
         */
        asyncCreateLedger(ensSize, writeQuorumSize, ackQuorumSize, digestType, passwd,
                          new SyncCreateCallback(), counter);

        /*
         * Wait
         */
        counter.block(0);
        if (counter.getLh() == null) {
            LOG.error("ZooKeeper error: " + counter.getrc());
            throw BKException.create(Code.ZKException);
        }

        return counter.getLh();
    }

    /**
     * Open existing ledger asynchronously for reading.
     *
     * Opening a ledger with this method invokes fencing and recovery on the ledger
     * if the ledger has not been closed. Fencing will block all other clients from
     * writing to the ledger. Recovery will make sure that the ledger is closed
     * before reading from it.
     *
     * Recovery also makes sure that any entries which reached one bookie, but not a
     * quorum, will be replicated to a quorum of bookies. This occurs in cases were
     * the writer of a ledger crashes after sending a write request to one bookie but
     * before being able to send it to the rest of the bookies in the quorum.
     *
     * If the ledger is already closed, neither fencing nor recovery will be applied.
     *
     * @see LedgerHandle#asyncClose
     *
     * @param lId
     *          ledger identifier
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @param ctx
     *          optional control object
     */
    public void asyncOpenLedger(final long lId, final DigestType digestType, final byte passwd[],
                                final OpenCallback cb, final Object ctx) {
        new LedgerOpenOp(BookKeeper.this, lId, digestType, passwd, cb, ctx).initiate();
    }

    /**
     * Open existing ledger asynchronously for reading, but it does not try to
     * recover the ledger if it is not yet closed. The application needs to use
     * it carefully, since the writer might have crashed and ledger will remain
     * unsealed forever if there is no external mechanism to detect the failure
     * of the writer and the ledger is not open in a safe manner, invoking the
     * recovery procedure.
     *
     * Opening a ledger without recovery does not fence the ledger. As such, other
     * clients can continue to write to the ledger.
     *
     * This method returns a read only ledger handle. It will not be possible
     * to add entries to the ledger. Any attempt to add entries will throw an
     * exception.
     *
     * Reads from the returned ledger will only be able to read entries up until
     * the lastConfirmedEntry at the point in time at which the ledger was opened.
     *
     * @param lId
     *          ledger identifier
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @param ctx
     *          optional control object
     */
    public void asyncOpenLedgerNoRecovery(final long lId, final DigestType digestType, final byte passwd[],
                                          final OpenCallback cb, final Object ctx) {
        new LedgerOpenOp(BookKeeper.this, lId, digestType, passwd, cb, ctx).initiateWithoutRecovery();
    }

    /**
     * Synchronous open ledger call
     *
     * @see #asyncOpenLedger
     * @param lId
     *          ledger identifier
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @return a handle to the open ledger
     * @throws InterruptedException
     * @throws BKException
     */
    public LedgerHandle openLedger(long lId, DigestType digestType, byte passwd[])
            throws BKException, InterruptedException {
        SyncCounter counter = new SyncCounter();
        counter.inc();

        /*
         * Calls async open ledger
         */
        asyncOpenLedger(lId, digestType, passwd, new SyncOpenCallback(), counter);

        /*
         * Wait
         */
        counter.block(0);
        if (counter.getrc() != BKException.Code.OK)
            throw BKException.create(counter.getrc());

        return counter.getLh();
    }

    /**
     * Synchronous, unsafe open ledger call
     *
     * @see #asyncOpenLedgerNoRecovery
     * @param lId
     *          ledger identifier
     * @param digestType
     *          digest type, either MAC or CRC32
     * @param passwd
     *          password
     * @return a handle to the open ledger
     * @throws InterruptedException
     * @throws BKException
     */
    public LedgerHandle openLedgerNoRecovery(long lId, DigestType digestType, byte passwd[])
            throws BKException, InterruptedException {
        SyncCounter counter = new SyncCounter();
        counter.inc();

        /*
         * Calls async open ledger
         */
        asyncOpenLedgerNoRecovery(lId, digestType, passwd, new SyncOpenCallback(), counter);

        /*
         * Wait
         */
        counter.block(0);
        if (counter.getrc() != BKException.Code.OK)
            throw BKException.create(counter.getrc());

        return counter.getLh();
    }

    /**
     * Deletes a ledger asynchronously.
     *
     * @param lId
     *          ledger Id
     * @param cb
     *          deleteCallback implementation
     * @param ctx
     *          optional control object
     */
    public void asyncDeleteLedger(final long lId, final DeleteCallback cb, final Object ctx) {
        new LedgerDeleteOp(BookKeeper.this, lId, cb, ctx).initiate();
    }

    /**
     * Synchronous call to delete a ledger. Parameters match those of
     * {@link #asyncDeleteLedger(long, AsyncCallback.DeleteCallback, Object)}
     *
     * @param lId
     *          ledgerId
     * @throws InterruptedException
     * @throws BKException.BKNoSuchLedgerExistsException if the ledger doesn't exist
     * @throws BKException
     */
    public void deleteLedger(long lId) throws InterruptedException, BKException {
        SyncCounter counter = new SyncCounter();
        counter.inc();
        // Call asynchronous version
        asyncDeleteLedger(lId, new SyncDeleteCallback(), counter);
        // Wait
        counter.block(0);
        if (counter.getrc() != BKException.Code.OK) {
            LOG.error("Error deleting ledger " + lId + " : " + counter.getrc());
            throw BKException.create(counter.getrc());
        }
    }

    /**
     * Check asynchronously whether the ledger with identifier <i>lId</i>
     * has been closed.
     *
     * @param lId ledger identifier
     * @param cb callback method
     */
    public void asyncIsClosed(long lId, final IsClosedCallback cb, final Object ctx){
        // Closed-ness is derived from the ledger's metadata; on a metadata
        // read failure the callback reports rc with isClosed=false.
        ledgerManager.readLedgerMetadata(lId, new GenericCallback<LedgerMetadata>(){
            public void operationComplete(int rc, LedgerMetadata lm){
                if (rc == BKException.Code.OK) {
                    cb.isClosedComplete(rc, lm.isClosed(), ctx);
                } else {
                    cb.isClosedComplete(rc, false, ctx);
                }
            }
        });
    }

    /**
     * Check whether the ledger with identifier <i>lId</i>
     * has been closed.
     *
     * @param lId
     * @return boolean true if ledger has been closed
     * @throws BKException
     */
    public boolean isClosed(long lId) throws BKException, InterruptedException {
        // Local holder used to pass the async result back to this thread.
        final class Result {
            int rc;
            boolean isClosed;
            final CountDownLatch notifier = new CountDownLatch(1);
        }

        final Result result = new Result();

        final IsClosedCallback cb = new IsClosedCallback(){
            public void isClosedComplete(int rc, boolean isClosed, Object ctx){
                result.isClosed = isClosed;
                result.rc = rc;
                result.notifier.countDown();
            }
        };

        /*
         * Call asynchronous version of isClosed
         */
        asyncIsClosed(lId, cb, null);

        /*
         * Wait for callback
         */
        result.notifier.await();

        if (result.rc != BKException.Code.OK) {
            throw BKException.create(result.rc);
        }

        return result.isClosed;
    }

    /**
     * Shuts down client. Executors are drained first so no in-flight
     * operation touches a resource after it is released; the channel
     * factory and zk handle are only released if this instance created them
     * (see ownChannelFactory / ownZKHandle).
     */
    public void close() throws InterruptedException, BKException {
        scheduler.shutdown();
        if (!scheduler.awaitTermination(10, TimeUnit.SECONDS)) {
            LOG.warn("The scheduler did not shutdown cleanly");
        }
        mainWorkerPool.shutdown();
        if (!mainWorkerPool.awaitTermination(10, TimeUnit.SECONDS)) {
            LOG.warn("The mainWorkerPool did not shutdown cleanly");
        }
        bookieClient.close();
        try {
            ledgerManager.close();
            ledgerManagerFactory.uninitialize();
        } catch (IOException ie) {
            // Best-effort shutdown: log and continue releasing the remaining
            // resources below.
            LOG.error("Failed to close ledger manager : ", ie);
        }
        if (ownChannelFactory) {
            channelFactory.releaseExternalResources();
        }
        if (ownZKHandle) {
            zk.close();
        }
    }

    private static class SyncCreateCallback implements CreateCallback {
        /**
         * Create callback implementation for synchronous create call.
         *
         * @param rc
         *          return code
         * @param lh
         *          ledger handle object
         * @param ctx
         *          optional control object
         */
        @Override
        public void createComplete(int rc, LedgerHandle lh, Object ctx) {
            SyncCounter counter = (SyncCounter) ctx;
            counter.setLh(lh);
            counter.setrc(rc);
            counter.dec();
        }
    }

    static class SyncOpenCallback implements OpenCallback {
        /**
         * Callback method for synchronous open operation
         *
         * @param rc
         *          return code
         * @param lh
         *          ledger handle
         * @param ctx
         *          optional control object
         */
        @Override
        public void openComplete(int rc, LedgerHandle lh, Object ctx) {
            SyncCounter counter = (SyncCounter) ctx;
            counter.setLh(lh);

            LOG.debug("Open complete: {}", rc);

            counter.setrc(rc);
            counter.dec();
        }
    }

    private static class SyncDeleteCallback implements DeleteCallback {
        /**
         * Delete callback implementation for synchronous delete call.
         *
         * @param rc
         *          return code
         * @param ctx
         *          optional control object
         */
        @Override
        public void deleteComplete(int rc, Object ctx) {
            SyncCounter counter = (SyncCounter) ctx;
            counter.setrc(rc);
            counter.dec();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ranger.biz; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.Query; import org.apache.ranger.common.AppConstants; import org.apache.ranger.common.ContextUtil; import org.apache.ranger.common.RESTErrorUtil; import org.apache.ranger.common.RangerCommonEnums; import org.apache.ranger.common.RangerConstants; import org.apache.ranger.common.SearchCriteria; import org.apache.ranger.common.SearchUtil; import org.apache.ranger.common.StringUtil; import org.apache.ranger.common.UserSessionBase; import org.apache.ranger.db.RangerDaoManager; import org.apache.ranger.db.XXGroupPermissionDao; import org.apache.ranger.db.XXModuleDefDao; import org.apache.ranger.db.XXPortalUserDao; import org.apache.ranger.db.XXPortalUserRoleDao; import org.apache.ranger.db.XXUserDao; import org.apache.ranger.db.XXUserPermissionDao; import org.apache.ranger.entity.XXGroupPermission; import org.apache.ranger.entity.XXModuleDef; import org.apache.ranger.entity.XXPortalUser; import org.apache.ranger.entity.XXPortalUserRole; import org.apache.ranger.entity.XXTrxLog; import 
org.apache.ranger.entity.XXUser; import org.apache.ranger.entity.XXUserPermission; import org.apache.ranger.security.context.RangerContextHolder; import org.apache.ranger.security.context.RangerSecurityContext; import org.apache.ranger.service.XGroupPermissionService; import org.apache.ranger.service.XPortalUserService; import org.apache.ranger.service.XUserPermissionService; import org.apache.ranger.view.VXGroupPermission; import org.apache.ranger.view.VXPasswordChange; import org.apache.ranger.view.VXPortalUser; import org.apache.ranger.view.VXPortalUserList; import org.apache.ranger.view.VXResponse; import org.apache.ranger.view.VXString; import org.apache.ranger.view.VXUserPermission; import org.junit.After; import org.junit.Assert; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.MethodSorters; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TestUserMgr { private static Long userId = 1L; private static String userLoginID = "testuser"; @InjectMocks UserMgr userMgr = new UserMgr(); @Mock VXPortalUser VXPortalUser; @Mock RangerDaoManager daoManager; @Mock RESTErrorUtil restErrorUtil; @Mock ContextUtil contextUtil; @Mock StringUtil stringUtil; @Mock SearchUtil searchUtil; @Mock RangerBizUtil rangerBizUtil; @Mock XUserPermissionService xUserPermissionService; @Mock XGroupPermissionService xGroupPermissionService; @Mock SessionMgr sessionMgr; @Mock XUserMgr xUserMgr; @Mock XPortalUserService xPortalUserService; @Rule public ExpectedException thrown = ExpectedException.none(); public void setup() { RangerSecurityContext context = new RangerSecurityContext(); context.setUserSession(new UserSessionBase()); RangerContextHolder.setSecurityContext(context); 
UserSessionBase currentUserSession = ContextUtil.getCurrentUserSession(); currentUserSession.setUserAdmin(true); } public void setupKeyAdmin() { RangerSecurityContext context = new RangerSecurityContext(); context.setUserSession(new UserSessionBase()); RangerContextHolder.setSecurityContext(context); UserSessionBase currentUserSession = ContextUtil.getCurrentUserSession(); XXPortalUser userKeyAdmin = new XXPortalUser(); userKeyAdmin.setId(userProfile().getId()); userKeyAdmin.setLoginId(userProfile().getLoginId()); currentUserSession.setXXPortalUser(userKeyAdmin); currentUserSession.setKeyAdmin(true); } public void setupUser() { RangerSecurityContext context = new RangerSecurityContext(); context.setUserSession(new UserSessionBase()); RangerContextHolder.setSecurityContext(context); UserSessionBase currentUserSession = ContextUtil.getCurrentUserSession(); XXPortalUser user = new XXPortalUser(); user.setId(userProfile().getId()); user.setLoginId(userProfile().getLoginId()); currentUserSession.setXXPortalUser(user); } private VXPortalUser userProfile() { VXPortalUser userProfile = new VXPortalUser(); userProfile.setEmailAddress("test@test.com"); userProfile.setFirstName("user12"); userProfile.setLastName("test12"); userProfile.setLoginId(userLoginID); userProfile.setPassword("usertest12323"); userProfile.setUserSource(1); userProfile.setPublicScreenName("testuser"); userProfile.setId(userId); return userProfile; } private XXPortalUser xxPortalUser(VXPortalUser userProfile) { XXPortalUser xxPortalUser = new XXPortalUser(); xxPortalUser.setEmailAddress(userProfile.getEmailAddress()); xxPortalUser.setFirstName(userProfile.getFirstName()); xxPortalUser.setLastName(userProfile.getLastName()); xxPortalUser.setLoginId(userProfile.getLoginId()); xxPortalUser.setPassword(userProfile.getPassword()); xxPortalUser.setUserSource(userProfile.getUserSource()); xxPortalUser.setPublicScreenName(userProfile.getPublicScreenName()); return xxPortalUser; } public void 
setupRangerUserSyncUser() { RangerSecurityContext context = new RangerSecurityContext(); context.setUserSession(new UserSessionBase()); RangerContextHolder.setSecurityContext(context); UserSessionBase currentUserSession = ContextUtil.getCurrentUserSession(); XXPortalUser user = new XXPortalUser(); user.setId(1L); user.setLoginId("rangerusersync"); user.setEmailAddress("test@test.com"); currentUserSession.setXXPortalUser(user); currentUserSession.setUserAdmin(true); } @After public void destroySession() { RangerSecurityContext context = new RangerSecurityContext(); context.setUserSession(null); RangerContextHolder.setSecurityContext(context); } @Test public void test01CreateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.create((XXPortalUser) Mockito.any())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); XXPortalUser dbxxPortalUser = 
userMgr.createUser(userProfile, 1,userRoleList); Assert.assertNotNull(dbxxPortalUser); userId = dbxxPortalUser.getId(); Assert.assertEquals(userId, dbxxPortalUser.getId()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getLastName(),dbxxPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbxxPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbxxPortalUser.getEmailAddress()); Assert.assertEquals(userProfile.getPassword(),dbxxPortalUser.getPassword()); Mockito.verify(daoManager).getXXPortalUser(); Mockito.verify(daoManager).getXXPortalUserRole(); } @Test public void test02CreateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); userProfile.setUserRoleList(userRoleList); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.create((XXPortalUser) Mockito.any())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); 
Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); XXPortalUser dbxxPortalUser = userMgr.createUser(userProfile, 1); userId = dbxxPortalUser.getId(); Assert.assertNotNull(dbxxPortalUser); Assert.assertEquals(userId, dbxxPortalUser.getId()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getLastName(),dbxxPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbxxPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbxxPortalUser.getEmailAddress()); Assert.assertEquals(userProfile.getPassword(),dbxxPortalUser.getPassword()); Mockito.verify(daoManager).getXXPortalUser(); Mockito.verify(daoManager).getXXPortalUserRole(); } @Test public void test03ChangePasswordAsAdmin() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); VXPasswordChange pwdChange = new VXPasswordChange(); pwdChange.setId(userProfile.getId()); pwdChange.setLoginId(userProfile.getLoginId()); pwdChange.setOldPassword(userProfile.getPassword()); pwdChange.setEmailAddress(userProfile.getEmailAddress()); pwdChange.setUpdPassword(userProfile.getPassword()); XXPortalUser user = new XXPortalUser(); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.nullable(String.class))).thenReturn(user); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.nullable(String.class))).thenReturn(true); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(stringUtil.validatePassword(Mockito.anyString(), Mockito.any(String[].class))).thenReturn(true); VXResponse dbVXResponse = userMgr.changePassword(pwdChange); Assert.assertNotNull(dbVXResponse); Assert.assertEquals(userProfile.getStatus(),dbVXResponse.getStatusCode()); 
// --- tail of the preceding change-password test: verify the mocked StringUtil was
// consulted once for the old-password comparison and once for new-password validation. ---
Mockito.verify(stringUtil).equals(Mockito.anyString(),Mockito.nullable(String.class)); Mockito.verify(stringUtil).validatePassword(Mockito.anyString(),Mockito.any(String[].class)); }

// test04: changePassword() with the session prepared as key-admin (setupKeyAdmin()).
// Stubs the XXPortalUser DAO lookup and makes both StringUtil checks succeed, then
// asserts a non-null VXResponse whose status code equals the profile's status, and
// verifies the StringUtil interactions.
// NOTE(review): daoManager.getXXPortalUser() is stubbed twice with the same mock —
// the second stubbing is redundant.
@Test public void test04ChangePasswordAsKeyAdmin() { setupKeyAdmin(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); VXPasswordChange pwdChange = new VXPasswordChange(); pwdChange.setId(userProfile.getId()); pwdChange.setLoginId(userProfile.getLoginId()); pwdChange.setOldPassword(userProfile.getPassword()); pwdChange.setEmailAddress(userProfile.getEmailAddress()); pwdChange.setUpdPassword(userProfile.getPassword()); XXPortalUser userKeyAdmin = new XXPortalUser(); userKeyAdmin.setId(userProfile.getId()); userKeyAdmin.setLoginId(userProfile.getLoginId()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(userKeyAdmin); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.nullable(String.class))).thenReturn(true); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(stringUtil.validatePassword(Mockito.anyString(), Mockito.any(String[].class))).thenReturn(true); VXResponse dbVXResponse = userMgr.changePassword(pwdChange); Assert.assertNotNull(dbVXResponse); Assert.assertEquals(userProfile.getStatus(),dbVXResponse.getStatusCode()); Mockito.verify(stringUtil).equals(Mockito.anyString(), Mockito.nullable(String.class)); Mockito.verify(stringUtil).validatePassword(Mockito.anyString(), Mockito.any(String[].class)); }

// test05: same changePassword() flow as test04 but with a plain-user session
// (setupUser()); identical stubbing, assertions and verifications.
// NOTE(review): same redundant double-stubbing of getXXPortalUser() as in test04.
@Test public void test05ChangePasswordAsUser() { setupUser(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); VXPasswordChange pwdChange = new VXPasswordChange(); pwdChange.setId(userProfile.getId()); pwdChange.setLoginId(userProfile.getLoginId()); pwdChange.setOldPassword(userProfile.getPassword()); pwdChange.setEmailAddress(userProfile.getEmailAddress()); pwdChange.setUpdPassword(userProfile.getPassword()); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(user); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.nullable(String.class))).thenReturn(true); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(stringUtil.validatePassword(Mockito.anyString(), Mockito.any(String[].class))).thenReturn(true); VXResponse dbVXResponse = userMgr.changePassword(pwdChange); Assert.assertNotNull(dbVXResponse); Assert.assertEquals(userProfile.getStatus(),dbVXResponse.getStatusCode()); Mockito.verify(stringUtil).equals(Mockito.anyString(), Mockito.nullable(String.class)); Mockito.verify(stringUtil).validatePassword(Mockito.anyString(),Mockito.any(String[].class)); }

// test06: changeEmailAddress() as admin (setup()). Builds an XXPortalUser mirroring
// the test profile (password encrypted via userMgr.encrypt), a VXPasswordChange
// carrying the new address, plus role/user-permission/group-permission fixtures.
@Test public void test06ChangeEmailAddressAsAdmin() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXModuleDefDao xModuleDefDao = Mockito.mock(XXModuleDefDao.class); XXModuleDef xModuleDef = Mockito.mock(XXModuleDef.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); VXPasswordChange changeEmail = new VXPasswordChange(); changeEmail.setEmailAddress("testuser@test.com"); changeEmail.setId(user.getId()); changeEmail.setLoginId(user.getLoginId()); changeEmail.setOldPassword(userProfile.getPassword()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); VXUserPermission userPermission = new VXUserPermission(); userPermission.setId(1L); userPermission.setIsAllowed(1); userPermission.setModuleId(1L); userPermission.setUserId(userId); userPermission.setUserName("xyz"); userPermission.setOwner("admin"); VXGroupPermission groupPermission = new VXGroupPermission(); groupPermission.setId(1L); groupPermission.setIsAllowed(1); groupPermission.setModuleId(1L); groupPermission.setGroupId(userId); groupPermission.setGroupName("xyz"); groupPermission.setOwner("admin");
// Stubbing: email validation/normalization succeed, every DAO getter resolves to the
// mocks above, and the permission services map the fixture entities to the view beans.
Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.normalizeEmail(Mockito.anyString())).thenReturn(changeEmail.getEmailAddress()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(userDao.update(user)).thenReturn(user); Mockito.when(roleDao.findByParentId(Mockito.anyLong())).thenReturn(list); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); Mockito.when(xGroupPermissionService.populateViewBean(xGroupPermissionObj)).thenReturn(groupPermission); Mockito.when(xUserPermissionService.populateViewBean(xUserPermissionObj)).thenReturn(userPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser();
// Exercise + assert: the returned view bean echoes id / last name / login id and the
// new email; then re-run with USER_APP source and with an empty address.
VXPortalUser dbVXPortalUser = userMgr.changeEmailAddress(user,changeEmail); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(userId, dbVXPortalUser.getId()); Assert.assertEquals(userProfile.getLastName(),dbVXPortalUser.getLastName()); Assert.assertEquals(changeEmail.getLoginId(),dbVXPortalUser.getLoginId()); Assert.assertEquals(changeEmail.getEmailAddress(),dbVXPortalUser.getEmailAddress()); user.setUserSource(RangerCommonEnums.USER_APP); dbVXPortalUser = userMgr.changeEmailAddress(user,changeEmail); user.setUserSource(RangerCommonEnums.USER_EXTERNAL); changeEmail.setEmailAddress(""); dbVXPortalUser =
// (statement concludes on the next source line with userMgr.changeEmailAddress(user,changeEmail);)
// Final re-invocation of changeEmailAddress() with the empty address set on the
// preceding source line, then the end of test06.
userMgr.changeEmailAddress(user,changeEmail); }

// test07: changeEmailAddress() with a key-admin session (setupKeyAdmin()). Same
// fixture shape as test06 (profile-mirroring XXPortalUser, VXPasswordChange with
// the new address, role and permission fixtures) but without the extra USER_APP /
// empty-address re-runs; asserts the returned view bean echoes id, last name,
// login id and the new email.
@Test public void test07ChangeEmailAddressAsKeyAdmin() { setupKeyAdmin(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXModuleDefDao xModuleDefDao = Mockito.mock(XXModuleDefDao.class); XXModuleDef xModuleDef = Mockito.mock(XXModuleDef.class); VXPortalUser userProfile = userProfile(); XXPortalUser userKeyAdmin = new XXPortalUser(); userKeyAdmin.setEmailAddress(userProfile.getEmailAddress()); userKeyAdmin.setFirstName(userProfile.getFirstName()); userKeyAdmin.setLastName(userProfile.getLastName()); userKeyAdmin.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); userKeyAdmin.setPassword(encryptedPwd); userKeyAdmin.setUserSource(userProfile.getUserSource()); userKeyAdmin.setPublicScreenName(userProfile.getPublicScreenName()); userKeyAdmin.setId(userProfile.getId()); VXPasswordChange changeEmail = new VXPasswordChange(); changeEmail.setEmailAddress("testuser@test.com"); changeEmail.setId(userKeyAdmin.getId()); changeEmail.setLoginId(userKeyAdmin.getLoginId()); changeEmail.setOldPassword(userProfile.getPassword()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); VXUserPermission userPermission = new VXUserPermission(); userPermission.setId(1L); userPermission.setIsAllowed(1); userPermission.setModuleId(1L); userPermission.setUserId(userId); userPermission.setUserName("xyz"); userPermission.setOwner("admin"); VXGroupPermission groupPermission = new VXGroupPermission(); groupPermission.setId(1L); groupPermission.setIsAllowed(1); groupPermission.setModuleId(1L); groupPermission.setGroupId(userId); groupPermission.setGroupName("xyz"); groupPermission.setOwner("admin");
// Stubbing phase (same pattern as test06).
Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.normalizeEmail(Mockito.anyString())).thenReturn(changeEmail.getEmailAddress()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByParentId(Mockito.anyLong())).thenReturn(list); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); Mockito.when(xGroupPermissionService.populateViewBean(xGroupPermissionObj)).thenReturn(groupPermission); Mockito.when(xUserPermissionService.populateViewBean(xUserPermissionObj)).thenReturn(userPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); VXPortalUser dbVXPortalUser = userMgr.changeEmailAddress(userKeyAdmin,changeEmail); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(userId, dbVXPortalUser.getId()); Assert.assertEquals(userProfile.getLastName(),dbVXPortalUser.getLastName()); Assert.assertEquals(changeEmail.getLoginId(),dbVXPortalUser.getLoginId()); Assert.assertEquals(changeEmail.getEmailAddress(),dbVXPortalUser.getEmailAddress()); }

// test08: changeEmailAddress() with a plain-user session (setupUser()); same fixture
// construction as test06/test07 (continues on the following source lines).
@Test public void test08ChangeEmailAddressAsUser() { setupUser(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXModuleDefDao xModuleDefDao = Mockito.mock(XXModuleDefDao.class); XXModuleDef xModuleDef = Mockito.mock(XXModuleDef.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); VXPasswordChange
// Continuation of test08: VXPasswordChange carrying the new address, plus the same
// role / user-permission / group-permission fixtures used by test06 and test07.
changeEmail = new VXPasswordChange(); changeEmail.setEmailAddress("testuser@test.com"); changeEmail.setId(user.getId()); changeEmail.setLoginId(user.getLoginId()); changeEmail.setOldPassword(userProfile.getPassword()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); VXUserPermission userPermission = new VXUserPermission(); userPermission.setId(1L); userPermission.setIsAllowed(1); userPermission.setModuleId(1L); userPermission.setUserId(userId); userPermission.setUserName("xyz"); userPermission.setOwner("admin"); VXGroupPermission groupPermission = new VXGroupPermission(); groupPermission.setId(1L); groupPermission.setIsAllowed(1); groupPermission.setModuleId(1L); groupPermission.setGroupId(userId); groupPermission.setGroupName("xyz"); groupPermission.setOwner("admin");
// Stubbing + exercise + assertions for test08 (same pattern as test06/test07).
Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.equals(Mockito.anyString(), Mockito.anyString())).thenReturn(true); Mockito.when(stringUtil.normalizeEmail(Mockito.anyString())).thenReturn(changeEmail.getEmailAddress()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByParentId(Mockito.anyLong())).thenReturn(list); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); Mockito.when(xGroupPermissionService.populateViewBean(xGroupPermissionObj)).thenReturn(groupPermission); Mockito.when(xUserPermissionService.populateViewBean(xUserPermissionObj)).thenReturn(userPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); VXPortalUser dbVXPortalUser = userMgr.changeEmailAddress(user,changeEmail); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(userId, dbVXPortalUser.getId()); Assert.assertEquals(userProfile.getLastName(),dbVXPortalUser.getLastName()); Assert.assertEquals(changeEmail.getLoginId(),dbVXPortalUser.getLoginId()); Assert.assertEquals(changeEmail.getEmailAddress(),dbVXPortalUser.getEmailAddress()); }

// test09: createUser() as admin. The DAO's create() is stubbed to return a fresh,
// empty XXPortalUser, so every assertion compares against that empty entity's
// fields; also verifies the user/group-permission DAOs were fetched.
@Test public void test09CreateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXPortalUser user = new XXPortalUser(); VXPortalUser userProfile = userProfile(); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.create((XXPortalUser) Mockito.any())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); VXPortalUser dbVXPortalUser = userMgr.createUser(userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(user.getId(), dbVXPortalUser.getId()); Assert.assertEquals(user.getFirstName(), dbVXPortalUser.getFirstName()); Assert.assertEquals(user.getLastName(), dbVXPortalUser.getLastName()); Assert.assertEquals(user.getLoginId(), dbVXPortalUser.getLoginId()); Assert.assertEquals(user.getEmailAddress(),dbVXPortalUser.getEmailAddress()); Assert.assertEquals(user.getPassword(), dbVXPortalUser.getPassword()); Mockito.verify(daoManager).getXXPortalUser(); Mockito.verify(daoManager).getXXUserPermission(); Mockito.verify(daoManager).getXXGroupPermission(); }

// test10: createDefaultAccountUser() when findByLoginId() already returns an
// existing user — the existing entity is reused and echoed back in the result.
@Test public void test10CreateDefaultAccountUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); userProfile.setUserRoleList(userRoleList); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); VXPortalUser dbVXPortalUser = userMgr.createDefaultAccountUser(userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(user.getId(), dbVXPortalUser.getId()); Assert.assertEquals(user.getFirstName(), dbVXPortalUser.getFirstName()); Assert.assertEquals(user.getLastName(), dbVXPortalUser.getLastName()); Assert.assertEquals(user.getLoginId(), dbVXPortalUser.getLoginId());
// Tail of test10: remaining field assertions plus at-least-once DAO-fetch verification.
Assert.assertEquals(user.getEmailAddress(),dbVXPortalUser.getEmailAddress()); Assert.assertEquals(user.getPassword(), dbVXPortalUser.getPassword()); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUser(); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUserRole(); }

// test11: createDefaultAccountUser() for an external user that does not yet exist —
// both findByLoginId() and findByEmailAddress() return null, so the stubbed
// userDao.create() supplies the entity echoed in the result.
@Test public void test11CreateDefaultAccountUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); userProfile.setStatus(RangerCommonEnums.USER_EXTERNAL); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); userProfile.setUserRoleList(userRoleList); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setUserSource(RangerCommonEnums.USER_EXTERNAL); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(null); Mockito.when(userDao.findByEmailAddress(Mockito.anyString())).thenReturn(null); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(userDao.create((XXPortalUser) Mockito.any())).thenReturn(user); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); VXPortalUser dbVXPortalUser = userMgr.createDefaultAccountUser(userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(user.getId(), dbVXPortalUser.getId()); Assert.assertEquals(user.getFirstName(), dbVXPortalUser.getFirstName()); Assert.assertEquals(user.getLastName(), dbVXPortalUser.getLastName()); Assert.assertEquals(user.getLoginId(), dbVXPortalUser.getLoginId()); Assert.assertEquals(user.getEmailAddress(),dbVXPortalUser.getEmailAddress()); Assert.assertEquals(user.getPassword(), dbVXPortalUser.getPassword()); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUser(); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUserRole(); }

// test12: createDefaultAccountUser() for an existing external login, additionally
// exercising the empty-password / null-email branch of the profile.
@Test public void test12CreateDefaultAccountUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); userProfile.setStatus(RangerCommonEnums.USER_EXTERNAL); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); userProfile.setUserRoleList(userRoleList); XXPortalUser xxPortalUser = new XXPortalUser(); xxPortalUser.setEmailAddress(userProfile.getEmailAddress()); xxPortalUser.setUserSource(RangerCommonEnums.USER_EXTERNAL); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(xxPortalUser); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); userProfile.setPassword(""); userProfile.setEmailAddress(null); VXPortalUser dbVXPortalUser = userMgr.createDefaultAccountUser(userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(xxPortalUser.getId(), dbVXPortalUser.getId()); Assert.assertEquals(xxPortalUser.getFirstName(), dbVXPortalUser.getFirstName()); Assert.assertEquals(xxPortalUser.getLastName(), dbVXPortalUser.getLastName()); Assert.assertEquals(xxPortalUser.getLoginId(), dbVXPortalUser.getLoginId()); Assert.assertEquals(xxPortalUser.getEmailAddress(),dbVXPortalUser.getEmailAddress()); Assert.assertEquals(xxPortalUser.getPassword(), dbVXPortalUser.getPassword()); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUser(); Mockito.verify(daoManager, Mockito.atLeast(1)).getXXPortalUserRole(); }

// test13: isUserInRole() — true when the role DAO finds a matching role row,
// false when the lookup returns null.
@Test public void test13IsUserInRole() { XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByRoleUserId(userId, "ROLE_USER")).thenReturn(XXPortalUserRole); boolean isValue = userMgr.isUserInRole(userId, "ROLE_USER"); Assert.assertTrue(isValue); Mockito.when(roleDao.findByRoleUserId(userId, "ROLE_USER")).thenReturn(null); isValue = userMgr.isUserInRole(userId, "ROLE_USER"); Assert.assertFalse(isValue); }

// test14: updateUserWithPass() — happy path updates the existing entity and keeps
// the encrypted password; a second call with getById() returning null yields null.
@Test public void test14UpdateUserWithPass() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); userProfile.setPassword("password1234"); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); user.setEmailAddress(userProfile.getEmailAddress()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(user); Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); Mockito.when(stringUtil.validatePassword(Mockito.anyString(), Mockito.any(String[].class))).thenReturn(true); Mockito.when(userDao.update(user)).thenReturn(user); XXPortalUser dbXXPortalUser = userMgr.updateUserWithPass(userProfile); Assert.assertNotNull(dbXXPortalUser); Assert.assertEquals(userId, dbXXPortalUser.getId());
// Tail of test14: field assertions, then the null-entity branch.
// NOTE(review): the firstName assertion is duplicated verbatim — one copy is redundant
// (possibly meant to check a different field).
Assert.assertEquals(userProfile.getFirstName(),dbXXPortalUser.getFirstName()); Assert.assertEquals(userProfile.getFirstName(),dbXXPortalUser.getFirstName()); Assert.assertEquals(userProfile.getLastName(),dbXXPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbXXPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbXXPortalUser.getEmailAddress()); Assert.assertEquals(encryptedPwd, dbXXPortalUser.getPassword()); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(null); dbXXPortalUser = userMgr.updateUserWithPass(userProfile); Assert.assertNull(dbXXPortalUser); }

// test15: searchUsers() — mocks the JPA EntityManager/Query pair so the count query
// returns 1, then exercises every sortBy branch (userId, loginId, emailAddress,
// firstName, lastName, source, empty) plus asc/empty/desc sort types, asserting a
// non-null result list each time.
// NOTE(review): `1l` uses a lowercase ell suffix — prefer `1L` for readability.
@Test public void test15searchUsers() { Query query = Mockito.mock(Query.class); EntityManager entityManager = Mockito.mock(EntityManager.class); SearchCriteria searchCriteria = new SearchCriteria(); searchCriteria.setDistinct(true); searchCriteria.setGetChildren(true); searchCriteria.setGetCount(true); searchCriteria.setMaxRows(12); searchCriteria.setOwnerId(userId); searchCriteria.setStartIndex(1); searchCriteria.setSortBy("userId"); searchCriteria.setSortType("asc"); Long count = 1l; Mockito.when(daoManager.getEntityManager()).thenReturn(entityManager); Mockito.when(entityManager.createQuery(Mockito.anyString())).thenReturn(query); Mockito.when(query.getSingleResult()).thenReturn(count); VXPortalUserList dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy("loginId"); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy("emailAddress"); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy("firstName"); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy("lastName"); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy("source"); searchCriteria.setSortType(""); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); searchCriteria.setSortBy(""); searchCriteria.setSortType("desc"); dbVXPortalUserList = userMgr.searchUsers(searchCriteria); Assert.assertNotNull(dbVXPortalUserList); }

// test16: findByEmailAddress() delegates to the DAO. The stubbed XXPortalUser has no
// email set, which is why the test asserts the result's address does NOT equal the
// queried id.
@Test public void test16FindByEmailAddress() { XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUser user = new XXPortalUser(); String emailId = "test001user@apache.org"; Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByEmailAddress(emailId)).thenReturn(user); XXPortalUser dbXXPortalUser = userMgr.findByEmailAddress(emailId); Assert.assertNotNull(dbXXPortalUser); Assert.assertNotEquals(emailId, dbXXPortalUser.getEmailAddress()); Mockito.verify(daoManager).getXXPortalUser(); }

// test17: getRolesForUser() — role DAO returns one ROLE_USER row; asserts a non-null
// role collection and that the role DAO was fetched.
@Test public void test17GetRolesForUser() { XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); Collection<String> stringReturn = userMgr.getRolesForUser(user); Assert.assertNotNull(stringReturn); Mockito.verify(daoManager).getXXPortalUserRole(); }

// test18: deleteUserRole() returns true when the user's role list contains the role
// being removed.
@Test public void test18DeleteUserRole() { setup(); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); String userRole = "ROLE_USER"; XXPortalUser user = new XXPortalUser(); XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); boolean deleteValue = userMgr.deleteUserRole(userId, userRole); Assert.assertTrue(deleteValue); }

// test19: deactivateUser() — fixture construction (mocked DAOs, view-bean mocks, a
// profile-mirroring XXPortalUser and permission entities); stubbing, exercise and
// assertions continue on the following source lines.
@Test public void test19DeactivateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); VXGroupPermission vXGroupPermission = Mockito.mock(VXGroupPermission.class); XXModuleDefDao xModuleDefDao = Mockito.mock(XXModuleDefDao.class); XXModuleDef xModuleDef = Mockito.mock(XXModuleDef.class); VXUserPermission vXUserPermission = Mockito.mock(VXUserPermission.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId);
// Continuation of test19: remaining permission/role fixtures and DAO stubbing.
xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.update(user)).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByParentId(Mockito.anyLong())).thenReturn(list); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); Mockito.when(xGroupPermissionService.populateViewBean(xGroupPermissionObj)).thenReturn(vXGroupPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.when(xUserPermissionService.populateViewBean(xUserPermissionObj)).thenReturn(vXUserPermission); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser();
// Exercise: deactivateUser(null) yields null; deactivating the real entity echoes its
// fields back; verifies the DAO fetches and the view-bean population calls.
VXPortalUser dbVXPortalUser = userMgr.deactivateUser(null); Assert.assertNull(dbVXPortalUser); dbVXPortalUser = userMgr.deactivateUser(user); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(user.getId(), dbVXPortalUser.getId()); Assert.assertEquals(user.getFirstName(), dbVXPortalUser.getFirstName()); Assert.assertEquals(user.getLastName(), dbVXPortalUser.getLastName()); Assert.assertEquals(user.getLoginId(), dbVXPortalUser.getLoginId()); Mockito.verify(daoManager).getXXPortalUser(); Mockito.verify(daoManager).getXXUserPermission(); Mockito.verify(daoManager).getXXGroupPermission(); Mockito.verify(xUserPermissionService).populateViewBean(xUserPermissionObj); Mockito.verify(xGroupPermissionService).populateViewBean(xGroupPermissionObj); }

// test20: checkAccess() with an existing portal user — expected to complete without
// throwing (no assertions; success is the absence of an exception).
@Test public void test20checkAccess() { setup(); XXPortalUserDao xPortalUserDao = Mockito.mock(XXPortalUserDao.class); XXPortalUser xPortalUser = Mockito.mock(XXPortalUser.class); Mockito.when(daoManager.getXXPortalUser()).thenReturn(xPortalUserDao); Mockito.when(xPortalUserDao.getById(userId)).thenReturn(xPortalUser); userMgr.checkAccess(userId); }

// test21: getUserProfile() — first call with getById() returning null, then a second
// call with a real mock entity; only the second result is asserted non-null.
@Test public void test21getUserProfile() { setup(); XXPortalUserDao xPortalUserDao = Mockito.mock(XXPortalUserDao.class); XXPortalUser xPortalUser = Mockito.mock(XXPortalUser.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXPortalUserRoleDao xPortalUserRoleDao = Mockito.mock(XXPortalUserRoleDao.class); List<XXPortalUserRole> xPortalUserRoleList = new ArrayList<XXPortalUserRole>(); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); xPortalUserRoleList.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); Mockito.when(daoManager.getXXPortalUser()).thenReturn(xPortalUserDao); Mockito.when(xPortalUserDao.getById(userId)).thenReturn(null); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(xPortalUserRoleDao); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); VXPortalUser dbVXPortalUser = userMgr.getUserProfile(userId); Mockito.when(xPortalUserDao.getById(userId)).thenReturn(xPortalUser); dbVXPortalUser = userMgr.getUserProfile(userId); Assert.assertNotNull(dbVXPortalUser); }

// test22: getUserProfileByLoginId() — fixture setup begins here; the method body
// continues beyond this region of the file.
@Test public void test22getUserProfileByLoginId() { setup(); XXPortalUserDao xPortalUserDao = Mockito.mock(XXPortalUserDao.class); Mockito.when(daoManager.getXXPortalUser()).thenReturn(xPortalUserDao); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress());
user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); VXPortalUser dbVXPortalUser = userMgr.getUserProfileByLoginId(); Mockito.when(xPortalUserDao.findByLoginId(Mockito.anyString())).thenReturn(user); XXPortalUserRoleDao xPortalUserRoleDao = Mockito.mock(XXPortalUserRoleDao.class); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(xPortalUserRoleDao); List<XXPortalUserRole> xPortalUserRoleList = new ArrayList<XXPortalUserRole>(); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); xPortalUserRoleList.add(XXPortalUserRole); Mockito.when(xPortalUserRoleDao.findByParentId(Mockito.anyLong())).thenReturn(xPortalUserRoleList); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); dbVXPortalUser = userMgr.getUserProfileByLoginId(user.getLoginId()); Assert.assertNotNull(dbVXPortalUser); } @Test public void test23setUserRoles() { setup(); XXPortalUserRoleDao xPortalUserRoleDao = Mockito.mock(XXPortalUserRoleDao.class); 
XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); XXGroupPermissionDao xGroupPermissionDao = Mockito.mock(XXGroupPermissionDao.class); XXModuleDefDao xModuleDefDao = Mockito.mock(XXModuleDefDao.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); List<VXString> vStringRolesList = new ArrayList<VXString>(); VXString vXStringObj = new VXString(); vXStringObj.setValue("ROLE_USER"); vStringRolesList.add(vXStringObj); List<XXPortalUserRole> xPortalUserRoleList = new ArrayList<XXPortalUserRole>(); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); xPortalUserRoleList.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); List<XXGroupPermission> xGroupPermissionList = new ArrayList<XXGroupPermission>(); XXGroupPermission xGroupPermissionObj = new XXGroupPermission(); xGroupPermissionObj.setAddedByUserId(userId); xGroupPermissionObj.setCreateTime(new Date()); 
xGroupPermissionObj.setId(userId); xGroupPermissionObj.setIsAllowed(1); xGroupPermissionObj.setModuleId(1L); xGroupPermissionObj.setUpdatedByUserId(userId); xGroupPermissionObj.setUpdateTime(new Date()); xGroupPermissionObj.setGroupId(userId); xGroupPermissionList.add(xGroupPermissionObj); List<VXGroupPermission> groupPermList = new ArrayList<VXGroupPermission>(); VXGroupPermission groupPermission = new VXGroupPermission(); groupPermission.setId(1L); groupPermission.setIsAllowed(1); groupPermission.setModuleId(1L); groupPermission.setGroupId(userId); groupPermission.setGroupName("xyz"); groupPermission.setOwner("admin"); groupPermList.add(groupPermission); XXModuleDef xModuleDef = new XXModuleDef(); xModuleDef.setUpdatedByUserId(userId); xModuleDef.setAddedByUserId(userId); xModuleDef.setCreateTime(new Date()); xModuleDef.setId(userId); xModuleDef.setModule("Policy manager"); xModuleDef.setUpdateTime(new Date()); xModuleDef.setUrl("/policy manager"); VXUserPermission userPermission = new VXUserPermission(); userPermission.setId(1L); userPermission.setIsAllowed(1); userPermission.setModuleId(1L); userPermission.setUserId(userId); userPermission.setUserName("xyz"); userPermission.setOwner("admin"); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(xPortalUserRoleDao); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.getById(userId)).thenReturn(user); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionIdAndIsAllowed(userProfile.getId())).thenReturn(xUserPermissionsList); Mockito.when(daoManager.getXXGroupPermission()).thenReturn(xGroupPermissionDao); Mockito.when(xGroupPermissionDao.findbyVXPortalUserId(userProfile.getId())).thenReturn(xGroupPermissionList); Mockito.when(xGroupPermissionService.populateViewBean(xGroupPermissionObj)).thenReturn(groupPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); 
Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.when(xUserPermissionService.populateViewBean(xUserPermissionObj)).thenReturn(userPermission); Mockito.when(daoManager.getXXModuleDef()).thenReturn(xModuleDefDao); Mockito.when(xModuleDefDao.findByModuleId(Mockito.anyLong())).thenReturn(xModuleDef); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); userMgr.checkAccess(userId); userMgr.setUserRoles(userId, vStringRolesList); Mockito.verify(daoManager).getXXUserPermission(); Mockito.verify(daoManager).getXXGroupPermission(); Mockito.verify(xGroupPermissionService).populateViewBean(xGroupPermissionObj); Mockito.verify(xUserPermissionService).populateViewBean(xUserPermissionObj); } @Test public void test24updateRoles() { setup(); Collection<String> rolesList = new ArrayList<String>(); rolesList.add("ROLE_USER"); rolesList.add("ROLE_SYS_ADMIN"); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); XXPortalUserRoleDao userDao = Mockito.mock(XXPortalUserRoleDao.class); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(userDao); Mockito.when(userDao.findByUserId(userId)).thenReturn(list); boolean isFound = userMgr.updateRoles(userId, rolesList); Assert.assertFalse(isFound); } @Test public void test25updatePasswordInSHA256() { XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); String userName = userProfile.getFirstName(); String userPassword = userProfile.getPassword(); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); user.setPassword(userProfile.getPassword()); user.setUserSource(RangerCommonEnums.USER_APP); 
user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.update(user)).thenReturn(user); XXPortalUser dbXXPortalUser = userMgr.updatePasswordInSHA256(null,userPassword,false); Assert.assertNull(dbXXPortalUser); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(null); dbXXPortalUser = userMgr.updatePasswordInSHA256(userName,userPassword,false); Assert.assertNull(dbXXPortalUser); Mockito.when(userDao.findByLoginId(Mockito.anyString())).thenReturn(user); dbXXPortalUser = userMgr.updatePasswordInSHA256(userName,userPassword,true); Assert.assertNotNull(dbXXPortalUser); dbXXPortalUser = userMgr.updatePasswordInSHA256(userName,"Secret",true); Assert.assertNotNull(dbXXPortalUser); } @Test public void test26CreateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); 
Mockito.when(userDao.create((XXPortalUser) Mockito.any())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); XXPortalUser dbxxPortalUser = userMgr.createUser(userProfile, 1,userRoleList); Assert.assertNotNull(dbxxPortalUser); userId = dbxxPortalUser.getId(); Assert.assertEquals(userId, dbxxPortalUser.getId()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getFirstName(),dbxxPortalUser.getFirstName()); Assert.assertEquals(userProfile.getLastName(),dbxxPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbxxPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbxxPortalUser.getEmailAddress()); Assert.assertEquals(encryptedPwd,dbxxPortalUser.getPassword()); Mockito.verify(daoManager).getXXPortalUser(); Mockito.verify(daoManager).getXXPortalUserRole(); } @Test public void test27UpdateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); user.setEmailAddress(userProfile.getEmailAddress()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(user); Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); XXPortalUser dbXXPortalUser = userMgr.updateUser(userProfile); Assert.assertNotNull(dbXXPortalUser); Assert.assertEquals(userId, dbXXPortalUser.getId()); Assert.assertEquals(userProfile.getFirstName(),dbXXPortalUser.getFirstName()); 
Assert.assertEquals(userProfile.getFirstName(),dbXXPortalUser.getFirstName()); Assert.assertEquals(userProfile.getLastName(),dbXXPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbXXPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbXXPortalUser.getEmailAddress()); Assert.assertEquals(encryptedPwd,dbXXPortalUser.getPassword()); } @Test public void test28UpdateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); user.setEmailAddress(userProfile.getEmailAddress()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(null); XXPortalUser dbXXPortalUser = userMgr.updateUser(userProfile); Assert.assertNull(dbXXPortalUser); user.setStatus(RangerCommonEnums.USER_EXTERNAL); user.setFirstName("null"); user.setLastName("null"); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(user); Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); Mockito.when(userDao.findByEmailAddress(Mockito.anyString())).thenReturn(user); dbXXPortalUser = userMgr.updateUser(userProfile); Assert.assertNotNull(dbXXPortalUser); Assert.assertEquals(userId, dbXXPortalUser.getId()); Assert.assertEquals(userProfile.getLoginId(),dbXXPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbXXPortalUser.getEmailAddress()); Assert.assertEquals(encryptedPwd,dbXXPortalUser.getPassword()); } @Test public void test29UpdateOldUserName() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXUserDao xXUserDao 
= Mockito.mock(XXUserDao.class); VXPortalUser userProfile = userProfile(); String userLoginId = userProfile.getLoginId(); String newUserName= "newUserName"; String currentPassword = userProfile.getPassword(); XXPortalUser xXPortalUser = new XXPortalUser(); xXPortalUser.setEmailAddress(userProfile.getEmailAddress()); xXPortalUser.setFirstName(userProfile.getFirstName()); xXPortalUser.setLastName(userProfile.getLastName()); xXPortalUser.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); xXPortalUser.setPassword(encryptedPwd); xXPortalUser.setUserSource(userProfile.getUserSource()); xXPortalUser.setPublicScreenName(userProfile.getPublicScreenName()); xXPortalUser.setId(userProfile.getId()); xXPortalUser.setUserSource(RangerCommonEnums.USER_APP); XXUser xXUser = new XXUser(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); Collection<String> groupNameList = new ArrayList<String>(); groupNameList.add("Grp2"); xXUser.setId(userId); xXUser.setDescription(userProfile.getPublicScreenName()); xXUser.setName(userProfile.getLoginId()); List<XXTrxLog> trxLogList = new ArrayList<XXTrxLog>(); XXTrxLog xTrxLogObj = new XXTrxLog(); xTrxLogObj.setAction("update"); xTrxLogObj.setAddedByUserId(userId); xTrxLogObj.setAttributeName("User Name"); xTrxLogObj.setCreateTime(new Date()); xTrxLogObj.setId(userId); xTrxLogObj.setPreviousValue(userLoginId); xTrxLogObj.setNewValue(newUserName); xTrxLogObj.setObjectClassType(AppConstants.CLASS_TYPE_USER_PROFILE); xTrxLogObj.setObjectName(xXPortalUser.getLoginId()); xTrxLogObj.setObjectId(userId); xTrxLogObj.setParentObjectClassType(AppConstants.CLASS_TYPE_USER_PROFILE); xTrxLogObj.setParentObjectId(userId); xTrxLogObj.setUpdatedByUserId(xXPortalUser.getId()); trxLogList.add(xTrxLogObj); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); 
Mockito.when(userDao.findByLoginId(userProfile.getLoginId())).thenReturn(xXPortalUser); Mockito.when(daoManager.getXXUser()).thenReturn(xXUserDao); Mockito.when(xXUserDao.findByUserName(xXUser.getName())).thenReturn(xXUser); xXUser.setName(newUserName); Mockito.when(xXUserDao.update(xXUser)).thenReturn(xXUser); xXPortalUser.setLoginId(newUserName); Mockito.when(userDao.update(xXPortalUser)).thenReturn(xXPortalUser); xXPortalUser=userMgr.updateOldUserName(userLoginId, newUserName, currentPassword); Assert.assertNotNull(xXPortalUser); Assert.assertEquals(newUserName,xXPortalUser.getLoginId()); xXPortalUser.setUserSource(RangerCommonEnums.USER_EXTERNAL); Mockito.when(userDao.findByLoginId(userProfile.getLoginId())).thenReturn(xXPortalUser); xXPortalUser=userMgr.updateOldUserName(userLoginId, newUserName, currentPassword); xXPortalUser=userMgr.updateOldUserName(null, newUserName, currentPassword); Mockito.when(userDao.findByLoginId(userProfile.getLoginId())).thenReturn(null); xXPortalUser=userMgr.updateOldUserName(userLoginId, newUserName, currentPassword); } @Test public void test30getRolesByLoginId() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); VXPortalUser userProfile = userProfile(); String userLoginId = userProfile.getLoginId(); Collection<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); XXPortalUser user = new XXPortalUser(); user.setEmailAddress(userProfile.getEmailAddress()); user.setFirstName(userProfile.getFirstName()); user.setLastName(userProfile.getLastName()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); user.setUserSource(userProfile.getUserSource()); user.setPublicScreenName(userProfile.getPublicScreenName()); user.setId(userProfile.getId()); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); 
XXPortalUserRole.setId(user.getId()); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.findByLoginId(userProfile.getLoginId())).thenReturn(user); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); Collection<String> roleList = userMgr.getRolesByLoginId(userLoginId); Assert.assertNotNull(roleList); Assert.assertEquals(userLoginId, user.getLoginId()); Assert.assertEquals(userRoleList, roleList); roleList = userMgr.getRolesByLoginId(null); Mockito.when(roleDao.findByUserId(userId)).thenReturn(null); roleList = userMgr.getRolesByLoginId(userLoginId); } @Test public void test31checkAccess() { setup(); XXPortalUser xPortalUser = Mockito.mock(XXPortalUser.class); userMgr.checkAccess(xPortalUser); destroySession(); VXPortalUser userProfile = userProfile(); xPortalUser = xxPortalUser(userProfile); xPortalUser.setId(userProfile.getId()); setupUser(); userMgr.checkAccess(xPortalUser); } @Test public void test32checkAdminAccess() { setup(); userMgr.checkAdminAccess(); } @Test public void test33checkAccessForUpdate() { setup(); XXPortalUser xPortalUser = Mockito.mock(XXPortalUser.class); userMgr.checkAccessForUpdate(xPortalUser); } @Test public void test34updateRoleForExternalUsers() { setupRangerUserSyncUser(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); XXUserPermissionDao xUserPermissionDao = Mockito.mock(XXUserPermissionDao.class); Collection<String> existingRoleList = new ArrayList<String>(); existingRoleList.add(RangerConstants.ROLE_USER); Collection<String> reqRoleList = new ArrayList<String>(); reqRoleList.add(RangerConstants.ROLE_SYS_ADMIN); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); 
user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); user.setEmailAddress(userProfile.getEmailAddress()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userProfile.getId()); XXPortalUserRole.setUserRole(RangerConstants.ROLE_USER); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); Mockito.when(roleDao.findByUserId(userId)).thenReturn(list); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(user); Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); Mockito.when(daoManager.getXXUserPermission()).thenReturn(xUserPermissionDao); Mockito.when(xUserPermissionDao.findByUserPermissionId(userProfile.getId())).thenReturn(xUserPermissionsList); VXPortalUser dbVXPortalUser = userMgr.updateRoleForExternalUsers(reqRoleList,existingRoleList,userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(userId, dbVXPortalUser.getId()); Assert.assertEquals(userProfile.getFirstName(),dbVXPortalUser.getFirstName()); 
Assert.assertEquals(userProfile.getLastName(),dbVXPortalUser.getLastName()); Assert.assertEquals(userProfile.getLoginId(),dbVXPortalUser.getLoginId()); Assert.assertEquals(userProfile.getEmailAddress(),dbVXPortalUser.getEmailAddress()); } @Test public void test35mapVXPortalUserToXXPortalUser() { setup(); Collection<String> existingRoleList = new ArrayList<String>(); existingRoleList.add(RangerConstants.ROLE_USER); Collection<String> reqRoleList = new ArrayList<String>(); reqRoleList.add(RangerConstants.ROLE_SYS_ADMIN); VXPortalUser userProfile = userProfile(); userProfile.setFirstName("null"); userProfile.setLastName("null"); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); user.setEmailAddress(userProfile.getEmailAddress()); user.setLoginId(userProfile.getLoginId()); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userProfile.getId()); XXPortalUserRole.setUserRole(RangerConstants.ROLE_USER); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); List<XXUserPermission> xUserPermissionsList = new ArrayList<XXUserPermission>(); XXUserPermission xUserPermissionObj = new XXUserPermission(); xUserPermissionObj.setAddedByUserId(userId); xUserPermissionObj.setCreateTime(new Date()); xUserPermissionObj.setId(userId); xUserPermissionObj.setIsAllowed(1); xUserPermissionObj.setModuleId(1L); xUserPermissionObj.setUpdatedByUserId(userId); xUserPermissionObj.setUpdateTime(new Date()); xUserPermissionObj.setUserId(userId); xUserPermissionsList.add(xUserPermissionObj); XXPortalUser dbVXPortalUser = userMgr.mapVXPortalUserToXXPortalUser(userProfile); Assert.assertNotNull(dbVXPortalUser); Assert.assertEquals(userProfile.getLoginId(),dbVXPortalUser.getLoginId()); 
Assert.assertEquals(userProfile.getEmailAddress(),dbVXPortalUser.getEmailAddress()); } @Test public void test36UpdateUser() { setup(); XXPortalUserDao userDao = Mockito.mock(XXPortalUserDao.class); VXPortalUser userProfile = userProfile(); XXPortalUser user = new XXPortalUser(); user.setId(userProfile.getId()); user.setLoginId(userProfile.getLoginId()); userProfile.setFirstName("User"); userProfile.setLastName("User"); Mockito.when(stringUtil.validateEmail(Mockito.anyString())).thenReturn(true); String encryptedPwd = userMgr.encrypt(userProfile.getLoginId(),userProfile.getPassword()); user.setPassword(encryptedPwd); Mockito.when(daoManager.getXXPortalUser()).thenReturn(userDao); Mockito.when(userDao.getById(userProfile.getId())).thenReturn(user); Mockito.doNothing().when(rangerBizUtil).blockAuditorRoleUser(); Mockito.when(stringUtil.toCamelCaseAllWords(Mockito.anyString())).thenReturn(userProfile.getFirstName()); XXPortalUser dbXXPortalUser = userMgr.updateUser(userProfile); Assert.assertNotNull(dbXXPortalUser); Mockito.when(stringUtil.isEmpty(Mockito.anyString())).thenReturn(true); userProfile.setFirstName("null"); userProfile.setLastName("null"); userProfile.setEmailAddress(""); dbXXPortalUser = userMgr.updateUser(userProfile); } @Test public void test37createUserSearchQuery() { EntityManager entityManager = Mockito.mock(EntityManager.class); String queryString="Select id,loginId,emailAddress,firstName,lastName,statusList,publicScreenName,status from XXPortalUser"; Query query = Mockito.mock(Query.class); SearchCriteria searchCriteria = new SearchCriteria(); searchCriteria.setDistinct(true); searchCriteria.setGetChildren(true); searchCriteria.setGetCount(true); searchCriteria.setMaxRows(12); searchCriteria.setOwnerId(userId); searchCriteria.setStartIndex(1); searchCriteria.setSortBy("asc"); VXPortalUser vXPortalUser=userProfile(); List<String> userRoleList = new ArrayList<String>(); userRoleList.add("ROLE_USER"); List<Integer> statusList = new 
ArrayList<Integer>(); statusList.add(1); searchCriteria.addParam("roleList", userRoleList); searchCriteria.addParam("userId", vXPortalUser.getId()); searchCriteria.addParam("loginId", vXPortalUser.getLoginId()); searchCriteria.addParam("emailAddress", vXPortalUser.getEmailAddress()); searchCriteria.addParam("firstName", vXPortalUser.getFirstName()); searchCriteria.addParam("lastName", vXPortalUser.getLastName()); searchCriteria.addParam("statusList", statusList); searchCriteria.addParam("publicScreenName", vXPortalUser.getPublicScreenName()); searchCriteria.addParam("status", vXPortalUser.getStatus()); searchCriteria.addParam("familyScreenName", vXPortalUser.getPublicScreenName()); Mockito.when(daoManager.getEntityManager()).thenReturn(entityManager); Mockito.when(entityManager.createQuery(Mockito.anyString())).thenReturn(query); Query newQuery = userMgr.createUserSearchQuery(query.toString(),queryString,searchCriteria); Assert.assertNotNull(newQuery); userRoleList.add("ROLE_SYS_ADMIN"); statusList.add(0); searchCriteria.addParam("statusList", statusList); searchCriteria.addParam("roleList", userRoleList); newQuery = userMgr.createUserSearchQuery(query.toString(),queryString,searchCriteria); } @Test public void test38mapVXPortalUserToXXPortalUser() { Collection<String> existingRoleList = new ArrayList<String>(); existingRoleList.add(RangerConstants.ROLE_USER); VXPortalUser dbVXPortalUser = userMgr.mapXXPortalUserToVXPortalUser(null,existingRoleList); XXPortalUser user = new XXPortalUser(); Assert.assertNull(dbVXPortalUser); dbVXPortalUser = userMgr.mapXXPortalUserToVXPortalUser(user,existingRoleList); Assert.assertNull(dbVXPortalUser); } @Test public void test39gjUserToUserProfile() { VXPortalUser vXPortalUser = new VXPortalUser(); XXPortalUser xXPortalUser = new XXPortalUser(); userMgr.gjUserToUserProfile(xXPortalUser,vXPortalUser); } @Test public void test40deleteUserRole() { XXPortalUserRole xXPortalUserRole = new XXPortalUserRole(); boolean result = 
userMgr.deleteUserRole(1L,xXPortalUserRole); Assert.assertFalse("deletion-skipped", result); } @Test public void test41mapXXPortalUserToVXPortalUserForDefaultAccount() { VXPortalUser vXPortalUser=userProfile(); XXPortalUser xXPortalUser = xxPortalUser(vXPortalUser); XXPortalUserRoleDao roleDao = Mockito.mock(XXPortalUserRoleDao.class); Mockito.when(daoManager.getXXPortalUserRole()).thenReturn(roleDao); XXPortalUserRole XXPortalUserRole = new XXPortalUserRole(); XXPortalUserRole.setId(userId); XXPortalUserRole.setUserRole("ROLE_USER"); List<XXPortalUserRole> list = new ArrayList<XXPortalUserRole>(); list.add(XXPortalUserRole); Mockito.when(roleDao.findByParentId(xXPortalUser.getId())).thenReturn(list); VXPortalUser dbVXPortalUser = userMgr.mapXXPortalUserToVXPortalUserForDefaultAccount(xXPortalUser); Assert.assertNotNull(dbVXPortalUser); } }
package org.vizzini.illyriad.map.swingui;

import java.awt.BorderLayout;
import java.awt.Cursor;
import java.awt.EventQueue;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.BitSet;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JSplitPane;
import javax.swing.WindowConstants;

import org.vizzini.illyriad.map.DefaultMineralDatabase;
import org.vizzini.illyriad.map.DefaultTownDatabase;
import org.vizzini.illyriad.map.FastWorldMapDatabase;
import org.vizzini.illyriad.map.GeoIdConverter;
import org.vizzini.illyriad.map.MineralDatabase;
import org.vizzini.illyriad.map.SweetspotFinder;
import org.vizzini.illyriad.map.TownDatabase;
import org.vizzini.illyriad.map.WorldMapDatabase;

/**
 * Provides a user interface for a sweetspot finder.
 * <p>
 * To get the application name in the Mac menu bar, add this to the run configuration:
 * </p>
 * <code>-Dcom.apple.mrj.application.apple.menu.about.name="Vizzini Sweetspot Finder"</code>
 */
public final class SweetspotFinderUI extends JPanel
{
    /** Frame; assigned on the event dispatch thread by {@link #main(String[])}. */
    static JFrame _frame;

    /**
     * @return the frame
     */
    public static JFrame getFrame()
    {
        return _frame;
    }

    /**
     * Application method.
     *
     * @param args Application arguments.
     */
    public static final void main(final String[] args)
    {
        // Build the UI on the event dispatch thread, per Swing threading rules.
        EventQueue.invokeLater(new Runnable()
        {
            @Override
            public void run()
            {
                final SweetspotFinderUI appPanel = new SweetspotFinderUI();

                _frame = new JFrame("Vizzini Sweetspot Finder");
                _frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
                _frame.getContentPane().add(appPanel, BorderLayout.CENTER);
                _frame.setSize(1280, 850);
                _frame.setLocationByPlatform(true);
                _frame.setVisible(true);
            }
        });
    }

    /** Panel holding the search criteria controls. */
    private final ControlPanel controlPanel;

    /** Panel displaying the world map. */
    private final MapPanel mapPanel;

    /** Panel listing the search results. */
    private final ReportPanel reportPanel;

    /** Engine that performs the sweetspot search. */
    private final SweetspotFinder sweetspotFinder;

    /** Cursor in effect before the busy cursor was installed, if any. */
    private Cursor previousCursor;

    /**
     * Construct this object.
     */
    public SweetspotFinderUI()
    {
        final boolean isElgea = true;
        final GeoIdConverter idConverter = new GeoIdConverter(isElgea);

        // Wire the finder to its data sources.
        final WorldMapDatabase worldMapDb = new FastWorldMapDatabase(idConverter);
        final MineralDatabase mineralDb = new DefaultMineralDatabase(idConverter);
        final TownDatabase townDb = new DefaultTownDatabase(idConverter);
        sweetspotFinder = new SweetspotFinder(idConverter, worldMapDb, mineralDb, townDb);

        controlPanel = new ControlPanel();
        mapPanel = new MapPanel();
        reportPanel = new ReportPanel(idConverter, sweetspotFinder);

        // Map on top, report below; give the map nearly all extra vertical space.
        final JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, mapPanel, reportPanel);
        splitPane.setResizeWeight(0.95);

        setLayout(new BorderLayout());
        add(splitPane, BorderLayout.CENTER);
        add(controlPanel, BorderLayout.EAST);

        controlPanel.addPropertyChangeListener(createPropertyChangeListener());

        updateData();
    }

    /**
     * Copy the current control panel settings into the finder, run the search, and push the
     * resulting squares to the map and report panels.
     */
    void updateData()
    {
        setCursorBusy(true);

        copySettingsToFinder();

        final BitSet squares = sweetspotFinder.search();

        mapPanel.setSquares(squares);
        reportPanel.setSquares(squares);

        setCursorBusy(false);
    }

    /**
     * Transfer every user-selected criterion from the control panel to the sweetspot finder.
     */
    private void copySettingsToFinder()
    {
        sweetspotFinder.setSevenFood(controlPanel.isSevenFoodSelected());
        sweetspotFinder.setEightFood(controlPanel.isEightFoodSelected());
        sweetspotFinder.setFiveWood(controlPanel.isFiveWoodSelected());
        sweetspotFinder.setFiveClay(controlPanel.isFiveClaySelected());
        sweetspotFinder.setFiveIron(controlPanel.isFiveIronSelected());
        sweetspotFinder.setFiveStone(controlPanel.isFiveStoneSelected());
        sweetspotFinder.setNoTownTooClose(controlPanel.isNoTownTooCloseSelected());
        sweetspotFinder.setNoTownTooCloseRadius(controlPanel.getNoTownTooCloseRadius());
        sweetspotFinder.setInRegions(controlPanel.isRegionSelected());
        sweetspotFinder.setRegions(controlPanel.getRegions());
        sweetspotFinder.setOnTerrainCombat(controlPanel.isTerrainCombatSelected());
        sweetspotFinder.setTerrainCombats(controlPanel.getTerrainCombats());
        sweetspotFinder.setOnTerrainSpecific(controlPanel.isTerrainSpecificSelected());
        sweetspotFinder.setTerrainSpecifics(controlPanel.getTerrainSpecifics());
        sweetspotFinder.setHighFoodClose(controlPanel.isHighFoodCloseSelected());
        sweetspotFinder.setHighFoodRadius(controlPanel.getHighFoodRadius());
        sweetspotFinder.setMineralClose(controlPanel.isMineralCloseSelected());
        sweetspotFinder.setMineralRadius(controlPanel.getMineralRadius());
        sweetspotFinder.setTradeHubClose(controlPanel.isTradeHubCloseSelected());
        sweetspotFinder.setTradeHubRadius(controlPanel.getTradeHubRadius());
    }

    /**
     * @return a new property change listener that re-runs the search whenever the control
     *         panel fires a "refreshUI" event.
     */
    private PropertyChangeListener createPropertyChangeListener()
    {
        return new PropertyChangeListener()
        {
            @Override
            public void propertyChange(final PropertyChangeEvent event)
            {
                if ("refreshUI".equals(event.getPropertyName()))
                {
                    updateData();
                }
            }
        };
    }

    /**
     * Set the cursor to busy depending upon the given state.
     *
     * @param isBusy Flag indicating if the app is busy.
     */
    private void setCursorBusy(final boolean isBusy)
    {
        final Cursor newCursor;

        if (isBusy)
        {
            // Remember the current cursor so it can be restored afterwards.
            previousCursor = getCursor();
            newCursor = Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR);
        }
        else if (previousCursor != null)
        {
            newCursor = previousCursor;
            previousCursor = null;
        }
        else
        {
            newCursor = Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR);
        }

        setCursor(newCursor);
    }
}
/*
  Copyright 2004-2012, Martian Software, Inc.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
*/

package com.martiansoftware.nailgun;

import java.io.IOException;
import java.io.PrintStream;

/**
 * The class name is pretty descriptive.  This creates a PrintStream
 * much like a FilterOutputStream, but with the wrapped PrintStream
 * being local to the current Thread.  By setting System.out to a
 * ThreadLocalPrintStream, different Threads can write to different
 * PrintStreams simply by using System.out.  Of course, the init()
 * method must be called by the Thread that wishes to use the
 * wrapped stream.
 *
 * @author <a href="http://www.martiansoftware.com/contact.html">Marty Lamb</a>
 */
class ThreadLocalPrintStream extends PrintStream {

    /**
     * The PrintStreams for the various threads.  Inheritable so that threads
     * spawned by a registered thread write to that thread's stream.
     */
    private final InheritableThreadLocal<PrintStream> streams;

    /** Fallback stream for threads that have not called {@link #init(PrintStream)}. */
    private final PrintStream defaultPrintStream;

    /**
     * Creates a new ThreadLocalPrintStream.
     *
     * @param defaultPrintStream the PrintStream that will be used if the
     * current thread has not called init()
     */
    public ThreadLocalPrintStream(PrintStream defaultPrintStream) {
        super(defaultPrintStream);
        streams = new InheritableThreadLocal<PrintStream>();
        this.defaultPrintStream = defaultPrintStream;
        // Explicitly clear the creating thread's entry so it (and threads it
        // spawns) fall back to defaultPrintStream until init() is called.
        init(null);
    }

    /**
     * Sets the PrintStream for the current thread.
     *
     * @param streamForCurrentThread the PrintStream for the current thread
     */
    void init(PrintStream streamForCurrentThread) {
        streams.set(streamForCurrentThread);
    }

    /**
     * Returns this thread's PrintStream, or the default stream if this
     * thread has not registered one via {@link #init(PrintStream)}.
     *
     * @return this thread's PrintStream
     */
    PrintStream getPrintStream() {
        PrintStream result = streams.get();
        return ((result == null) ? defaultPrintStream : result);
    }

    // BEGIN delegated java.io.PrintStream methods

    /** @see java.io.PrintStream#checkError() */
    @Override
    public boolean checkError() {
        return (getPrintStream().checkError());
    }

    /** @see java.io.PrintStream#close() */
    @Override
    public void close() {
        getPrintStream().close();
    }

    /** @see java.io.PrintStream#flush() */
    @Override
    public void flush() {
        getPrintStream().flush();
    }

    /** @see java.io.PrintStream#print(boolean) */
    @Override
    public void print(boolean b) {
        getPrintStream().print(b);
    }

    /** @see java.io.PrintStream#print(char) */
    @Override
    public void print(char c) {
        getPrintStream().print(c);
    }

    /** @see java.io.PrintStream#print(char[]) */
    @Override
    public void print(char[] s) {
        getPrintStream().print(s);
    }

    /** @see java.io.PrintStream#print(double) */
    @Override
    public void print(double d) {
        getPrintStream().print(d);
    }

    /** @see java.io.PrintStream#print(float) */
    @Override
    public void print(float f) {
        getPrintStream().print(f);
    }

    /** @see java.io.PrintStream#print(int) */
    @Override
    public void print(int i) {
        getPrintStream().print(i);
    }

    /** @see java.io.PrintStream#print(long) */
    @Override
    public void print(long l) {
        getPrintStream().print(l);
    }

    /** @see java.io.PrintStream#print(Object) */
    @Override
    public void print(Object obj) {
        getPrintStream().print(obj);
    }

    /** @see java.io.PrintStream#print(String) */
    @Override
    public void print(String s) {
        getPrintStream().print(s);
    }

    /** @see java.io.PrintStream#println() */
    @Override
    public void println() {
        getPrintStream().println();
    }

    /** @see java.io.PrintStream#println(boolean) */
    @Override
    public void println(boolean x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(char) */
    @Override
    public void println(char x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(char[]) */
    @Override
    public void println(char[] x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(double) */
    @Override
    public void println(double x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(float) */
    @Override
    public void println(float x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(int) */
    @Override
    public void println(int x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(long) */
    @Override
    public void println(long x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(Object) */
    @Override
    public void println(Object x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#println(String) */
    @Override
    public void println(String x) {
        getPrintStream().println(x);
    }

    /** @see java.io.PrintStream#write(byte[],int,int) */
    @Override
    public void write(byte[] buf, int off, int len) {
        getPrintStream().write(buf, off, len);
    }

    /** @see java.io.PrintStream#write(int) */
    @Override
    public void write(int b) {
        getPrintStream().write(b);
    }

    // END delegated java.io.PrintStream methods

    // BEGIN delegated java.io.FilterOutputStream methods

    /** @see java.io.FilterOutputStream#write(byte[]) */
    @Override
    public void write(byte[] b) throws IOException {
        getPrintStream().write(b);
    }

    // END delegated java.io.FilterOutputStream methods

    // Note: Should java.lang.Object methods be delegated?  If not, and
    // someone synchronizes on this stream, processes might be blocked
    // that shouldn't be.  It would certainly be stupid to delegate
    // finalize().  Not so clear are hashcode(), equals(), notify(), and
    // the wait() methods.
}
package org.jfree.data.statistics; import java.util.ArrayList; import java.util.Date; import java.util.List; import org.jfree.chart.axis.DateAxis; import org.jfree.data.Range; import org.jfree.data.RangeInfo; import org.jfree.data.xy.AbstractXYDataset; import org.jfree.util.ObjectUtilities; public class DefaultBoxAndWhiskerXYDataset extends AbstractXYDataset implements BoxAndWhiskerXYDataset, RangeInfo { private List dates; private double faroutCoefficient; private List items; private Number maximumRangeValue; private Number minimumRangeValue; private double outlierCoefficient; private Range rangeBounds; private Comparable seriesKey; public DefaultBoxAndWhiskerXYDataset(Comparable seriesKey) { this.outlierCoefficient = 1.5d; this.faroutCoefficient = DateAxis.DEFAULT_AUTO_RANGE_MINIMUM_SIZE_IN_MILLISECONDS; this.seriesKey = seriesKey; this.dates = new ArrayList(); this.items = new ArrayList(); this.minimumRangeValue = null; this.maximumRangeValue = null; this.rangeBounds = null; } public double getOutlierCoefficient() { return this.outlierCoefficient; } public void setOutlierCoefficient(double outlierCoefficient) { this.outlierCoefficient = outlierCoefficient; } public double getFaroutCoefficient() { return this.faroutCoefficient; } public void setFaroutCoefficient(double faroutCoefficient) { if (faroutCoefficient > getOutlierCoefficient()) { this.faroutCoefficient = faroutCoefficient; return; } throw new IllegalArgumentException("Farout value must be greater than the outlier value, which is currently set at: (" + getOutlierCoefficient() + ")"); } public int getSeriesCount() { return 1; } public int getItemCount(int series) { return this.dates.size(); } public void add(Date date, BoxAndWhiskerItem item) { this.dates.add(date); this.items.add(item); if (this.minimumRangeValue == null) { this.minimumRangeValue = item.getMinRegularValue(); } else if (item.getMinRegularValue().doubleValue() < this.minimumRangeValue.doubleValue()) { this.minimumRangeValue = 
item.getMinRegularValue(); } if (this.maximumRangeValue == null) { this.maximumRangeValue = item.getMaxRegularValue(); } else if (item.getMaxRegularValue().doubleValue() > this.maximumRangeValue.doubleValue()) { this.maximumRangeValue = item.getMaxRegularValue(); } this.rangeBounds = new Range(this.minimumRangeValue.doubleValue(), this.maximumRangeValue.doubleValue()); fireDatasetChanged(); } public Comparable getSeriesKey(int i) { return this.seriesKey; } public BoxAndWhiskerItem getItem(int series, int item) { return (BoxAndWhiskerItem) this.items.get(item); } public Number getX(int series, int item) { return new Long(((Date) this.dates.get(item)).getTime()); } public Date getXDate(int series, int item) { return (Date) this.dates.get(item); } public Number getY(int series, int item) { return getMeanValue(series, item); } public Number getMeanValue(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMean(); } return null; } public Number getMedianValue(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMedian(); } return null; } public Number getQ1Value(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getQ1(); } return null; } public Number getQ3Value(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getQ3(); } return null; } public Number getMinRegularValue(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMinRegularValue(); } return null; } public Number getMaxRegularValue(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMaxRegularValue(); } return null; } public Number getMinOutlier(int series, int 
item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMinOutlier(); } return null; } public Number getMaxOutlier(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getMaxOutlier(); } return null; } public List getOutliers(int series, int item) { BoxAndWhiskerItem stats = (BoxAndWhiskerItem) this.items.get(item); if (stats != null) { return stats.getOutliers(); } return null; } public double getRangeLowerBound(boolean includeInterval) { if (this.minimumRangeValue != null) { return this.minimumRangeValue.doubleValue(); } return Double.NaN; } public double getRangeUpperBound(boolean includeInterval) { if (this.maximumRangeValue != null) { return this.maximumRangeValue.doubleValue(); } return Double.NaN; } public Range getRangeBounds(boolean includeInterval) { return this.rangeBounds; } public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof DefaultBoxAndWhiskerXYDataset)) { return false; } DefaultBoxAndWhiskerXYDataset that = (DefaultBoxAndWhiskerXYDataset) obj; if (!ObjectUtilities.equal(this.seriesKey, that.seriesKey)) { return false; } if (!this.dates.equals(that.dates)) { return false; } if (this.items.equals(that.items)) { return true; } return false; } public Object clone() throws CloneNotSupportedException { DefaultBoxAndWhiskerXYDataset clone = (DefaultBoxAndWhiskerXYDataset) super.clone(); clone.dates = new ArrayList(this.dates); clone.items = new ArrayList(this.items); return clone; } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package cmsc.pkg105.lab03; import static cmsc.pkg105.lab03.CMSC105LAB03.dataInterpretation; import static cmsc.pkg105.lab03.CMSC105LAB03.showDescription; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.Scanner; import java.util.Set; import java.util.TreeSet; /** * * @author alfisalvacion */ public class Ungrouped { Double[] inputarray; Data[] uniqarr; int maxin; int cont; double mean; double median; double variance; double standarddeviation; double max; double min; double range; LinkedList mode = new LinkedList(); Ungrouped() { runUngroupedanalysis(); } void runUngroupedanalysis() { Scanner sc = new Scanner(System.in); getInputdata(); do { showDescription(); displayInputdata(); changeInput(); find(); inputInterpretation(); showInterpretation(); System.out.print("\n\nWhat do you want to do?\n" + "[1] Input new data for ungrouped data [2] Back to main menu [Any number except " + "1 and 2] Reuse data\nYour choice: "); cont = sc.nextInt(); if(cont == 1) runUngroupedanalysis(); if(cont == 2) break; } while((cont < 1 || cont > 2)); }//DONE BUT COULD BE BETTER void getMaxin() { Scanner sc = new Scanner(System.in); System.out.println("\nEnter the maximum number of input."); do{ try{ System.out.print("Your maximum number of inputs is: "); maxin = sc.nextInt(); if(maxin < 1) throw new IllegalArgumentException(); } catch(Exception e){ System.out.println("\nEnter a positive number for maximum number of inputs."); sc = new Scanner(System.in); } }while(maxin < 1); }//DONE void getInputdata() { getMaxin(); Scanner sc = new Scanner(System.in); inputarray = new Double[maxin]; double inp; System.out.println(); for(int i = 0; i < maxin; i++) { try { System.out.print("[" + (i + 1) + "] "); inputarray[i] = sc.nextDouble(); } catch(Exception e) { 
System.out.println("Invalid input. Input not a number."); sc = new Scanner(System.in); i--; } } }//DONE void displayInputdata() { System.out.println("\nYour input data: "); for(int i = 0; i < maxin; i++) { System.out.println("[" + (i+1) + "] " + inputarray[i] + " "); } }//DONE void changeInput() { Scanner sc = new Scanner(System.in); int choice = 0; do { try { System.out.println("\nDo you want to change anything from your input?"); System.out.println("[1] Yes [2] No"); System.out.print("You choose: "); choice = sc.nextInt(); if(choice < 1 || choice > 2) throw new IllegalArgumentException(); } catch(Exception e) { System.out.println("Choice not in range. "); sc = new Scanner(System.in); } } while(choice < 1 || choice > 2); if(choice == 1) { changeInputYes(); } else if(choice == 2) { System.out.println("Your data is final."); } }//DONE private void changeInputYes() { Scanner sc = new Scanner(System.in); int ind; double newvalue; int cont = 0; System.out.println("\nEnter the index of the data you want to change."); System.out.println("Choose between 1 and " + maxin); do { try { System.out.print("\nIndex you want to change: "); ind = sc.nextInt(); if(ind < 1 || ind > maxin) throw new IllegalArgumentException("Input out of range."); System.out.println("You chose index [" + ind + "] " + inputarray[(ind-1)]); System.out.print("Enter new value: "); newvalue = sc.nextDouble(); inputarray[(ind-1)] = newvalue; displayInputdata(); do { System.out.println("\nDo you want to continue changing values?"); System.out.printf("Enter 0 if Yes, 1 if No.\nYou choose: "); cont = sc.nextInt(); if(cont < 0 || cont > 1) System.out.println("Input not in range."); } while(cont < 0 || cont > 1); } catch(Exception e) { System.out.println("Invalid input."); sc = new Scanner(System.in); } } while(cont == 0); }//DONE void find() { Scanner sc = new Scanner(System.in);LinkedList Mod = new <Mode>LinkedList(); // double temp = max(); // int ctr = 0; // Mode mode[]; // for (int i = 0; i < data1.length; 
i++) { // if (data1[i][2] == temp) { // ctr++; //// for(int cnt = 0; cnt < Mod.size(); cnt++){ //// if(data1[i][2] == ((Mode)(Mod.get(cnt))).freq) //// Mod.add(data1[i]); //// } // } // } // if(ctr != data1.length){ // mode = new Mode[ctr]; // for (int i = 0, modei = 0; i < data1.length; i++) { // if (data1[i][2] == temp && temp != 1) { // mode[modei] = new Mode(data1[i][0], data1[i][1]); // modei++; // } // } // if (mode.length == 0) { // System.out.println("\nMode: no mode"); // } // else { // System.out.println("\nModes: "); // for(int i = 0; i < data1.length; i++) { // if(data1[i][2] == temp) { // System.out.println(data1[i][0] + " - " + data1[i][1]); // } // } // if (ctr == 1) { // System.out.println("Unimodal"); // } else if (ctr == 2) { // System.out.println("Bimodal"); // } else if (ctr >= 3) { // System.out.println("Multimodal"); // } // } // } // else // System.out.println("\nMode: no mode"); int choice = 0; do { System.out.println("\nWhat do you want to do?"); System.out.println("[1] Find Mean [2] Find Median [3] Find Mode [4] All Measures"); System.out.print("Your choice: "); choice = sc.nextInt(); switch(choice){ case 1: displayMean(); break; case 2: displayMedian(); break; case 3: displayMode(); case 4: displayAll(); break; default: System.out.println("Invalid Input. 
Input not in range."); } } while(choice < 1 || choice > 4); }//DONE void displayMean() { mean = findMean(inputarray); variance = findVariance(inputarray); standarddeviation = findStandardDeviation(variance); System.out.printf("\nMean: %.2f\n", mean); System.out.printf("\nVariance: %.2f\n", variance); System.out.printf("\nStandard Deviation: %.2f\n", standarddeviation); }//DONE void displayMedian() { median = findMedian(inputarray); range = findRange(inputarray); System.out.printf("\nMedian: %.2f\n", median); System.out.printf("\nRange: %.2f\n", range); }//DONE void displayMode() { mode = findMode(inputarray); System.out.println("\nModes: " + mode); if(mode.isEmpty()) System.out.print("No Mode"); else if(mode.size() == 1) System.out.print("Unimodal Distribution"); else if(mode.size() == 2) System.out.print("Bimodal Distribution"); else if(mode.size() > 2) System.out.print("Multimodal Distribution"); }//DONE void displayAll() { displayMean(); displayMedian(); displayMode(); }//DONE double findMean(Double[] arrinp) { double ans; double sum = 0; for(int i = 0; i < maxin; i++) { sum += inputarray[i]; } ans = sum/maxin; return ans; }//DONE double findVariance(Double[] arrinp) { double ave; double sqrd; double sum = 0; double minus = 0; for(int i = 0; i < maxin; i++) { minus = (inputarray[i] - mean); sqrd = minus*minus; sum = sum + sqrd; } ave = sum/(maxin-1); return ave; }//DONE double findStandardDeviation(Double variance) { return Math.sqrt(variance); }//DONE double findMedian(Double[] arrinp) { double ans; if(maxin % 2 == 0) { double lowmid = arrinp[(maxin/2)-1];//minus one because indeces in array starts in 0 double himid = arrinp[((maxin+2)/2)-1]; ans = (lowmid + himid)/2; } else ans = arrinp[(maxin/2)]; return ans; }//DONE double findRange(Double[] arrinp) { min = findMin(arrinp); max = findMax(arrinp); return max-min; }//DONE double findMin(Double[] arrinp) { double tempmin = arrinp[0]; for(int i = 1; i < maxin; i++) { if(arrinp[i] < tempmin) tempmin = arrinp[i]; 
} return tempmin; }//DONE double findMax(Double[] arrinp) { double tempmax = arrinp[0]; for(int i = 1; i < maxin; i++) { if(arrinp[i] > tempmax) tempmax = arrinp[i]; } return tempmax; }//DONE Set findUniqvalues(Double[] arrinp) { Set<Double> uniqKeys = new TreeSet<Double>(); uniqKeys.addAll(Arrays.asList(arrinp)); //System.out.println("uniqKeys: " + uniqKeys); return uniqKeys; }//DONE void setUniqvalues() { Set<Double> uniqKeys = findUniqvalues(inputarray); Object[] unique = uniqKeys.toArray(); uniqarr = new Data[unique.length]; LinkedList index; for(int i = 0, count = 0; i < unique.length; i++) { index = new LinkedList(); count = 0; for(int j = 0; j < maxin; j++) { if(unique[i].equals(inputarray[j])) { count++; index.add(j+1); uniqarr[i] = new Data((Double)unique[i], count, index); } } } }//DONE void printUniqvalues() { System.out.println(""); for(int i = 0; i < uniqarr.length; i++) { System.out.println("Value: " + uniqarr[i].strvalue + " Count: " + uniqarr[i].freq + " Indeces: " + uniqarr[i].indeces); } }//DONE LinkedList findMode(Double[] arrinp) { LinkedList mod = new LinkedList(); setUniqvalues(); printUniqvalues(); int max = uniqarr[0].freq; int min = uniqarr[0].freq; for(int i = 1; i < uniqarr.length; i++) { if(uniqarr[i].freq > max) { max = uniqarr[i].freq; } if(uniqarr[i].freq < min) min = uniqarr[i].freq; } for(int i = 0; i < uniqarr.length; i++) { if(uniqarr[i].freq == max) mod.add(uniqarr[i].strvalue); } if(uniqarr.length != 1) if(max == min) return new LinkedList(); return mod; }//DONE void inputInterpretation() { Scanner sc = new Scanner(System.in); System.out.println("\n\nPlease input an Interpretation for the output."); System.out.print("Your interpretation: "); dataInterpretation = sc.nextLine(); }//DONE void showInterpretation() { System.out.println("\n\nInterpretation of output: " + dataInterpretation); }//DONE }
/*
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.rxjava3.internal.operators.flowable;

import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;

import java.util.*;

import org.junit.*;
import org.mockito.InOrder;
import org.reactivestreams.*;

import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.exceptions.TestException;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.functions.Functions;
import io.reactivex.rxjava3.internal.subscriptions.BooleanSubscription;
import io.reactivex.rxjava3.internal.util.ExceptionHelper;
import io.reactivex.rxjava3.operators.QueueFuseable;
import io.reactivex.rxjava3.operators.QueueSubscription;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.processors.UnicastProcessor;
import io.reactivex.rxjava3.testsupport.*;

/**
 * Unit tests for {@code Flowable.distinct()}, covering the no-key and
 * key-selector overloads, fusion modes, custom collection suppliers, and a
 * misbehaving upstream.
 */
public class FlowableDistinctTest extends RxJavaTest {

    /** Mock downstream subscriber, recreated before each test. */
    Subscriber<String> w;

    // nulls lead to exceptions
    // Key selector used by the key-based tests: "x" maps to "XX",
    // everything else to its upper-cased form.
    final Function<String, String> TO_UPPER_WITH_EXCEPTION = new Function<String, String>() {
        @Override
        public String apply(String s) {
            if (s.equals("x")) {
                return "XX";
            }
            return s.toUpperCase();
        }
    };

    @Before
    public void before() {
        w = TestHelper.mockSubscriber();
    }

    /** An empty source produces no onNext/onError, just onComplete. */
    @Test
    public void distinctOfNone() {
        Flowable<String> src = Flowable.empty();
        src.distinct().subscribe(w);

        verify(w, never()).onNext(anyString());
        verify(w, never()).onError(any(Throwable.class));
        verify(w, times(1)).onComplete();
    }

    /** Same as above, but through the key-selector overload. */
    @Test
    public void distinctOfNoneWithKeySelector() {
        Flowable<String> src = Flowable.empty();
        src.distinct(TO_UPPER_WITH_EXCEPTION).subscribe(w);

        verify(w, never()).onNext(anyString());
        verify(w, never()).onError(any(Throwable.class));
        verify(w, times(1)).onComplete();
    }

    /** Duplicates are dropped; first occurrences are emitted in source order. */
    @Test
    public void distinctOfNormalSource() {
        Flowable<String> src = Flowable.just("a", "b", "c", "c", "c", "b", "b", "a", "e");
        src.distinct().subscribe(w);

        InOrder inOrder = inOrder(w);
        inOrder.verify(w, times(1)).onNext("a");
        inOrder.verify(w, times(1)).onNext("b");
        inOrder.verify(w, times(1)).onNext("c");
        inOrder.verify(w, times(1)).onNext("e");
        inOrder.verify(w, times(1)).onComplete();
        inOrder.verify(w, never()).onNext(anyString());
        verify(w, never()).onError(any(Throwable.class));
    }

    /**
     * With a key selector, distinctness is decided by the key while the
     * ORIGINAL (unmapped) value is what gets emitted.
     */
    @Test
    public void distinctOfNormalSourceWithKeySelector() {
        Flowable<String> src = Flowable.just("a", "B", "c", "C", "c", "B", "b", "a", "E");
        src.distinct(TO_UPPER_WITH_EXCEPTION).subscribe(w);

        InOrder inOrder = inOrder(w);
        inOrder.verify(w, times(1)).onNext("a");
        inOrder.verify(w, times(1)).onNext("B");
        inOrder.verify(w, times(1)).onNext("c");
        inOrder.verify(w, times(1)).onNext("E");
        inOrder.verify(w, times(1)).onComplete();
        inOrder.verify(w, never()).onNext(anyString());
        verify(w, never()).onError(any(Throwable.class));
    }

    /** An upstream error passes straight through distinct(). */
    @Test
    public void error() {
        Flowable.error(new TestException())
        .distinct()
        .test()
        .assertFailure(TestException.class);
    }

    /** A scalar-array source fuses in SYNC mode and still deduplicates. */
    @Test
    public void fusedSync() {
        TestSubscriberEx<Integer> ts = new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

        Flowable.just(1, 1, 2, 1, 3, 2, 4, 5, 4)
        .distinct()
        .subscribe(ts);

        ts.assertFusionMode(QueueFuseable.SYNC)
        .assertResult(1, 2, 3, 4, 5);
    }

    /** A UnicastProcessor source fuses in ASYNC mode and still deduplicates. */
    @Test
    public void fusedAsync() {
        TestSubscriberEx<Integer> ts = new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

        UnicastProcessor<Integer> up = UnicastProcessor.create();

        up
        .distinct()
        .subscribe(ts);

        TestHelper.emit(up, 1, 1, 2, 1, 3, 2, 4, 5, 4);

        ts.assertFusionMode(QueueFuseable.ASYNC)
        .assertResult(1, 2, 3, 4, 5);
    }

    /** clear() on the fused queue empties it (checked inside onSubscribe). */
    @Test
    public void fusedClear() {
        Flowable.just(1, 1, 2, 1, 3, 2, 4, 5, 4)
        .distinct()
        .subscribe(new FlowableSubscriber<Integer>() {
            @Override
            public void onSubscribe(Subscription s) {
                QueueSubscription<?> qs = (QueueSubscription<?>)s;

                assertFalse(qs.isEmpty());

                qs.clear();

                assertTrue(qs.isEmpty());
            }

            @Override
            public void onNext(Integer value) {
            }

            @Override
            public void onError(Throwable e) {
            }

            @Override
            public void onComplete() {
            }
        });
    }

    /** A collection supplier that throws surfaces as an onError to the consumer. */
    @Test
    public void collectionSupplierThrows() {
        Flowable.just(1)
        .distinct(Functions.identity(), new Supplier<Collection<Object>>() {
            @Override
            public Collection<Object> get() throws Exception {
                throw new TestException();
            }
        })
        .test()
        .assertFailure(TestException.class);
    }

    /** A null collection from the supplier becomes an NPE with a specific message. */
    @Test
    public void collectionSupplierIsNull() {
        Flowable.just(1)
        .distinct(Functions.identity(), new Supplier<Collection<Object>>() {
            @Override
            public Collection<Object> get() throws Exception {
                return null;
            }
        })
        .to(TestHelper.<Integer>testConsumer())
        .assertFailure(NullPointerException.class)
        .assertErrorMessage(ExceptionHelper.nullWarning("The collectionSupplier returned a null Collection."));
    }

    /**
     * A protocol-violating upstream (signals after onComplete): the extra
     * signals are dropped and the late error is routed to the plugin handler,
     * which is asserted via trackPluginErrors().
     */
    @Test
    public void badSource() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Flowable<Integer>() {
                @Override
                protected void subscribeActual(Subscriber<? super Integer> subscriber) {
                    subscriber.onSubscribe(new BooleanSubscription());
                    subscriber.onNext(1);
                    subscriber.onComplete();
                    subscriber.onNext(2);
                    subscriber.onError(new TestException());
                    subscriber.onComplete();
                }
            }
            .distinct()
            .test()
            .assertResult(1);

            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
}
/* Copyright (c) 2011, Sony Ericsson Mobile Communications AB All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Sony Ericsson Mobile Communications AB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.sonyericsson.extras.liveware.extension.util.registration;

import com.sonyericsson.extras.liveware.aef.registration.Registration.Device;
import com.sonyericsson.extras.liveware.aef.registration.Registration.DeviceColumns;
import com.sonyericsson.extras.liveware.extension.util.Dbg;

import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;

import java.util.ArrayList;
import java.util.List;

/**
 * The host application class contains information about a host application.
 * Immutable except for the lazily populated device list.
 */
public class HostApplicationInfo {

    private final long mId;

    private final String mPackageName;

    private final int mWidgetApiVersion;

    private final int mControlApiVersion;

    private final int mSensorApiVersion;

    private final int mNotificationApiVersion;

    private final int mWidgetRefreshRate;

    private final Context mContext;

    // Lazily loaded and cached by getDevices(); null until first queried.
    private List<DeviceInfo> mDevices = null;

    /**
     * Create host application info.
     *
     * @param context The context.
     * @param packageName The package name.
     * @param id The host application id.
     * @param widgetApiVersion The widget API version.
     * @param controlApiVersion The control API version.
     * @param sensorApiVersion The sensor API version.
     * @param notificationApiVersion The notification API version.
     * @param widgetRefreshRate The widget refresh rate.
     */
    public HostApplicationInfo(final Context context, final String packageName, final long id,
            final int widgetApiVersion, final int controlApiVersion, final int sensorApiVersion,
            final int notificationApiVersion, final int widgetRefreshRate) {
        mContext = context;
        mPackageName = packageName;
        mId = id;
        mWidgetApiVersion = widgetApiVersion;
        mControlApiVersion = controlApiVersion;
        mSensorApiVersion = sensorApiVersion;
        mNotificationApiVersion = notificationApiVersion;
        mWidgetRefreshRate = widgetRefreshRate;
    }

    /**
     * Get the devices for this host application. The result is read once from
     * the registration content provider and cached for subsequent calls; query
     * failures yield an empty (cached) list.
     *
     * @return List of the devices.
     */
    public List<DeviceInfo> getDevices() {
        if (mDevices != null) {
            // List of devices already available. Avoid re-reading from database.
            return mDevices;
        }

        mDevices = new ArrayList<DeviceInfo>();

        Cursor cursor = null;
        try {
            // Use a parameterized selection rather than concatenating the id
            // into the selection string.
            cursor = mContext.getContentResolver().query(Device.URI, null,
                    DeviceColumns.HOST_APPLICATION_ID + " = ?",
                    new String[] {
                        Long.toString(mId)
                    }, null);
            while (cursor != null && cursor.moveToNext()) {
                long deviceId = cursor.getLong(cursor.getColumnIndexOrThrow(DeviceColumns._ID));
                int widgetWidth = cursor.getInt(cursor
                        .getColumnIndexOrThrow(DeviceColumns.WIDGET_IMAGE_WIDTH));
                int widgetHeight = cursor.getInt(cursor
                        .getColumnIndexOrThrow(DeviceColumns.WIDGET_IMAGE_HEIGHT));
                boolean vibrator = (cursor.getInt(cursor
                        .getColumnIndexOrThrow(DeviceColumns.VIBRATOR)) == 1);
                DeviceInfo device = new DeviceInfo(mContext, mPackageName, deviceId, widgetWidth,
                        widgetHeight, vibrator);
                mDevices.add(device);
            }
        } catch (SQLException e) {
            // Best effort: log and fall through with whatever was read so far.
            if (Dbg.DEBUG) {
                Dbg.w("Failed to query device", e);
            }
        } catch (SecurityException e) {
            if (Dbg.DEBUG) {
                Dbg.w("Failed to query device", e);
            }
        } catch (IllegalArgumentException e) {
            if (Dbg.DEBUG) {
                Dbg.w("Failed to query device", e);
            }
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }

        return mDevices;
    }

    /**
     * Get the id.
     *
     * @return The id.
     * @see Registration.HostAppColumns.#_ID
     */
    public long getId() {
        return mId;
    }

    /**
     * Get the widget API version.
     *
     * @return The widget API version.
     * @see Registration.HostAppColumns.#WIDGET_API_VERSION
     */
    public int getWidgetApiVersion() {
        return mWidgetApiVersion;
    }

    /**
     * Get the control API version.
     *
     * @return The control API version.
     * @see Registration.HostAppColumns.#CONTROL_API_VERSION
     */
    public int getControlApiVersion() {
        return mControlApiVersion;
    }

    /**
     * Get the registration API version. Certain Registration values rely on SDK
     * version 2.0 although they depend on no specific API version. This method
     * returns the lowest safe API value.
     *
     * @return The lowest safe registration API version.
     * @see Registration.ExtensionColumns.#EXTENSION_48PX_ICON_URI
     */
    public int getRegistrationApiVersion() {
        // The control API version doubles as the lowest safe registration level.
        return mControlApiVersion;
    }

    /**
     * Get the sensor API version.
     *
     * @return The sensor API version.
     * @see Registration.HostAppColumns.#SENSOR_API_VERSION
     */
    public int getSensorApiVersion() {
        return mSensorApiVersion;
    }

    /**
     * Get the notification API version.
     *
     * @return The notification API version.
     * @see Registration.HostAppColumns.#NOTIFICATION_API_VERSION
     */
    public int getNotificationApiVersion() {
        return mNotificationApiVersion;
    }

    /**
     * Get the widget refresh rate.
     *
     * @return The widget refresh rate.
     * @see Registration.HostAppColumns.#WIDGET_REFRESH_RATE
     */
    public int getWidgetRefreshRate() {
        return mWidgetRefreshRate;
    }
}
/*=========================================================================== * Licensed Materials - Property of IBM * "Restricted Materials of IBM" * * IBM SDK, Java(tm) Technology Edition, v8 * (C) Copyright IBM Corp. 1996, 2014. All Rights Reserved * * US Government Users Restricted Rights - Use, duplication or disclosure * restricted by GSA ADP Schedule Contract with IBM Corp. *=========================================================================== */ /* * Copyright (c) 1996, 2014, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * (C) Copyright Taligent, Inc. 1996 - All Rights Reserved * (C) Copyright IBM Corp. 1996 - All Rights Reserved * * The original version of this source code and documentation is copyrighted * and owned by Taligent, Inc., a wholly-owned subsidiary of IBM. These * materials are provided under terms of a License Agreement between Taligent * and Sun. This technology is protected by multiple US and International * patents. This notice and attribution to Taligent may not be removed. * Taligent is a registered trademark of Taligent, Inc. * */ package java.util; import java.io.Serializable; import java.security.AccessController; import java.security.PrivilegedAction; import java.time.ZoneId; import sun.security.action.GetPropertyAction; import sun.util.calendar.ZoneInfo; import sun.util.calendar.ZoneInfoFile; import sun.util.locale.provider.TimeZoneNameUtility; /** * <code>TimeZone</code> represents a time zone offset, and also figures out daylight * savings. * * <p> * Typically, you get a <code>TimeZone</code> using <code>getDefault</code> * which creates a <code>TimeZone</code> based on the time zone where the program * is running. For example, for a program running in Japan, <code>getDefault</code> * creates a <code>TimeZone</code> object based on Japanese Standard Time. 
* * <p> * You can also get a <code>TimeZone</code> using <code>getTimeZone</code> * along with a time zone ID. For instance, the time zone ID for the * U.S. Pacific Time zone is "America/Los_Angeles". So, you can get a * U.S. Pacific Time <code>TimeZone</code> object with: * <blockquote><pre> * TimeZone tz = TimeZone.getTimeZone("America/Los_Angeles"); * </pre></blockquote> * You can use the <code>getAvailableIDs</code> method to iterate through * all the supported time zone IDs. You can then choose a * supported ID to get a <code>TimeZone</code>. * If the time zone you want is not represented by one of the * supported IDs, then a custom time zone ID can be specified to * produce a TimeZone. The syntax of a custom time zone ID is: * * <blockquote><pre> * <a name="CustomID"><i>CustomID:</i></a> * <code>GMT</code> <i>Sign</i> <i>Hours</i> <code>:</code> <i>Minutes</i> * <code>GMT</code> <i>Sign</i> <i>Hours</i> <i>Minutes</i> * <code>GMT</code> <i>Sign</i> <i>Hours</i> * <i>Sign:</i> one of * <code>+ -</code> * <i>Hours:</i> * <i>Digit</i> * <i>Digit</i> <i>Digit</i> * <i>Minutes:</i> * <i>Digit</i> <i>Digit</i> * <i>Digit:</i> one of * <code>0 1 2 3 4 5 6 7 8 9</code> * </pre></blockquote> * * <i>Hours</i> must be between 0 to 23 and <i>Minutes</i> must be * between 00 to 59. For example, "GMT+10" and "GMT+0010" mean ten * hours and ten minutes ahead of GMT, respectively. * <p> * The format is locale independent and digits must be taken from the * Basic Latin block of the Unicode standard. No daylight saving time * transition schedule can be specified with a custom time zone ID. If * the specified string doesn't match the syntax, <code>"GMT"</code> * is used. 
* <p> * When creating a <code>TimeZone</code>, the specified custom time * zone ID is normalized in the following syntax: * <blockquote><pre> * <a name="NormalizedCustomID"><i>NormalizedCustomID:</i></a> * <code>GMT</code> <i>Sign</i> <i>TwoDigitHours</i> <code>:</code> <i>Minutes</i> * <i>Sign:</i> one of * <code>+ -</code> * <i>TwoDigitHours:</i> * <i>Digit</i> <i>Digit</i> * <i>Minutes:</i> * <i>Digit</i> <i>Digit</i> * <i>Digit:</i> one of * <code>0 1 2 3 4 5 6 7 8 9</code> * </pre></blockquote> * For example, TimeZone.getTimeZone("GMT-8").getID() returns "GMT-08:00". * * <h3>Three-letter time zone IDs</h3> * * For compatibility with JDK 1.1.x, some other three-letter time zone IDs * (such as "PST", "CTT", "AST") are also supported. However, <strong>their * use is deprecated</strong> because the same abbreviation is often used * for multiple time zones (for example, "CST" could be U.S. "Central Standard * Time" and "China Standard Time"), and the Java platform can then only * recognize one of them. * * * @see Calendar * @see GregorianCalendar * @see SimpleTimeZone * @author Mark Davis, David Goldsmith, Chen-Lieh Huang, Alan Liu * @since JDK1.1 */ abstract public class TimeZone implements Serializable, Cloneable { /** * Sole constructor. (For invocation by subclass constructors, typically * implicit.) */ public TimeZone() { } /** * A style specifier for <code>getDisplayName()</code> indicating * a short name, such as "PST." * @see #LONG * @since 1.2 */ public static final int SHORT = 0; /** * A style specifier for <code>getDisplayName()</code> indicating * a long name, such as "Pacific Standard Time." 
* @see #SHORT * @since 1.2 */ public static final int LONG = 1; // Constants used internally; unit is milliseconds private static final int ONE_MINUTE = 60*1000; private static final int ONE_HOUR = 60*ONE_MINUTE; private static final int ONE_DAY = 24*ONE_HOUR; // Proclaim serialization compatibility with JDK 1.1 static final long serialVersionUID = 3581463369166924961L; /** * Gets the time zone offset, for current date, modified in case of * daylight savings. This is the offset to add to UTC to get local time. * <p> * This method returns a historically correct offset if an * underlying <code>TimeZone</code> implementation subclass * supports historical Daylight Saving Time schedule and GMT * offset changes. * * @param era the era of the given date. * @param year the year in the given date. * @param month the month in the given date. * Month is 0-based. e.g., 0 for January. * @param day the day-in-month of the given date. * @param dayOfWeek the day-of-week of the given date. * @param milliseconds the milliseconds in day in <em>standard</em> * local time. * * @return the offset in milliseconds to add to GMT to get local time. * * @see Calendar#ZONE_OFFSET * @see Calendar#DST_OFFSET */ public abstract int getOffset(int era, int year, int month, int day, int dayOfWeek, int milliseconds); /** * Returns the offset of this time zone from UTC at the specified * date. If Daylight Saving Time is in effect at the specified * date, the offset value is adjusted with the amount of daylight * saving. * <p> * This method returns a historically correct offset value if an * underlying TimeZone implementation subclass supports historical * Daylight Saving Time schedule and GMT offset changes. * * @param date the date represented in milliseconds since January 1, 1970 00:00:00 GMT * @return the amount of time in milliseconds to add to UTC to get local time. 
 *
 * @see Calendar#ZONE_OFFSET
 * @see Calendar#DST_OFFSET
 * @since 1.4
 */
public int getOffset(long date) {
    // If DST is in effect at 'date', the wall-clock offset is the raw
    // (standard-time) offset plus the DST savings amount.
    if (inDaylightTime(new Date(date))) {
        return getRawOffset() + getDSTSavings();
    }
    return getRawOffset();
}

/**
 * Gets the raw GMT offset and the amount of daylight saving of this
 * time zone at the given time.
 * @param date the milliseconds (since January 1, 1970,
 * 00:00:00.000 GMT) at which the time zone offset and daylight
 * saving amount are found
 * @param offsets an array of int where the raw GMT offset
 * (offset[0]) and daylight saving amount (offset[1]) are stored,
 * or null if those values are not needed. The method assumes that
 * the length of the given array is two or larger.
 * @return the total amount of the raw GMT offset and daylight
 * saving at the specified date.
 *
 * @see Calendar#ZONE_OFFSET
 * @see Calendar#DST_OFFSET
 */
int getOffsets(long date, int[] offsets) {
    int rawoffset = getRawOffset();
    int dstoffset = 0;
    if (inDaylightTime(new Date(date))) {
        dstoffset = getDSTSavings();
    }
    // Optionally report the two components separately to the caller.
    if (offsets != null) {
        offsets[0] = rawoffset;
        offsets[1] = dstoffset;
    }
    return rawoffset + dstoffset;
}

/**
 * Sets the base time zone offset to GMT.
 * This is the offset to add to UTC to get local time.
 * <p>
 * If an underlying <code>TimeZone</code> implementation subclass
 * supports historical GMT offset changes, the specified GMT
 * offset is set as the latest GMT offset and the difference from
 * the known latest GMT offset value is used to adjust all
 * historical GMT offset values.
 *
 * @param offsetMillis the given base time zone offset to GMT.
 */
abstract public void setRawOffset(int offsetMillis);

/**
 * Returns the amount of time in milliseconds to add to UTC to get
 * standard time in this time zone. Because this value is not
 * affected by daylight saving time, it is called <I>raw
 * offset</I>.
* <p> * If an underlying <code>TimeZone</code> implementation subclass * supports historical GMT offset changes, the method returns the * raw offset value of the current date. In Honolulu, for example, * its raw offset changed from GMT-10:30 to GMT-10:00 in 1947, and * this method always returns -36000000 milliseconds (i.e., -10 * hours). * * @return the amount of raw offset time in milliseconds to add to UTC. * @see Calendar#ZONE_OFFSET */ public abstract int getRawOffset(); /** * Gets the ID of this time zone. * @return the ID of this time zone. */ public String getID() { return ID; } /** * Sets the time zone ID. This does not change any other data in * the time zone object. * @param ID the new time zone ID. */ public void setID(String ID) { if (ID == null) { throw new NullPointerException(); } this.ID = ID; } /** * Returns a long standard time name of this {@code TimeZone} suitable for * presentation to the user in the default locale. * * <p>This method is equivalent to: * <blockquote><pre> * getDisplayName(false, {@link #LONG}, * Locale.getDefault({@link Locale.Category#DISPLAY})) * </pre></blockquote> * * @return the human-readable name of this time zone in the default locale. * @since 1.2 * @see #getDisplayName(boolean, int, Locale) * @see Locale#getDefault(Locale.Category) * @see Locale.Category */ public final String getDisplayName() { return getDisplayName(false, LONG, Locale.getDefault(Locale.Category.DISPLAY)); } /** * Returns a long standard time name of this {@code TimeZone} suitable for * presentation to the user in the specified {@code locale}. * * <p>This method is equivalent to: * <blockquote><pre> * getDisplayName(false, {@link #LONG}, locale) * </pre></blockquote> * * @param locale the locale in which to supply the display name. * @return the human-readable name of this time zone in the given locale. * @exception NullPointerException if {@code locale} is {@code null}. 
* @since 1.2 * @see #getDisplayName(boolean, int, Locale) */ public final String getDisplayName(Locale locale) { return getDisplayName(false, LONG, locale); } /** * Returns a name in the specified {@code style} of this {@code TimeZone} * suitable for presentation to the user in the default locale. If the * specified {@code daylight} is {@code true}, a Daylight Saving Time name * is returned (even if this {@code TimeZone} doesn't observe Daylight Saving * Time). Otherwise, a Standard Time name is returned. * * <p>This method is equivalent to: * <blockquote><pre> * getDisplayName(daylight, style, * Locale.getDefault({@link Locale.Category#DISPLAY})) * </pre></blockquote> * * @param daylight {@code true} specifying a Daylight Saving Time name, or * {@code false} specifying a Standard Time name * @param style either {@link #LONG} or {@link #SHORT} * @return the human-readable name of this time zone in the default locale. * @exception IllegalArgumentException if {@code style} is invalid. * @since 1.2 * @see #getDisplayName(boolean, int, Locale) * @see Locale#getDefault(Locale.Category) * @see Locale.Category * @see java.text.DateFormatSymbols#getZoneStrings() */ public final String getDisplayName(boolean daylight, int style) { return getDisplayName(daylight, style, Locale.getDefault(Locale.Category.DISPLAY)); } /** * Returns a name in the specified {@code style} of this {@code TimeZone} * suitable for presentation to the user in the specified {@code * locale}. If the specified {@code daylight} is {@code true}, a Daylight * Saving Time name is returned (even if this {@code TimeZone} doesn't * observe Daylight Saving Time). Otherwise, a Standard Time name is * returned. * * <p>When looking up a time zone name, the {@linkplain * ResourceBundle.Control#getCandidateLocales(String,Locale) default * <code>Locale</code> search path of <code>ResourceBundle</code>} derived * from the specified {@code locale} is used. 
(No {@linkplain * ResourceBundle.Control#getFallbackLocale(String,Locale) fallback * <code>Locale</code>} search is performed.) If a time zone name in any * {@code Locale} of the search path, including {@link Locale#ROOT}, is * found, the name is returned. Otherwise, a string in the * <a href="#NormalizedCustomID">normalized custom ID format</a> is returned. * * @param daylight {@code true} specifying a Daylight Saving Time name, or * {@code false} specifying a Standard Time name * @param style either {@link #LONG} or {@link #SHORT} * @param locale the locale in which to supply the display name. * @return the human-readable name of this time zone in the given locale. * @exception IllegalArgumentException if {@code style} is invalid. * @exception NullPointerException if {@code locale} is {@code null}. * @since 1.2 * @see java.text.DateFormatSymbols#getZoneStrings() */ public String getDisplayName(boolean daylight, int style, Locale locale) { if (style != SHORT && style != LONG) { throw new IllegalArgumentException("Illegal style: " + style); } String id = getID(); String[] names = TimeZoneNameUtility.retrieveDisplayNames(id, locale); int index = daylight ? 3 : 1; if (style == SHORT) { index++; } if (names == null || names[index] == null) { if (id.startsWith("GMT") && id.length() > 3) { char sign = id.charAt(3); if (sign == '+' || sign == '-') { return id; } } int offset = getRawOffset(); if (daylight) { offset += getDSTSavings(); } return ZoneInfoFile.toCustomID(offset); } return names[index]; } private static String[] getDisplayNames(String id, Locale locale) { return TimeZoneNameUtility.retrieveDisplayNames(id, locale); } /** * Returns the amount of time to be added to local standard time * to get local wall clock time. * * <p>The default implementation returns 3600000 milliseconds * (i.e., one hour) if a call to {@link #useDaylightTime()} * returns {@code true}. Otherwise, 0 (zero) is returned. 
* * <p>If an underlying {@code TimeZone} implementation subclass * supports historical and future Daylight Saving Time schedule * changes, this method returns the amount of saving time of the * last known Daylight Saving Time rule that can be a future * prediction. * * <p>If the amount of saving time at any given time stamp is * required, construct a {@link Calendar} with this {@code * TimeZone} and the time stamp, and call {@link Calendar#get(int) * Calendar.get}{@code (}{@link Calendar#DST_OFFSET}{@code )}. * * @return the amount of saving time in milliseconds * @since 1.4 * @see #inDaylightTime(Date) * @see #getOffset(long) * @see #getOffset(int,int,int,int,int,int) * @see Calendar#ZONE_OFFSET */ public int getDSTSavings() { if (useDaylightTime()) { return 3600000; } return 0; } /** * Queries if this {@code TimeZone} uses Daylight Saving Time. * * <p>If an underlying {@code TimeZone} implementation subclass * supports historical and future Daylight Saving Time schedule * changes, this method refers to the last known Daylight Saving Time * rule that can be a future prediction and may not be the same as * the current rule. Consider calling {@link #observesDaylightTime()} * if the current rule should also be taken into account. * * @return {@code true} if this {@code TimeZone} uses Daylight Saving Time, * {@code false}, otherwise. * @see #inDaylightTime(Date) * @see Calendar#DST_OFFSET */ public abstract boolean useDaylightTime(); /** * Returns {@code true} if this {@code TimeZone} is currently in * Daylight Saving Time, or if a transition from Standard Time to * Daylight Saving Time occurs at any future time. * * <p>The default implementation returns {@code true} if * {@code useDaylightTime()} or {@code inDaylightTime(new Date())} * returns {@code true}. * * @return {@code true} if this {@code TimeZone} is currently in * Daylight Saving Time, or if a transition from Standard Time to * Daylight Saving Time occurs at any future time; {@code false} * otherwise. 
* @since 1.7 * @see #useDaylightTime() * @see #inDaylightTime(Date) * @see Calendar#DST_OFFSET */ public boolean observesDaylightTime() { return useDaylightTime() || inDaylightTime(new Date()); } /** * Queries if the given {@code date} is in Daylight Saving Time in * this time zone. * * @param date the given Date. * @return {@code true} if the given date is in Daylight Saving Time, * {@code false}, otherwise. */ abstract public boolean inDaylightTime(Date date); /** * Gets the <code>TimeZone</code> for the given ID. * * @param ID the ID for a <code>TimeZone</code>, either an abbreviation * such as "PST", a full name such as "America/Los_Angeles", or a custom * ID such as "GMT-8:00". Note that the support of abbreviations is * for JDK 1.1.x compatibility only and full names should be used. * * @return the specified <code>TimeZone</code>, or the GMT zone if the given ID * cannot be understood. */ public static synchronized TimeZone getTimeZone(String ID) { return getTimeZone(ID, true); } /** * Gets the {@code TimeZone} for the given {@code zoneId}. * * @param zoneId a {@link ZoneId} from which the time zone ID is obtained * @return the specified {@code TimeZone}, or the GMT zone if the given ID * cannot be understood. * @throws NullPointerException if {@code zoneId} is {@code null} * @since 1.8 */ public static TimeZone getTimeZone(ZoneId zoneId) { String tzid = zoneId.getId(); // throws an NPE if null char c = tzid.charAt(0); if (c == '+' || c == '-') { tzid = "GMT" + tzid; } else if (c == 'Z' && tzid.length() == 1) { tzid = "UTC"; } return getTimeZone(tzid, true); } /** * Converts this {@code TimeZone} object to a {@code ZoneId}. 
* * @return a {@code ZoneId} representing the same time zone as this * {@code TimeZone} * @since 1.8 */ public ZoneId toZoneId() { String id = getID(); if (ZoneInfoFile.useOldMapping() && id.length() == 3) { if ("EST".equals(id)) return ZoneId.of("America/New_York"); if ("MST".equals(id)) return ZoneId.of("America/Denver"); if ("HST".equals(id)) return ZoneId.of("America/Honolulu"); } return ZoneId.of(id, ZoneId.SHORT_IDS); } private static TimeZone getTimeZone(String ID, boolean fallback) { TimeZone tz = ZoneInfo.getTimeZone(ID); if (tz == null) { tz = parseCustomTimeZone(ID); if (tz == null && fallback) { tz = new ZoneInfo(GMT_ID, 0); } } return tz; } /** * Gets the available IDs according to the given time zone offset in milliseconds. * * @param rawOffset the given time zone GMT offset in milliseconds. * @return an array of IDs, where the time zone for that ID has * the specified GMT offset. For example, "America/Phoenix" and "America/Denver" * both have GMT-07:00, but differ in daylight saving behavior. * @see #getRawOffset() */ public static synchronized String[] getAvailableIDs(int rawOffset) { return ZoneInfo.getAvailableIDs(rawOffset); } /** * Gets all the available IDs supported. * @return an array of IDs. */ public static synchronized String[] getAvailableIDs() { return ZoneInfo.getAvailableIDs(); } /** * Gets the platform defined TimeZone ID. **/ private static native String getSystemTimeZoneID(String javaHome); /** * Gets the custom time zone ID based on the GMT offset of the * platform. (e.g., "GMT+08:00") */ private static native String getSystemGMTOffsetID(); /** * Gets the default {@code TimeZone} of the Java virtual machine. If the * cached default {@code TimeZone} is available, its clone is returned. * Otherwise, the method takes the following steps to determine the default * time zone. * * <ul> * <li>Use the {@code user.timezone} property value as the default * time zone ID if it's available.</li> * <li>Detect the platform time zone ID. 
The source of the * platform time zone and ID mapping may vary with implementation.</li> * <li>Use {@code GMT} as the last resort if the given or detected * time zone ID is unknown.</li> * </ul> * * <p>The default {@code TimeZone} created from the ID is cached, * and its clone is returned. The {@code user.timezone} property * value is set to the ID upon return. * * @return the default {@code TimeZone} * @see #setDefault(TimeZone) */ public static TimeZone getDefault() { return (TimeZone) getDefaultRef().clone(); } /** * Returns the reference to the default TimeZone object. This * method doesn't create a clone. */ static TimeZone getDefaultRef() { TimeZone defaultZone = defaultTimeZone; if (defaultZone == null) { // Need to initialize the default time zone. defaultZone = setDefaultZone(); assert defaultZone != null; } // Don't clone here. return defaultZone; } private static synchronized TimeZone setDefaultZone() { TimeZone tz; // get the time zone ID from the system properties String zoneID = AccessController.doPrivileged( new GetPropertyAction("user.timezone")); // if the time zone ID is not set (yet), perform the // platform to Java time zone ID mapping. if (zoneID == null || zoneID.isEmpty()) { String javaHome = AccessController.doPrivileged( new GetPropertyAction("java.home")); try { zoneID = getSystemTimeZoneID(javaHome); if (zoneID == null) { zoneID = GMT_ID; } } catch (NullPointerException e) { zoneID = GMT_ID; } } // Get the time zone for zoneID. But not fall back to // "GMT" here. tz = getTimeZone(zoneID, false); if (tz == null) { // If the given zone ID is unknown in Java, try to // get the GMT-offset-based time zone ID, // a.k.a. custom time zone ID (e.g., "GMT-08:00"). 
String gmtOffsetID = getSystemGMTOffsetID(); if (gmtOffsetID != null) { zoneID = gmtOffsetID; } tz = getTimeZone(zoneID, true); } assert tz != null; final String id = zoneID; AccessController.doPrivileged(new PrivilegedAction<Void>() { @Override public Void run() { System.setProperty("user.timezone", id); return null; } }); defaultTimeZone = tz; return tz; } /** * Sets the {@code TimeZone} that is returned by the {@code getDefault} * method. {@code zone} is cached. If {@code zone} is null, the cached * default {@code TimeZone} is cleared. This method doesn't change the value * of the {@code user.timezone} property. * * @param zone the new default {@code TimeZone}, or null * @throws SecurityException if the security manager's {@code checkPermission} * denies {@code PropertyPermission("user.timezone", * "write")} * @see #getDefault * @see PropertyPermission */ public static void setDefault(TimeZone zone) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPermission(new PropertyPermission ("user.timezone", "write")); } defaultTimeZone = zone; } /** * Returns true if this zone has the same rule and offset as another zone. * That is, if this zone differs only in ID, if at all. Returns false * if the other zone is null. * @param other the <code>TimeZone</code> object to be compared with * @return true if the other zone is not null and is the same as this one, * with the possible exception of the ID * @since 1.2 */ public boolean hasSameRules(TimeZone other) { return other != null && getRawOffset() == other.getRawOffset() && useDaylightTime() == other.useDaylightTime(); } /** * Creates a copy of this <code>TimeZone</code>. 
 *
 * @return a clone of this <code>TimeZone</code>
 */
public Object clone() {
    try {
        TimeZone other = (TimeZone) super.clone();
        if (other instanceof SimpleTimeZone) {
            // Transition caches are per-instance; the copy must not share them.
            ((SimpleTimeZone)other).invalidateCache();
        }
        other.ID = ID;
        return other;
    } catch (CloneNotSupportedException e) {
        // Cannot happen: TimeZone implements Cloneable.
        throw new InternalError(e);
    }
}

/**
 * The null constant as a TimeZone.
 */
static final TimeZone NO_TIMEZONE = null;

// =======================privates===============================

/**
 * The string identifier of this <code>TimeZone</code>. This is a
 * programmatic identifier used internally to look up <code>TimeZone</code>
 * objects from the system table and also to map them to their localized
 * display names. <code>ID</code> values are unique in the system
 * table but may not be for dynamically created zones.
 * @serial
 */
private String ID;

// Cached process-wide default zone; cleared/replaced by setDefault().
private static volatile TimeZone defaultTimeZone;

static final String GMT_ID = "GMT";

private static final int GMT_ID_LENGTH = 3;

// a static TimeZone we can reference if no AppContext is in place
private static volatile TimeZone mainAppContextDefault;

/**
 * Parses a custom time zone identifier and returns a corresponding zone.
 * This method doesn't support the RFC 822 time zone format. (e.g., +hhmm)
 *
 * @param id a string of the <a href="#CustomID">custom ID form</a>.
 * @return a newly created TimeZone with the given offset and
 * no daylight saving time, or null if the id cannot be parsed.
 */
private static final TimeZone parseCustomTimeZone(String id) {
    int length;

    // Error if the length of id isn't long enough or id doesn't
    // start with "GMT".
    if ((length = id.length()) < (GMT_ID_LENGTH + 2) ||
        id.indexOf(GMT_ID) != 0) {
        return null;
    }

    ZoneInfo zi;

    // First, we try to find it in the cache with the given
    // id. Even the id is not normalized, the returned ZoneInfo
    // should have its normalized id.
    zi = ZoneInfoFile.getZoneInfo(id);
    if (zi != null) {
        return zi;
    }

    // Sign character must follow "GMT".
    int index = GMT_ID_LENGTH;
    boolean negative = false;
    char c = id.charAt(index++);
    if (c == '-') {
        negative = true;
    } else if (c != '+') {
        return null;
    }

    // Accumulate digits; 'num' holds the current group, 'len' its digit
    // count, 'countDelim' the number of ':' separators seen so far.
    int hours = 0;
    int num = 0;
    int countDelim = 0;
    int len = 0;
    while (index < length) {
        c = id.charAt(index++);
        if (c == ':') {
            if (countDelim > 0) {
                // At most one ':' is allowed (hours:minutes).
                return null;
            }
            if (len > 2) {
                return null;
            }
            hours = num;
            countDelim++;
            num = 0;
            len = 0;
            continue;
        }
        if (c < '0' || c > '9') {
            return null;
        }
        num = num * 10 + (c - '0');
        len++;
    }
    if (index != length) {
        return null;
    }
    if (countDelim == 0) {
        // No colon: "H", "HH" are hours only; longer forms are HHMM.
        if (len <= 2) {
            hours = num;
            num = 0;
        } else {
            hours = num / 100;
            num %= 100;
        }
    } else {
        // With a colon, the minutes group must be exactly two digits.
        if (len != 2) {
            return null;
        }
    }
    if (hours > 23 || num > 59) {
        return null;
    }
    int gmtOffset =  (hours * 60 + num) * 60 * 1000;

    if (gmtOffset == 0) {
        // Zero offset normalizes to GMT+00:00 / GMT-00:00 on a GMT zone.
        zi = ZoneInfoFile.getZoneInfo(GMT_ID);
        if (negative) {
            zi.setID("GMT-00:00");
        } else {
            zi.setID("GMT+00:00");
        }
    } else {
        zi = ZoneInfoFile.getCustomTimeZone(id, negative ? -gmtOffset : gmtOffset);
    }
    return zi;
}
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package de.elt.dispotool.service;

import de.elt.dispotool.dao.BewegungDao;
import de.elt.dispotool.dao.BwaDao;
import de.elt.dispotool.dao.BestandDao;
import de.elt.dispotool.entities.Bewegung;
import de.elt.dispotool.entities.Bwa;
import de.elt.dispotool.util.BewegungUtils;
import de.elt.dispotool.util.ChartUtils;
import de.elt.dispotool.util.Constants;
import de.elt.dispotool.view.BewegungView;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Stateful;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.java.Log;
import org.apache.commons.lang3.time.DateUtils;
import org.primefaces.model.chart.BarChartModel;

/**
 * Stateful session bean that loads the stock movements ("Bewegungen") of a
 * single material number and derives from them:
 * <ul>
 *   <li>per-day movement aggregations keyed by movement type (BWA),</li>
 *   <li>the real daily stock level ({@code bestandsMap}),</li>
 *   <li>a simulated stock level with automatic reordering
 *       ({@code simBestandsMap} / {@code simZugangsMap}),</li>
 *   <li>chart-ready data series (via {@link ChartUtils}).</li>
 * </ul>
 * All date keys use the pattern {@code "yyyy,MM,dd"} so that lexicographic
 * ordering of the {@link SortedMap} keys equals chronological ordering.
 * Entry point is {@link #setMaterialNummer(String)}, which (re)initializes
 * all derived state.
 *
 * <p>NOTE(review): this bean is {@code @Stateful} and not thread-safe;
 * assumed to be accessed by a single client conversation — confirm.
 *
 * @author Jonas
 */
@Stateful
@Getter
@Setter
@Log
public class BewegungService {

    @Inject
    BewegungDao bewegungDao;
    @Inject
    BwaDao bwaDao;
    @Inject
    BestandDao bestandDao;

    /** All movements of the current material. */
    List<Bewegung> bewegungen;
    /** Movements with positive sign (receipts). */
    List<Bewegung> zugaenge;
    /** Movements with negative sign (issues). */
    List<Bewegung> abgaenge;
    /** Replenishment lead time in days used by the simulation. */
    int abgangsInterval = 30;
    /** First and last booking date of the loaded movements. */
    Date first, last;
    List<Bwa> bwas;
    /** Sign (+1 / 0 / -1) per movement type. */
    Map<String, Integer> vorzeichen;
    /** date -> (bwa -> signed quantity). */
    SortedMap<String, SortedMap<String, Integer>> bewegungsMap;
    /** date -> (bwa -> quantity) restricted to issue (-1) movement types. */
    SortedMap<String, SortedMap<String, Integer>> abgangsMap;
    /** date -> simulated reorder arrival quantity. */
    SortedMap<String, Integer> simZugangsMap;
    /** date -> real stock level. */
    SortedMap<String, Integer> bestandsMap;
    /** date -> simulated stock level. */
    SortedMap<String, Integer> simBestandsMap;
    SortedMap<String, SortedMap<String, Integer>> chartMap;
    SortedMap<String, SortedMap<String, Integer>> simChartMap;
    Map<String, Integer> bwaMengen;
    /** Order quantity used by the simulation. */
    Integer bestellmenge;
    Integer minBestand, maxBestand, minSimBestand, maxSimBestand;
    String materialNummer;
    BarChartModel barChartModel;
    List<String> types = Constants.TYPES;

    /** Date-key pattern; lexicographic order == chronological order. */
    private static final String DATE_PATTERN = "yyyy,MM,dd";

    /**
     * Builds {@code bewegungsMap}: for every booking date the signed quantity
     * per movement type, accumulated over all loaded movements.
     */
    public void initBewegungsMap() {
        SimpleDateFormat format = new SimpleDateFormat(DATE_PATTERN);
        bewegungsMap = BewegungUtils.makeEmptyMap(first, last);
        log.info("Bewegungen.size: " + bewegungen.size());
        for (Bewegung b : bewegungen) {
            String bwa = b.getBewegungsart();
            String date = format.format(b.getBuchungsdatum());
            SortedMap<String, Integer> subMap = bewegungsMap.get(date);
            if (subMap == null) {
                subMap = new TreeMap<>();
                bewegungsMap.put(date, subMap);
            }
            // Accumulate: several movements of the same type may share a date.
            Integer menge = subMap.get(bwa);
            if (menge == null) {
                menge = 0;
            }
            subMap.put(bwa, menge + getValue(b));
        }
    }

    /**
     * Builds {@code abgangsMap}: per-day series of all movement types whose
     * sign is -1 (issues only).
     */
    public void initAbgangsMap() {
        abgangsMap = BewegungUtils.makeEmptyMap(first, last);
        for (Bwa bwa : bwas) {
            String bwaString = bwa.getBwa();
            if (getVorzeichen().get(bwaString) != -1) {
                continue; // only issue movement types belong in this map
            }
            Map<String, Integer> series = getBewegungenAsMap(bwaString);
            BewegungUtils.addSeries(abgangsMap, series, bwaString);
        }
    }

    /**
     * Builds {@code chartMap} as a deep copy of {@code bewegungsMap} with the
     * real stock level added as an extra series (leading space sorts it first).
     */
    public void initChartMap() {
        chartMap = new TreeMap<>();
        for (Map.Entry<String, SortedMap<String, Integer>> entry : bewegungsMap.entrySet()) {
            // copy sub-maps so chart mutations never leak into bewegungsMap
            chartMap.put(entry.getKey(), new TreeMap<>(entry.getValue()));
        }
        BewegungUtils.addSeries(chartMap, new TreeMap<>(getBestandsMap()), " Bestand");
    }

    /** Lazily initialized real stock-level map. */
    public SortedMap<String, Integer> getBestandsMap() {
        if (bestandsMap == null) {
            initBestandsMap();
        }
        return bestandsMap;
    }

    /** Lazily initialized simulated stock-level map. */
    public SortedMap<String, Integer> getSimBestandsMap() {
        if (simBestandsMap == null) {
            initSimBestandsMap();
        }
        return simBestandsMap;
    }

    /**
     * Back-calculates the opening stock at {@code first} from the known 2016
     * stock level by undoing all receipts and issues.
     *
     * @return opening stock (0 if no 2016 stock is recorded)
     */
    public Integer getAnfangsbestand() {
        Integer bestand = bestandDao.getBestand2016(materialNummer);
        if (bestand == null) {
            bestand = 0;
        }
        int count = 0, count2 = 0, zuSum = 0, abSum = 0;
        // walk backwards: subtract receipts, add issues
        for (Bewegung zu : zugaenge) {
            bestand -= zu.getMenge();
            zuSum += zu.getMenge();
            count++;
        }
        for (Bewegung ab : abgaenge) {
            bestand += ab.getMenge();
            abSum += ab.getMenge();
            count2++;
        }
        log.log(Level.INFO,
                "zugaenge: {0}, count: {1}, ab: {2}, count: {3}, diff: {4}, bes2016: {5}, bes2015(calc): {6}",
                new Object[]{zuSum, count, abSum, count2, zuSum - abSum,
                    bestandDao.getBestand2016(materialNummer), bestand});
        return bestand;
    }

    /** Splits {@code bewegungen} into receipts and issues by movement-type sign. */
    private void initZuAbgaenge() {
        zugaenge = new ArrayList<>();
        abgaenge = new ArrayList<>();
        for (Bewegung b : bewegungen) {
            int vz = getVorzeichen().get(b.getBewegungsart());
            switch (vz) {
                case 0:
                    // neutral movement types do not affect stock
                    break;
                case 1:
                    zugaenge.add(b);
                    break;
                case -1:
                    abgaenge.add(b);
                    break;
                default:
                    log.warning("Default Case in Vorzeichen switch! Value was: " + vz);
            }
        }
    }

    /**
     * Builds {@code bestandsMap}: the real stock level for every day between
     * {@code first} and {@code last}, starting from the back-calculated
     * opening stock and applying each day's signed movements.
     */
    public void initBestandsMap() {
        if (bewegungsMap == null) {
            initBewegungsMap();
        }
        bestandsMap = new TreeMap<>();
        Date lastPlusOne = DateUtils.addDays(last, 1);
        Integer currentBestand = getAnfangsbestand();
        int zuSum = 0, abSum = 0, count1 = 0, count2 = 0, count3 = 0, count4 = 0;
        SimpleDateFormat format = new SimpleDateFormat(DATE_PATTERN);
        for (Date day = first; day.before(lastPlusOne); day = DateUtils.addDays(day, 1)) {
            String dateString = format.format(day);
            SortedMap<String, Integer> dayBewegungen = bewegungsMap.get(dateString);
            count3++;
            if (dayBewegungen != null) {
                for (Map.Entry<String, Integer> entry : dayBewegungen.entrySet()) {
                    count4++;
                    Integer menge = entry.getValue();
                    if (menge > 0) {
                        zuSum += menge;
                        count1++;
                    } else {
                        abSum += menge;
                        count2++;
                    }
                    currentBestand += menge;
                }
            }
            bestandsMap.put(dateString, currentBestand);
            adjustMinMax(currentBestand);
        }
        log.log(Level.INFO,
                "zugaenge: {0}, count: {1}, ab: {2}, count: {3}, diff: {4}, bes2016: {5}, bes2015(calc): {6}, bewegungsMap.size: {7}, count3: {8}, count4: {9}",
                new Object[]{zuSum, count1, abSum, count2, zuSum + abSum,
                    bestandDao.getBestand2016(materialNummer), currentBestand,
                    bewegungsMap.size(), count3, count4});
    }

    /** Tracks min/max of the real stock level. */
    private void adjustMinMax(Integer currentBestand) {
        if (minBestand == null || minBestand > currentBestand) {
            minBestand = currentBestand;
        }
        if (maxBestand == null || maxBestand < currentBestand) {
            maxBestand = currentBestand;
        }
    }

    /** Tracks min/max of the simulated stock level. */
    private void adjustSimMinMax(Integer currentBestand) {
        if (minSimBestand == null || minSimBestand > currentBestand) {
            minSimBestand = currentBestand;
        }
        if (maxSimBestand == null || maxSimBestand < currentBestand) {
            maxSimBestand = currentBestand;
        }
    }

    /** Resets all min/max trackers before a new material is processed. */
    private void resetMinMax() {
        minSimBestand = null;
        maxSimBestand = null;
        maxBestand = null;
        minBestand = null;
    }

    /**
     * Simulates the stock level under a simple reorder policy: whenever
     * on-hand plus on-order stock falls to or below the maximum issue volume
     * of one lead-time window, {@code bestellmenge} units are ordered and
     * arrive {@code abgangsInterval} days later. Only real issue movements
     * ({@code abgangsMap}) drain the simulated stock.
     */
    public void initSimBestandsMap() {
        Integer ordered = 0;
        if (abgangsMap == null) {
            // BUGFIX: previously called initBewegungsMap(), which never
            // populates abgangsMap and left this method to NPE below.
            initAbgangsMap();
        }
        simZugangsMap = BewegungUtils.makeEmpty1Map(first, last);
        simBestandsMap = new TreeMap<>();
        Integer abgangsInterval = getAbgangsInterval();
        Integer maxAbgangsmenge = getMaxAbgangsmenge(abgangsInterval);
        Date lastPlusOne = DateUtils.addDays(last, 1);
        Integer currentBestand = getAnfangsbestand();
        SimpleDateFormat format = new SimpleDateFormat(DATE_PATTERN);
        for (Date day = first; day.before(lastPlusOne); day = DateUtils.addDays(day, 1)) {
            String dateString = format.format(day);
            SortedMap<String, Integer> dayBewegungen = abgangsMap.get(dateString);
            if (dayBewegungen != null) {
                for (Map.Entry<String, Integer> entry : dayBewegungen.entrySet()) {
                    currentBestand += entry.getValue();
                }
            }
            // BUGFIX: credit simulated arrivals independently of whether the
            // day has issue entries (was nested in the null-check above).
            Integer zugang = simZugangsMap.get(dateString);
            if (zugang != null) {
                currentBestand += zugang;
                ordered -= zugang;
            }
            simBestandsMap.put(dateString, currentBestand);
            adjustSimMinMax(currentBestand);
            if (currentBestand + ordered <= maxAbgangsmenge) {
                Date nowPlusInterval = DateUtils.addDays(day, abgangsInterval);
                String arrivalDate = format.format(nowPlusInterval);
                simZugangsMap.put(arrivalDate, bestellmenge);
                ordered += bestellmenge;
            }
        }
    }

    /** @return chart data for the raw per-day movement series */
    public String getBewegungsArray() {
        return ChartUtils.makeChartData(bewegungsMap);
    }

    /** @return chart data for movements plus the real stock series */
    public String getChartArray() {
        return ChartUtils.makeChartData(chartMap);
    }

    /** @return chart data containing only the real stock series */
    public String getBestandsArray() {
        SortedMap<String, SortedMap<String, Integer>> tempMap =
                BewegungUtils.makeEmptyMap(first, last);
        BewegungUtils.addSeries(tempMap, bestandsMap, "Bestand");
        return ChartUtils.makeChartData(tempMap);
    }

    /**
     * @return chart data comparing real and simulated stock levels, their
     *         min/max bands, and the simulated reorder arrivals
     */
    public String getSimBestandsArray() {
        SortedMap<String, SortedMap<String, Integer>> tempMap =
                BewegungUtils.makeEmptyMap(first, last);
        BewegungUtils.addSeries(tempMap, simBestandsMap, "2_Bestand(Simuliert)");
        Map minSim = BewegungUtils.makeEmpty1Map(first, last, minSimBestand);
        BewegungUtils.addSeries(tempMap, minSim, "2_Min(Simuliert)");
        Map maxSim = BewegungUtils.makeEmpty1Map(first, last, maxSimBestand);
        BewegungUtils.addSeries(tempMap, maxSim, "2_Max(Simuliert)");
        BewegungUtils.addSeries(tempMap, bestandsMap, "1_Bestand(Real)");
        Map min = BewegungUtils.makeEmpty1Map(first, last, minBestand);
        BewegungUtils.addSeries(tempMap, min, "1_Min(Real)");
        Map max = BewegungUtils.makeEmpty1Map(first, last, maxBestand);
        BewegungUtils.addSeries(tempMap, max, "1_Max(Real)");
        BewegungUtils.addSeries(tempMap, simZugangsMap, "Reorders(Simuliert)");
        return ChartUtils.makeChartData(tempMap);
    }

    /**
     * Loads the movements of one movement type as a date-&gt;signed-quantity map.
     * Quantities of several movements on the same date are summed.
     */
    private SortedMap<String, Integer> getBewegungenAsMap(String bwa) {
        List<Bewegung> bewegungenOfBwa = bewegungDao.getBewegungen(materialNummer, bwa);
        SortedMap<String, Integer> map = new TreeMap<>();
        for (Bewegung b : bewegungenOfBwa) {
            // BUGFIX: accumulate instead of put() — put() silently dropped all
            // but the last movement sharing a booking date.
            map.merge(b.getDateString(), getValue(b), Integer::sum);
        }
        return map;
    }

    public List<Bwa> getBwasOfMaterial() {
        return getBwasOfMaterial(materialNummer);
    }

    public List<Bwa> getBwasOfMaterial(String materialNummer) {
        return bwaDao.getBewegungsartenOfMaterial(materialNummer);
    }

    /**
     * Sets the active material number and (re)initializes all derived state.
     * Falls back to a default number when {@code matnr} is null; redirects to
     * the material selection page (and aborts) when no movements exist.
     */
    public void setMaterialNummer(String matnr) {
        if (matnr == null) {
            log.warning("materialnummer was null. setting default value.");
            matnr = "64365703";
        }
        bewegungen = bewegungDao.getByMaterialnummer(matnr);
        if (bewegungen == null || bewegungen.isEmpty()) {
            try {
                FacesContext.getCurrentInstance().getExternalContext()
                        .redirect("materialnummern.xhtml");
            } catch (IOException ex) {
                // use this bean's own logger (was wrongly BewegungView's)
                log.log(Level.SEVERE, null, ex);
            }
            // BUGFIX: abort — continuing would derive state from no movements.
            return;
        }
        materialNummer = matnr;
        first = BewegungUtils.getFirstDate(bewegungen);
        last = BewegungUtils.getLastDate(bewegungen);
        bwas = getBwasOfMaterial();
        bwaMengen = bewegungDao.getBwaMengen(materialNummer);
        resetMinMax();
        initVorzeichen();
        initZuAbgaenge();
        initBewegungsMap();
        initAbgangsMap();
        initBestandsMap();
        bestellmenge = getMaxAbgangsmenge();
        initSimBestandsMap();
        initChartMap();
    }

    /** Caches the sign (+1/0/-1) of every movement type of the material. */
    void initVorzeichen() {
        vorzeichen = new HashMap<>();
        for (Bwa bwa : bwas) {
            vorzeichen.put(bwa.getBwa(), BewegungUtils.getVorzeichenOfType(bwa.getTyp()));
        }
    }

    int getVorzeichenOfBwa(String bwa) {
        return vorzeichen.get(bwa);
    }

    /** @return the movement's quantity with its type's sign applied */
    int getValue(Bewegung bewegung) {
        String bwa = bewegung.getBewegungsart();
        return getVorzeichenOfBwa(bwa) * bewegung.getMenge();
    }

    public int getMaxAbgangsmenge() {
        return getMaxAbgangsmenge(abgangsInterval);
    }

    /**
     * Sliding-window maximum of total issues within any window of
     * {@code intervalInDays} days between {@code first} and {@code last}.
     * The window is advanced one day at a time: issues leaving the window are
     * subtracted, issues entering it are added.
     */
    private int getMaxAbgangsmenge(int intervalInDays) {
        int maxAbgang = 0;
        Date from = getFirst();
        Date to = getLast();
        Date intervalEnd = DateUtils.addDays(from, intervalInDays);
        int currentAbgang = 0;
        // seed the first window
        for (Bewegung ab : abgaenge) {
            Date abDate = ab.getBuchungsdatum();
            if (abDate.after(from) && abDate.before(intervalEnd)) {
                currentAbgang += ab.getMenge();
            }
        }
        maxAbgang = currentAbgang;
        Date toMinusInterval = DateUtils.addDays(to, -intervalInDays + 1);
        for (Date intervalStart = from; intervalStart.before(toMinusInterval);
                intervalStart = DateUtils.addDays(intervalStart, 1)) {
            intervalEnd = DateUtils.addDays(intervalStart, intervalInDays);
            // NOTE(review): Date.equals requires exact timestamps — assumes
            // booking dates are normalized to midnight; confirm.
            for (Bewegung ab : abgaenge) {
                Date abDate = ab.getBuchungsdatum();
                if (abDate.equals(intervalStart)) {
                    currentAbgang -= ab.getMenge();
                } else if (abDate.equals(intervalEnd)) {
                    currentAbgang += ab.getMenge();
                }
            }
            if (currentAbgang > maxAbgang) {
                maxAbgang = currentAbgang;
            }
        }
        return maxAbgang;
    }

    public Integer getMengeOfBwa(String bwa) {
        return getBwaMengen().get(bwa);
    }

    /** Changing the lead time invalidates and rebuilds the simulation. */
    public void setAbgangsInterval(int ab) {
        abgangsInterval = ab;
        initSimBestandsMap();
    }

    public Integer getBestellmenge() {
        return bestellmenge;
    }

    public void setBestellmenge(Integer x) {
        bestellmenge = x;
        log.finest("setBestellmenge " + x);
    }
}
/* * Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package jdk.nashorn.api.scripting; import java.util.Objects; import javax.script.Invocable; import javax.script.ScriptContext; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import javax.script.ScriptException; import javax.script.SimpleScriptContext; import org.testng.Assert; import static org.testng.Assert.assertEquals; import static org.testng.Assert.fail; import org.testng.annotations.Test; /** * Tests for javax.script.Invocable implementation of nashorn. 
*/ public class InvocableTest { private void log(String msg) { org.testng.Reporter.log(msg, true); } @Test public void invokeMethodTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { e.eval("var Example = function() { this.hello = function() { return 'Hello World!'; };}; myExample = new Example();"); final Object obj = e.get("myExample"); final Object res = ((Invocable) e).invokeMethod(obj, "hello"); assertEquals(res, "Hello World!"); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } } @Test /** * Check that we can call invokeMethod on an object that we got by * evaluating script with different Context set. */ public void invokeMethodDifferentContextTest() { ScriptEngineManager m = new ScriptEngineManager(); ScriptEngine e = m.getEngineByName("nashorn"); try { // define an object with method on it Object obj = e.eval("({ hello: function() { return 'Hello World!'; } })"); final ScriptContext ctxt = new SimpleScriptContext(); ctxt.setBindings(e.createBindings(), ScriptContext.ENGINE_SCOPE); e.setContext(ctxt); // invoke 'func' on obj - but with current script context changed final Object res = ((Invocable) e).invokeMethod(obj, "hello"); assertEquals(res, "Hello World!"); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } } @Test /** * Check that invokeMethod throws NPE on null method name. */ public void invokeMethodNullNameTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { final Object obj = e.eval("({})"); final Object res = ((Invocable) e).invokeMethod(obj, null); fail("should have thrown NPE"); } catch (final Exception exp) { if (!(exp instanceof NullPointerException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that invokeMethod throws NoSuchMethodException on missing method. 
*/ public void invokeMethodMissingTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { final Object obj = e.eval("({})"); final Object res = ((Invocable) e).invokeMethod(obj, "nonExistentMethod"); fail("should have thrown NoSuchMethodException"); } catch (final Exception exp) { if (!(exp instanceof NoSuchMethodException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that calling method on non-script object 'thiz' results in * IllegalArgumentException. */ public void invokeMethodNonScriptObjectThizTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { ((Invocable) e).invokeMethod(new Object(), "toString"); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that calling method on null 'thiz' results in * IllegalArgumentException. */ public void invokeMethodNullThizTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { ((Invocable) e).invokeMethod(null, "toString"); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that calling method on mirror created by another engine results in * IllegalArgumentException. 
*/ public void invokeMethodMixEnginesTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine engine1 = m.getEngineByName("nashorn"); final ScriptEngine engine2 = m.getEngineByName("nashorn"); try { Object obj = engine1.eval("({ run: function() {} })"); // pass object from engine1 to engine2 as 'thiz' for invokeMethod ((Invocable) engine2).invokeMethod(obj, "run"); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test public void getInterfaceTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); final Invocable inv = (Invocable) e; // try to get interface from global functions try { e.eval("function run() { print('run'); };"); final Runnable runnable = inv.getInterface(Runnable.class); runnable.run(); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } // try interface on specific script object try { e.eval("var obj = { run: function() { print('run from obj'); } };"); Object obj = e.get("obj"); final Runnable runnable = inv.getInterface(obj, Runnable.class); runnable.run(); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } } public interface Foo { public void bar(); } public interface Foo2 extends Foo { public void bar2(); } @Test public void getInterfaceMissingTest() { final ScriptEngineManager manager = new ScriptEngineManager(); final ScriptEngine engine = manager.getEngineByName("nashorn"); // don't define any function. 
try { engine.eval(""); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } Runnable runnable = ((Invocable) engine).getInterface(Runnable.class); if (runnable != null) { fail("runnable is not null!"); } // now define "run" try { engine.eval("function run() { print('this is run function'); }"); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } runnable = ((Invocable) engine).getInterface(Runnable.class); // should not return null now! runnable.run(); // define only one method of "Foo2" try { engine.eval("function bar() { print('bar function'); }"); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } Foo2 foo2 = ((Invocable) engine).getInterface(Foo2.class); if (foo2 != null) { throw new RuntimeException("foo2 is not null!"); } // now define other method of "Foo2" try { engine.eval("function bar2() { print('bar2 function'); }"); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } foo2 = ((Invocable) engine).getInterface(Foo2.class); foo2.bar(); foo2.bar2(); } @Test /** * Try passing non-interface Class object for interface implementation. */ public void getNonInterfaceGetInterfaceTest() { final ScriptEngineManager manager = new ScriptEngineManager(); final ScriptEngine engine = manager.getEngineByName("nashorn"); try { log(Objects.toString(((Invocable) engine).getInterface(Object.class))); fail("Should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { fail("IllegalArgumentException expected, got " + exp); } } } @Test /** * Check that we can get interface out of a script object even after * switching to use different ScriptContext. 
*/ public void getInterfaceDifferentContext() { ScriptEngineManager m = new ScriptEngineManager(); ScriptEngine e = m.getEngineByName("nashorn"); try { Object obj = e.eval("({ run: function() { } })"); // change script context ScriptContext ctxt = new SimpleScriptContext(); ctxt.setBindings(e.createBindings(), ScriptContext.ENGINE_SCOPE); e.setContext(ctxt); Runnable r = ((Invocable) e).getInterface(obj, Runnable.class); r.run(); } catch (final Exception exp) { exp.printStackTrace(); fail(exp.getMessage()); } } @Test /** * Check that getInterface on non-script object 'thiz' results in * IllegalArgumentException. */ public void getInterfaceNonScriptObjectThizTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { ((Invocable) e).getInterface(new Object(), Runnable.class); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that getInterface on null 'thiz' results in * IllegalArgumentException. */ public void getInterfaceNullThizTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { ((Invocable) e).getInterface(null, Runnable.class); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that calling getInterface on mirror created by another engine * results in IllegalArgumentException. 
*/ public void getInterfaceMixEnginesTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine engine1 = m.getEngineByName("nashorn"); final ScriptEngine engine2 = m.getEngineByName("nashorn"); try { Object obj = engine1.eval("({ run: function() {} })"); // pass object from engine1 to engine2 as 'thiz' for getInterface ((Invocable) engine2).getInterface(obj, Runnable.class); fail("should have thrown IllegalArgumentException"); } catch (final Exception exp) { if (!(exp instanceof IllegalArgumentException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * check that null function name results in NPE. */ public void invokeFunctionNullNameTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { final Object res = ((Invocable) e).invokeFunction(null); fail("should have thrown NPE"); } catch (final Exception exp) { if (!(exp instanceof NullPointerException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that attempt to call missing function results in * NoSuchMethodException. */ public void invokeFunctionMissingTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { final Object res = ((Invocable) e).invokeFunction("NonExistentFunc"); fail("should have thrown NoSuchMethodException"); } catch (final Exception exp) { if (!(exp instanceof NoSuchMethodException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test /** * Check that invokeFunction calls functions only from current context's * Bindings. 
*/ public void invokeFunctionDifferentContextTest() { ScriptEngineManager m = new ScriptEngineManager(); ScriptEngine e = m.getEngineByName("nashorn"); try { // define an object with method on it Object obj = e.eval("function hello() { return 'Hello World!'; }"); final ScriptContext ctxt = new SimpleScriptContext(); ctxt.setBindings(e.createBindings(), ScriptContext.ENGINE_SCOPE); // change engine's current context e.setContext(ctxt); ((Invocable) e).invokeFunction("hello"); // no 'hello' in new context! fail("should have thrown NoSuchMethodException"); } catch (final Exception exp) { if (!(exp instanceof NoSuchMethodException)) { exp.printStackTrace(); fail(exp.getMessage()); } } } @Test public void invokeFunctionExceptionTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { e.eval("function func() { throw new TypeError(); }"); } catch (final Throwable t) { t.printStackTrace(); fail(t.getMessage()); } try { ((Invocable) e).invokeFunction("func"); fail("should have thrown exception"); } catch (final ScriptException se) { // ECMA TypeError property wrapped as a ScriptException log("got " + se + " as expected"); } catch (final Throwable t) { t.printStackTrace(); fail(t.getMessage()); } } @Test public void invokeMethodExceptionTest() { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); try { e.eval("var sobj = {}; sobj.foo = function func() { throw new TypeError(); }"); } catch (final Throwable t) { t.printStackTrace(); fail(t.getMessage()); } try { final Object sobj = e.get("sobj"); ((Invocable) e).invokeMethod(sobj, "foo"); fail("should have thrown exception"); } catch (final ScriptException se) { // ECMA TypeError property wrapped as a ScriptException log("got " + se + " as expected"); } catch (final Throwable t) { t.printStackTrace(); fail(t.getMessage()); } } @Test /** * Tests whether invocation of a JavaScript method through a 
variable arity * Java method will pass the vararg array. Both non-vararg and vararg * JavaScript methods are tested. * * @throws ScriptException */ public void variableArityInterfaceTest() throws ScriptException { final ScriptEngineManager m = new ScriptEngineManager(); final ScriptEngine e = m.getEngineByName("nashorn"); e.eval( "function test1(i, strings) {" + " return 'i == ' + i + ', strings instanceof java.lang.String[] == ' + (strings instanceof Java.type('java.lang.String[]')) + ', strings == ' + java.util.Arrays.toString(strings)" + "}" + "function test2() {" + " return 'arguments[0] == ' + arguments[0] + ', arguments[1] instanceof java.lang.String[] == ' + (arguments[1] instanceof Java.type('java.lang.String[]')) + ', arguments[1] == ' + java.util.Arrays.toString(arguments[1])" + "}"); final VariableArityTestInterface itf = ((Invocable) e).getInterface(VariableArityTestInterface.class); Assert.assertEquals(itf.test1(42, "a", "b"), "i == 42, strings instanceof java.lang.String[] == true, strings == [a, b]"); Assert.assertEquals(itf.test2(44, "c", "d", "e"), "arguments[0] == 44, arguments[1] instanceof java.lang.String[] == true, arguments[1] == [c, d, e]"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermStates; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import 
org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.DocIdSetBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.NumberType; import org.apache.solr.schema.SchemaField; /** * The GraphTermsQuery builds a disjunction query from a list of terms. The terms are first filtered * by the maxDocFreq parameter. This allows graph traversals to skip traversing high frequency nodes * which is often desirable from a performance standpoint. 
 * <p>Syntax: {!graphTerms f=field maxDocFreq=10000}term1,term2,term3
 */
public class GraphTermsQParserPlugin extends QParserPlugin {
  public static final String NAME = "graphTerms";

  @Override
  public QParser createParser(
      String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
    return new QParser(qstr, localParams, params, req) {
      /**
       * Parses the comma-separated term list into either a plain set query (no maxDocFreq
       * limit), a {@link PointSetQuery} (point fields), or a {@link GraphTermsQuery}
       * (indexed/term fields).
       */
      @Override
      public Query parse() throws SyntaxError {
        String fname = localParams.get(QueryParsing.F);
        FieldType ft = req.getSchema().getFieldTypeNoEx(fname);
        int maxDocFreq = localParams.getInt("maxDocFreq", Integer.MAX_VALUE);
        String qstr = localParams.get(QueryParsing.V); // never null
        if (qstr.length() == 0) {
          return new MatchNoDocsQuery();
        }
        final String[] splitVals = qstr.split(",");
        SchemaField sf = req.getSchema().getField(fname);

        // if we don't limit by maxDocFreq, then simply use a normal set query
        if (maxDocFreq == Integer.MAX_VALUE) {
          return sf.getType().getSetQuery(this, sf, Arrays.asList(splitVals));
        }

        if (sf.getType().isPointField()) {
          // Point fields: parse and sort values up front, because PointSetQuery's constructor
          // requires its input stream to be in ascending order.
          PointSetQuery setQ = null;
          if (sf.getType().getNumberType() == NumberType.INTEGER) {
            int[] vals = new int[splitVals.length];
            for (int i = 0; i < vals.length; i++) {
              vals[i] = Integer.parseInt(splitVals[i]);
            }
            Arrays.sort(vals);
            setQ = PointSetQuery.newSetQuery(sf.getName(), vals);
          } else if (sf.getType().getNumberType() == NumberType.LONG
              || sf.getType().getNumberType() == NumberType.DATE) {
            long[] vals = new long[splitVals.length];
            for (int i = 0; i < vals.length; i++) {
              vals[i] = Long.parseLong(splitVals[i]);
            }
            Arrays.sort(vals);
            setQ = PointSetQuery.newSetQuery(sf.getName(), vals);
          } else if (sf.getType().getNumberType() == NumberType.FLOAT) {
            float[] vals = new float[splitVals.length];
            for (int i = 0; i < vals.length; i++) {
              vals[i] = Float.parseFloat(splitVals[i]);
            }
            Arrays.sort(vals);
            setQ = PointSetQuery.newSetQuery(sf.getName(), vals);
          } else if (sf.getType().getNumberType() == NumberType.DOUBLE) {
            double[] vals = new double[splitVals.length];
            for (int i = 0; i < vals.length; i++) {
              vals[i] = Double.parseDouble(splitVals[i]);
            }
            Arrays.sort(vals);
            setQ = PointSetQuery.newSetQuery(sf.getName(), vals);
          }
          // NOTE(review): if the field's NumberType is none of the above, setQ is still null
          // here and this dereference throws NPE — TODO confirm whether all point NumberTypes
          // are covered, or add an explicit error for unsupported types.
          setQ.setMaxDocFreq(maxDocFreq);
          return setQ;
        }

        // Non-point (indexed term) fields: convert each readable value to its indexed form.
        Term[] terms = new Term[splitVals.length];
        BytesRefBuilder term = new BytesRefBuilder();
        for (int i = 0; i < splitVals.length; i++) {
          String stringVal = splitVals[i].trim();
          if (ft != null) {
            ft.readableToIndexed(stringVal, term);
          } else {
            term.copyChars(stringVal);
          }
          BytesRef ref = term.toBytesRef();
          terms[i] = new Term(fname, ref);
        }
        ArrayUtil.timSort(terms);
        return new ConstantScoreQuery(new GraphTermsQuery(fname, terms, maxDocFreq));
      }
    };
  }

  /** Similar to {@code TermsQuery} but adds a {@code maxDocFreq}. */
  private class GraphTermsQuery extends Query implements ExtendedQuery {
    // Not a post filter. This will typically be used as the main query.
    private Term[] queryTerms;
    private String field;
    private int maxDocFreq;
    // Identity token: equals/hashCode compare this object reference, so every query
    // instance (other than its clones) is distinct and never shared in caches.
    private Object id;

    public GraphTermsQuery(String field, Term[] terms, int maxDocFreq) {
      this.maxDocFreq = maxDocFreq;
      this.field = field;
      this.queryTerms = terms;
      this.id = new Object();
    }

    // Just for cloning: shares the identity token so the clone compares equal.
    private GraphTermsQuery(String field, Term[] terms, int maxDocFreq, Object id) {
      this.field = field;
      this.queryTerms = terms;
      this.maxDocFreq = maxDocFreq;
      this.id = id;
    }

    public boolean getCache() {
      return false;
    }

    public void setCache(boolean cache) {
      // TODO support user choice
    }

    public int getCost() {
      // 0 is the default and keeping it avoids a needless wrapper for TwoPhaseIterator matchCost.
      return 0;
    }

    public void setCost(int cost) {}

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
      return this;
    }

    public int hashCode() {
      return 31 * classHash() + id.hashCode();
    }

    public boolean equals(Object other) {
      // Reference comparison on id is intentional: only this instance and its clones are equal.
      return sameClassAs(other) && id == ((GraphTermsQuery) other).id;
    }

    public GraphTermsQuery clone() {
      GraphTermsQuery clone =
          new GraphTermsQuery(this.field, this.queryTerms, this.maxDocFreq, this.id);
      return clone;
    }

    @Override
    public String toString(String defaultField) {
      return Arrays.stream(this.queryTerms).map(Term::toString).collect(Collectors.joining(","));
    }

    @Override
    public void visit(QueryVisitor visitor) {
      visitor.visitLeaf(this);
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost)
        throws IOException {
      // Pre-compute per-term states and drop any term whose index-wide docFreq exceeds
      // maxDocFreq; only the surviving terms contribute to the scorer below.
      List<TermStates> finalContexts = new ArrayList<>();
      List<Term> finalTerms = new ArrayList<>();
      {
        List<LeafReaderContext> contexts = searcher.getTopReaderContext().leaves();
        TermStates[] termStates = new TermStates[this.queryTerms.length];
        collectTermStates(searcher.getIndexReader(), contexts, termStates, this.queryTerms);
        for (int i = 0; i < termStates.length; i++) {
          TermStates ts = termStates[i];
          if (ts != null && ts.docFreq() <= this.maxDocFreq) {
            finalContexts.add(ts);
            finalTerms.add(queryTerms[i]);
          }
        }
      }
      return new ConstantScoreWeight(this, boost) {
        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
          final LeafReader reader = context.reader();
          Terms terms = reader.terms(field);
          if (terms == null) {
            return null;
          }
          TermsEnum termsEnum = terms.iterator();
          PostingsEnum docs = null; // reused across terms via termsEnum.postings(docs, ...)
          DocIdSetBuilder builder = new DocIdSetBuilder(reader.maxDoc(), terms);
          for (int i = 0; i < finalContexts.size(); i++) {
            TermStates ts = finalContexts.get(i);
            TermState termState = ts.get(context);
            if (termState != null) {
              Term term = finalTerms.get(i);
              // seekExact with a cached TermState avoids re-walking the term dictionary.
              termsEnum.seekExact(term.bytes(), ts.get(context));
              docs = termsEnum.postings(docs, PostingsEnum.NONE);
              builder.add(docs);
            }
          }
          DocIdSet docIdSet = builder.build();
          DocIdSetIterator disi = docIdSet.iterator();
          return disi == null ? null : new ConstantScoreScorer(this, score(), scoreMode, disi);
        }

        @Override
        public boolean isCacheable(LeafReaderContext ctx) {
          return true;
        }
      };
    }

    /**
     * Fills {@code contextArray} with a {@link TermStates} per query term, accumulating
     * docFreq/totalTermFreq across all leaves (registering each segment where the term exists).
     */
    private void collectTermStates(
        IndexReader reader,
        List<LeafReaderContext> leaves,
        TermStates[] contextArray,
        Term[] queryTerms)
        throws IOException {
      TermsEnum termsEnum = null;
      for (LeafReaderContext context : leaves) {
        Terms terms = context.reader().terms(this.field);
        if (terms == null) {
          // field does not exist
          continue;
        }
        termsEnum = terms.iterator();
        if (termsEnum == TermsEnum.EMPTY) continue;
        for (int i = 0; i < queryTerms.length; i++) {
          Term term = queryTerms[i];
          TermStates termStates = contextArray[i];
          if (termsEnum.seekExact(term.bytes())) {
            if (termStates == null) {
              contextArray[i] =
                  new TermStates(
                      reader.getContext(),
                      termsEnum.termState(),
                      context.ord,
                      termsEnum.docFreq(),
                      termsEnum.totalTermFreq());
            } else {
              termStates.register(
                  termsEnum.termState(),
                  context.ord,
                  termsEnum.docFreq(),
                  termsEnum.totalTermFreq());
            }
          }
        }
      }
    }
  }
}

/** Modified version of {@code PointInSetQuery} to support {@code maxDocFreq}. */
abstract class PointSetQuery extends Query implements DocSetProducer, Accountable {
  protected static final long BASE_RAM_BYTES =
      RamUsageEstimator.shallowSizeOfInstance(PointSetQuery.class);

  // A little bit overkill for us, since all of our "terms" are always in the same field:
  final PrefixCodedTerms sortedPackedPoints;
  final int sortedPackedPointsHashCode;
  final String field;
  final int bytesPerDim;
  final int numDims;
  // Points matching more than this many docs are dropped from the result entirely.
  int maxDocFreq = Integer.MAX_VALUE;
  final long ramBytesUsed; // cache

  /** Iterator of encoded point values. */
  // TODO: if we want to stream, maybe we should use jdk stream class?
  public abstract static class Stream implements BytesRefIterator {
    @Override
    public abstract BytesRef next();
  }

  public void setMaxDocFreq(int maxDocFreq) {
    this.maxDocFreq = maxDocFreq;
  }

  // NOTE(review): unlike the double overload below, the float/long/int factories expect the
  // caller to have sorted the values already (the parser above does) — TODO confirm and
  // consider documenting this precondition on the parameter name alone.
  public static PointSetQuery newSetQuery(String field, float... sortedValues) {
    // Single BytesRef is reused for every value; the constructor copies each one.
    final BytesRef encoded = new BytesRef(new byte[Float.BYTES]);
    return new PointSetQuery(
        field,
        1,
        Float.BYTES,
        new PointSetQuery.Stream() {
          int upto;

          @Override
          public BytesRef next() {
            if (upto == sortedValues.length) {
              return null;
            } else {
              FloatPoint.encodeDimension(sortedValues[upto], encoded.bytes, 0);
              upto++;
              return encoded;
            }
          }
        }) {
      @Override
      protected String toString(byte[] value) {
        assert value.length == Float.BYTES;
        return Float.toString(FloatPoint.decodeDimension(value, 0));
      }
    };
  }

  public static PointSetQuery newSetQuery(String field, long... sortedValues) {
    final BytesRef encoded = new BytesRef(new byte[Long.BYTES]);
    return new PointSetQuery(
        field,
        1,
        Long.BYTES,
        new PointSetQuery.Stream() {
          int upto;

          @Override
          public BytesRef next() {
            if (upto == sortedValues.length) {
              return null;
            } else {
              LongPoint.encodeDimension(sortedValues[upto], encoded.bytes, 0);
              upto++;
              return encoded;
            }
          }
        }) {
      @Override
      protected String toString(byte[] value) {
        assert value.length == Long.BYTES;
        return Long.toString(LongPoint.decodeDimension(value, 0));
      }
    };
  }

  public static PointSetQuery newSetQuery(String field, int... sortedValues) {
    final BytesRef encoded = new BytesRef(new byte[Integer.BYTES]);
    return new PointSetQuery(
        field,
        1,
        Integer.BYTES,
        new PointSetQuery.Stream() {
          int upto;

          @Override
          public BytesRef next() {
            if (upto == sortedValues.length) {
              return null;
            } else {
              IntPoint.encodeDimension(sortedValues[upto], encoded.bytes, 0);
              upto++;
              return encoded;
            }
          }
        }) {
      @Override
      protected String toString(byte[] value) {
        assert value.length == Integer.BYTES;
        return Integer.toString(IntPoint.decodeDimension(value, 0));
      }
    };
  }

  public static PointSetQuery newSetQuery(String field, double... values) {
    // Don't unexpectedly change the user's incoming values array:
    double[] sortedValues = values.clone();
    Arrays.sort(sortedValues);
    final BytesRef encoded = new BytesRef(new byte[Double.BYTES]);
    return new PointSetQuery(
        field,
        1,
        Double.BYTES,
        new PointSetQuery.Stream() {
          int upto;

          @Override
          public BytesRef next() {
            if (upto == sortedValues.length) {
              return null;
            } else {
              DoublePoint.encodeDimension(sortedValues[upto], encoded.bytes, 0);
              upto++;
              return encoded;
            }
          }
        }) {
      @Override
      protected String toString(byte[] value) {
        assert value.length == Double.BYTES;
        return Double.toString(DoublePoint.decodeDimension(value, 0));
      }
    };
  }

  /**
   * Builds the query from a stream of packed point values, which must be sorted ascending;
   * duplicates are silently dropped and out-of-order input throws IllegalArgumentException.
   */
  public PointSetQuery(String field, int numDims, int bytesPerDim, Stream packedPoints) {
    this.field = field;
    this.bytesPerDim = bytesPerDim;
    this.numDims = numDims;

    // In the 1D case this works well (the more points, the more common prefixes they share,
    // typically), but in the > 1 D case, where we are only looking at the first dimension's prefix
    // bytes, it can at worst not hurt:
    PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder();
    BytesRefBuilder previous = null;
    BytesRef current;
    while ((current = packedPoints.next()) != null) {
      if (current.length != numDims * bytesPerDim) {
        throw new IllegalArgumentException(
            "packed point length should be "
                + (numDims * bytesPerDim)
                + " but got "
                + current.length
                + "; field=\""
                + field
                + "\" numDims="
                + numDims
                + " bytesPerDim="
                + bytesPerDim);
      }
      if (previous == null) {
        previous = new BytesRefBuilder();
      } else {
        int cmp = previous.get().compareTo(current);
        if (cmp == 0) {
          continue; // deduplicate
        } else if (cmp > 0) {
          throw new IllegalArgumentException(
              "values are out of order: saw " + previous + " before " + current);
        }
      }
      builder.add(field, current);
      previous.copyBytes(current);
    }
    sortedPackedPoints = builder.finish();
    sortedPackedPointsHashCode = sortedPackedPoints.hashCode();
    ramBytesUsed = BASE_RAM_BYTES + RamUsageEstimator.sizeOfObject(sortedPackedPoints);
  }

  /** Returns the live-doc bitset, or null when the reader has no deletions. */
  private FixedBitSet getLiveDocs(IndexSearcher searcher) throws IOException {
    if (!searcher.getIndexReader().hasDeletions()) {
      return null;
    }
    if (searcher instanceof SolrIndexSearcher) {
      return ((SolrIndexSearcher) searcher).getLiveDocSet().getBits();
    } else { // could happen in Delete-by-query situation
      // smallSetSize==0 thus will always produce a BitDocSet (FixedBitSet)
      DocSetCollector docSetCollector = new DocSetCollector(0, searcher.getIndexReader().maxDoc());
      searcher.search(new MatchAllDocsQuery(), docSetCollector);
      return ((BitDocSet) docSetCollector.getDocSet()).getBits();
    }
  }

  @Override
  public DocSet createDocSet(SolrIndexSearcher searcher) throws IOException {
    return getDocSet(searcher);
  }

  @Override
  public long ramBytesUsed() {
    return ramBytesUsed;
  }

  /**
   * Collects matching docs point-by-point, skipping (via the labeled continue) any point whose
   * cumulative match count across segments exceeds {@code maxDocFreq}.
   */
  public DocSet getDocSet(IndexSearcher searcher) throws IOException {
    IndexReaderContext top = ReaderUtil.getTopLevelContext(searcher.getTopReaderContext());
    List<LeafReaderContext> segs = top.leaves();
    DocSetBuilder builder =
        new DocSetBuilder(top.reader().maxDoc(), Math.min(64, (top.reader().maxDoc() >>> 10) + 4));
    PointValues[] segPoints = new PointValues[segs.size()];
    for (int i = 0; i < segPoints.length; i++) {
      segPoints[i] = segs.get(i).reader().getPointValues(field);
    }

    int maxCollect = Math.min(maxDocFreq, top.reader().maxDoc());

    PointSetQuery.CutoffPointVisitor visitor = new PointSetQuery.CutoffPointVisitor(maxCollect);
    PrefixCodedTerms.TermIterator iterator = sortedPackedPoints.iterator();
    outer:
    for (BytesRef point = iterator.next(); point != null; point = iterator.next()) {
      visitor.setPoint(point);
      for (int i = 0; i < segs.size(); i++) {
        if (segPoints[i] == null) continue;
        visitor.setBase(segs.get(i).docBase);
        segPoints[i].intersect(visitor);
        if (visitor.getCount() > maxDocFreq) {
          continue outer; // this point is too frequent: drop it entirely
        }
      }
      int collected = visitor.getCount();
      int[] ids = visitor.getGlobalIds();
      for (int i = 0; i < collected; i++) {
        builder.add(ids[i]);
      }
    }

    FixedBitSet liveDocs = getLiveDocs(searcher);
    DocSet set = builder.build(liveDocs);
    return set;
  }

  @Override
  public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost)
      throws IOException {
    return new ConstantScoreWeight(this, boost) {
      // Computed lazily on first scorer() call, then shared by all segments.
      DocSet docs;

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        if (docs == null) {
          docs = getDocSet(searcher);
        }
        // Although this set only includes live docs, other filters can be pushed down to queries.
        DocIdSetIterator readerSetIterator = docs.iterator(context);
        if (readerSetIterator == null) {
          return null;
        }
        return new ConstantScoreScorer(this, score(), scoreMode, readerSetIterator);
      }

      @Override
      public boolean isCacheable(LeafReaderContext ctx) {
        return true;
      }
    };
  }

  /** Cutoff point visitor that collects a maximum number of points before stopping. */
  private class CutoffPointVisitor implements PointValues.IntersectVisitor {
    int[] ids;
    int base; // docBase of the segment currently being intersected
    int count;
    private final byte[] pointBytes;

    public CutoffPointVisitor(int sz) {
      this.pointBytes = new byte[bytesPerDim * numDims];
      ids = new int[sz];
    }

    // Stores global (base-adjusted) ids up to capacity, but keeps counting past it so the
    // caller can detect that maxDocFreq was exceeded.
    private void add(int id) {
      if (count < ids.length) {
        ids[count] = id + base;
      }
      count++;
    }

    public int getCount() {
      return count;
    }

    public int[] getGlobalIds() {
      return ids;
    }

    public void setPoint(BytesRef point) {
      // we verified this up front in query's ctor:
      assert point.length == pointBytes.length;
      System.arraycopy(point.bytes, point.offset, pointBytes, 0, pointBytes.length);
      count = 0;
    }

    public void setBase(int base) {
      this.base = base;
    }

    @Override
    public void grow(int count) {}

    @Override
    public void visit(int docID) {
      add(docID);
    }

    @Override
    public void visit(int docID, byte[] packedValue) {
      if (Arrays.equals(packedValue, pointBytes)) {
        add(docID);
      }
    }

    @Override
    public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
      boolean crosses = false;
      for (int dim = 0; dim < numDims; dim++) {
        int offset = dim * bytesPerDim;
        int cmpMin =
            Arrays.compareUnsigned(
                minPackedValue, offset, offset + bytesPerDim, pointBytes, offset,
                offset + bytesPerDim);
        if (cmpMin > 0) {
          return PointValues.Relation.CELL_OUTSIDE_QUERY;
        }
        int cmpMax =
            Arrays.compareUnsigned(
                maxPackedValue, offset, offset + bytesPerDim, pointBytes, offset,
                offset + bytesPerDim);
        if (cmpMax < 0) {
          return PointValues.Relation.CELL_OUTSIDE_QUERY;
        }
        if (cmpMin != 0 || cmpMax != 0) {
          crosses = true;
        }
      }
      if (crosses) {
        return PointValues.Relation.CELL_CROSSES_QUERY;
      } else {
        // NOTE: we only hit this if we are on a cell whose min and max values are exactly equal to
        // our point, which can easily happen if many docs share this one value
        return PointValues.Relation.CELL_INSIDE_QUERY;
      }
    }
  }

  public String getField() {
    return field;
  }

  public int getNumDims() {
    return numDims;
  }

  public int getBytesPerDim() {
    return bytesPerDim;
  }

  @Override
  public final int hashCode() {
    int hash = classHash();
    hash = 31 * hash + field.hashCode();
    hash = 31 * hash + sortedPackedPointsHashCode;
    hash = 31 * hash + numDims;
    hash = 31 * hash + bytesPerDim;
    hash = 31 * hash + maxDocFreq;
    return hash;
  }

  @Override
  public final boolean equals(Object other) {
    return sameClassAs(other) && equalsTo(getClass().cast(other));
  }

  private boolean equalsTo(PointSetQuery other) {
    return other.field.equals(field)
        && other.numDims == numDims
        && other.bytesPerDim == bytesPerDim
        && other.sortedPackedPointsHashCode == sortedPackedPointsHashCode
        && other.sortedPackedPoints.equals(sortedPackedPoints)
        && other.maxDocFreq == maxDocFreq;
  }

  @Override
  public final String toString(String field) {
    final StringBuilder sb = new StringBuilder();
    if (this.field.equals(field) == false) {
      sb.append(this.field);
      sb.append(':');
    }
    sb.append("{");
    PrefixCodedTerms.TermIterator iterator = sortedPackedPoints.iterator();
    byte[] pointBytes = new byte[numDims * bytesPerDim];
    boolean first = true;
    for (BytesRef point = iterator.next(); point != null; point = iterator.next()) {
      if (first == false) {
        sb.append(" ");
      }
      first = false;
      System.arraycopy(point.bytes, point.offset, pointBytes, 0, pointBytes.length);
      sb.append(toString(pointBytes));
    }
    sb.append("}");
    return sb.toString();
  }

  @Override
  public void visit(QueryVisitor visitor) {
    visitor.visitLeaf(this);
  }

  /** Renders one packed point value in human-readable form (per-type, see factories above). */
  protected abstract String toString(byte[] value);
}
/*
 * SPDX-License-Identifier: MIT
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2015-2021 <Your name and contributors>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.github.games647.fastlogin.core.shared;

import com.github.games647.craftapi.resolver.MojangResolver;
import com.github.games647.craftapi.resolver.http.RotatingProxySelector;
import com.github.games647.fastlogin.core.CommonUtil;
import com.github.games647.fastlogin.core.RateLimiter;
import com.github.games647.fastlogin.core.hooks.AuthPlugin;
import com.github.games647.fastlogin.core.hooks.DefaultPasswordGenerator;
import com.github.games647.fastlogin.core.hooks.PasswordGenerator;
import com.github.games647.fastlogin.core.storage.MySQLStorage;
import com.github.games647.fastlogin.core.storage.SQLStorage;
import com.github.games647.fastlogin.core.storage.SQLiteStorage;
import com.google.common.net.HostAndPort;
import com.zaxxer.hikari.HikariConfig;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.Proxy.Type;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import net.md_5.bungee.config.Configuration;
import net.md_5.bungee.config.ConfigurationProvider;
import net.md_5.bungee.config.YamlConfiguration;

import org.slf4j.Logger;

import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;

/**
 * Platform-independent core of the FastLogin plugin: loads config/messages, sets up the
 * SQL storage backend, and owns shared state (rate limiter, Mojang resolver, pending logins).
 *
 * @param <P> GameProfile class
 * @param <C> CommandSender
 * @param <T> Plugin class
 */
public class FastLoginCore<P extends C, C, T extends PlatformPlugin<C>> {

    // Upper bound (ms) for the anti-bot expire window; larger configured values are clamped.
    private static final long MAX_EXPIRE_RATE = 1_000_000;

    // Translated message key -> colored message text; filled in load().
    private final Map<String, String> localeMessages = new ConcurrentHashMap<>();

    private final ConcurrentMap<String, Object> pendingLogin = CommonUtil.buildCache(5, -1);
    private final Collection<UUID> pendingConfirms = new HashSet<>();
    private final T plugin;

    private final MojangResolver resolver = new MojangResolver();

    private Configuration config;
    private SQLStorage storage;
    private RateLimiter rateLimiter;
    private PasswordGenerator<P> passwordGenerator = new DefaultPasswordGenerator<>();
    private AuthPlugin<P> authPlugin;

    public FastLoginCore(T plugin) {
        this.plugin = plugin;
    }

    /**
     * Copies default config/messages files, loads them, builds the locale-message cache,
     * the anti-bot rate limiter and the Mojang resolver (proxies + outgoing addresses).
     * On YAML load failure, logs and returns early, leaving config-dependent state unset.
     */
    public void load() {
        saveDefaultFile("messages.yml");
        saveDefaultFile("config.yml");

        try {
            config = loadFile("config.yml");
            Configuration messages = loadFile("messages.yml");

            messages.getKeys()
                    .stream()
                    .filter(key -> messages.get(key) != null)
                    .collect(toMap(identity(), messages::get))
                    .forEach((key, message) -> {
                        String colored = CommonUtil.translateColorCodes((String) message);
                        if (!colored.isEmpty()) {
                            // "/newline" is the config-file marker for a line break
                            localeMessages.put(key, colored.replace("/newline", "\n"));
                        }
                    });
        } catch (IOException ioEx) {
            plugin.getLog().error("Failed to load yaml files", ioEx);
            return;
        }

        int maxCon = config.getInt("anti-bot.connections", 200);
        // config value is in minutes; convert to milliseconds and clamp
        long expireTime = config.getLong("anti-bot.expire", 5) * 60 * 1_000L;
        if (expireTime > MAX_EXPIRE_RATE) {
            expireTime = MAX_EXPIRE_RATE;
        }

        rateLimiter = new RateLimiter(maxCon, expireTime);
        Set<Proxy> proxies = config.getStringList("proxies")
                .stream()
                .map(HostAndPort::fromString)
                .map(proxy -> new InetSocketAddress(proxy.getHost(), proxy.getPort()))
                .map(sa -> new Proxy(Type.HTTP, sa))
                .collect(toSet());

        Collection<InetAddress> addresses = new HashSet<>();
        for (String localAddress : config.getStringList("ip-addresses")) {
            try {
                // config stores addresses with '-' instead of '.' separators
                addresses.add(InetAddress.getByName(localAddress.replace('-', '.')));
            } catch (UnknownHostException ex) {
                plugin.getLog().error("IP-Address is unknown to us", ex);
            }
        }

        resolver.setMaxNameRequests(config.getInt("mojang-request-limit"));
        resolver.setProxySelector(new RotatingProxySelector(proxies));
        resolver.setOutgoingAddresses(addresses);
    }

    /**
     * Loads a YAML file from the plugin folder, layered on top of the bundled resource of the
     * same name so missing keys fall back to the defaults.
     */
    private Configuration loadFile(String fileName) throws IOException {
        ConfigurationProvider configProvider = ConfigurationProvider.getProvider(YamlConfiguration.class);

        Configuration defaults;
        try (InputStream defaultStream = getClass().getClassLoader().getResourceAsStream(fileName)) {
            defaults = configProvider.load(defaultStream);
        }

        Path file = plugin.getPluginFolder().resolve(fileName);
        Configuration config;
        try (Reader reader = Files.newBufferedReader(file)) {
            config = configProvider.load(reader, defaults);
        }

        // explicitly add keys here, because Configuration.getKeys doesn't return the keys
        // from the default configuration
        for (String key : defaults.getKeys()) {
            config.set(key, config.get(key));
        }

        return config;
    }

    public MojangResolver getResolver() {
        return resolver;
    }

    public SQLStorage getStorage() {
        return storage;
    }

    public T getPlugin() {
        return plugin;
    }

    /** Sends the translated message for {@code key} to {@code receiver}, if it exists. */
    public void sendLocaleMessage(String key, C receiver) {
        String message = localeMessages.get(key);
        if (message != null) {
            plugin.sendMultiLineMessage(receiver, message);
        }
    }

    public String getMessage(String key) {
        return localeMessages.get(key);
    }

    /**
     * Creates the storage backend (SQLite or MySQL/MariaDB, chosen by the configured JDBC
     * driver) and its tables.
     *
     * @return true on success; false when the driver is missing or table creation fails
     */
    public boolean setupDatabase() {
        String driver = config.getString("driver");
        if (!checkDriver(driver)) {
            return false;
        }

        HikariConfig databaseConfig = new HikariConfig();
        databaseConfig.setDriverClassName(driver);

        String database = config.getString("database");
        // config values are in seconds; Hikari expects milliseconds
        databaseConfig.setConnectionTimeout(config.getInt("timeout", 30) * 1_000L);
        databaseConfig.setMaxLifetime(config.getInt("lifetime", 30) * 1_000L);

        if (driver.contains("sqlite")) {
            storage = new SQLiteStorage(this, database, databaseConfig);
        } else {
            String host = config.get("host", "");
            int port = config.get("port", 3306);
            boolean useSSL = config.get("useSSL", false);
            if (useSSL) {
                databaseConfig.addDataSourceProperty("allowPublicKeyRetrieval",
                        config.getBoolean("allowPublicKeyRetrieval", false));
                databaseConfig.addDataSourceProperty("serverRSAPublicKeyFile",
                        config.getString("ServerRSAPublicKeyFile"));
                databaseConfig.addDataSourceProperty("sslMode", config.getString("sslMode", "Required"));
            }

            databaseConfig.setUsername(config.get("username", ""));
            databaseConfig.setPassword(config.getString("password"));
            storage = new MySQLStorage(this, host, port, database, databaseConfig, useSSL);
        }

        try {
            storage.createTables();
            return true;
        } catch (Exception ex) {
            plugin.getLog().warn("Failed to setup database. Disabling plugin...", ex);
            return false;
        }
    }

    /** Returns true when the given JDBC driver class is present on the classpath. */
    private boolean checkDriver(String className) {
        try {
            Class.forName(className);
            return true;
        } catch (ClassNotFoundException notFoundEx) {
            Logger log = plugin.getLog();
            log.warn("This driver {} is not supported on this platform", className);
            log.warn("Please choose MySQL (Spigot+BungeeCord), SQLite (Spigot+Sponge) or MariaDB (Sponge)",
                    notFoundEx);
        }

        return false;
    }

    public Configuration getConfig() {
        return config;
    }

    public PasswordGenerator<P> getPasswordGenerator() {
        return passwordGenerator;
    }

    public void setPasswordGenerator(PasswordGenerator<P> passwordGenerator) {
        this.passwordGenerator = passwordGenerator;
    }

    public ConcurrentMap<String, Object> getPendingLogin() {
        return pendingLogin;
    }

    public Collection<UUID> getPendingConfirms() {
        return pendingConfirms;
    }

    public AuthPlugin<P> getAuthPluginHook() {
        return authPlugin;
    }

    public RateLimiter getRateLimiter() {
        return rateLimiter;
    }

    public void setAuthPluginHook(AuthPlugin<P> authPlugin) {
        this.authPlugin = authPlugin;
    }

    /** Copies the bundled default file into the plugin folder if it doesn't exist yet. */
    public void saveDefaultFile(String fileName) {
        Path dataFolder = plugin.getPluginFolder();

        try {
            Files.createDirectories(dataFolder);

            Path configFile = dataFolder.resolve(fileName);
            if (Files.notExists(configFile)) {
                try (InputStream defaultStream = getClass().getClassLoader().getResourceAsStream(fileName)) {
                    Files.copy(Objects.requireNonNull(defaultStream), configFile);
                }
            }
        } catch (IOException ioExc) {
            plugin.getLog().error("Cannot create plugin folder {}", dataFolder, ioExc);
        }
    }

    /** Shuts down the scheduler and closes the storage backend, if one was created. */
    public void close() {
        plugin.getLog().info("Safely shutting down scheduler. This could take up to one minute.");
        plugin.getScheduler().shutdown();

        if (storage != null) {
            storage.close();
        }
    }
}
/*
 * Copyright 2016 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.pce.web;

import static javax.ws.rs.core.Response.Status.OK;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.LinkedList;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.onosproject.incubator.net.tunnel.Tunnel;
import org.onosproject.net.DeviceId;
import org.onosproject.net.intent.Constraint;
import org.onosproject.pce.pceservice.PcePath;
import org.onosproject.pce.pceservice.DefaultPcePath;
import org.onosproject.pce.pceservice.LspType;
import org.onosproject.rest.AbstractWebResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Query and program pce path.
 */
@Path("path")
public class PcePathWebResource extends AbstractWebResource {

    private final Logger log = LoggerFactory.getLogger(PcePathWebResource.class);
    public static final String PCE_PATH_NOT_FOUND = "Path not found";
    public static final String PCE_PATH_ID_EXIST = "Path exists";
    public static final String PCE_PATH_ID_NOT_EXIST = "Path does not exist for the identifier";

    /**
     * Retrieve details of all paths created.
     *
     * @return 200 OK with a JSON object containing a "paths" array
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response queryAllPath() {
        log.debug("Query all paths.");
        //TODO: need to uncomment below line once queryAllPath method is added to PceService
        Iterable<Tunnel> tunnels = null; // = get(PceService.class).queryAllPath();
        ObjectNode result = mapper().createObjectNode();
        ArrayNode pathEntry = result.putArray("paths");
        if (tunnels != null) {
            // Encode each known tunnel as a PcePath JSON object.
            for (final Tunnel tunnel : tunnels) {
                PcePath path = DefaultPcePath.builder().of(tunnel).build();
                pathEntry.add(codec(PcePath.class).encode(path, this));
            }
        }
        return ok(result.toString()).build();
    }

    /**
     * Retrieve details of a specified path id.
     *
     * @param id path id
     * @return 200 OK, 404 if given identifier does not exist
     */
    @GET
    @Path("{path_id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response queryPath(@PathParam("path_id") String id) {
        log.debug("Query path by identifier {}.", id);
        //TODO: need to uncomment below lines once queryPath method is added to PceService
        Tunnel tunnel = null; // = nullIsNotFound(get(PceService.class).queryPath(PcePathId.of(id)),
                              //PCE_PATH_NOT_FOUND);
        PcePath path = DefaultPcePath.builder().of(tunnel).build();
        ObjectNode result = mapper().createObjectNode();
        result.set("path", codec(PcePath.class).encode(path, this));
        return ok(result.toString()).build();
    }

    /**
     * Creates a new path.
     *
     * @param stream pce path from json
     * @return status of the request ("true"/"false" body)
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response setupPath(InputStream stream) {
        log.debug("Setup path.");
        try {
            ObjectNode jsonTree = (ObjectNode) mapper().readTree(stream);
            JsonNode port = jsonTree.get("path");
            PcePath path = codec(PcePath.class).decode((ObjectNode) port, this);

            // These locals feed the pending PceService.setupPath(...) call below; kept so the
            // TODO integration remains a one-line change.
            DeviceId srcDevice = DeviceId.deviceId(path.source());
            DeviceId dstDevice = DeviceId.deviceId(path.destination());
            LspType lspType = path.lspType();
            List<Constraint> listConstrnt = new LinkedList<>();

            // add cost
            //TODO: need to uncomment below lines once Bandwidth and Cost constraint classes are ready
            //CostConstraint.Type costType = CostConstraint.Type.values()[Integer.valueOf(path.constraint().cost())];
            //listConstrnt.add(CostConstraint.of(costType));

            // add bandwidth. Data rate unit is in BPS.
            //listConstrnt.add(LocalBandwidthConstraint.of(Double.valueOf(path.constraint().bandwidth()), DataRateUnit
            //        .valueOf("BPS")));

            //TODO: need to uncomment below lines once setupPath method is modified in PceService
            boolean isSuccess = true; // = (null != get(PceService.class)
                    //.setupPath(srcDevice, dstDevice, path.name(), listConstrnt, lspType)) ? true : false;
            return Response.status(OK).entity(String.valueOf(isSuccess)).build();
        } catch (IOException e) {
            // Pass the throwable itself so SLF4J records the full stack trace.
            log.error("Exception while creating path.", e);
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Update details of a specified path id.
     *
     * @param id path id
     * @param stream pce path from json
     * @return 200 OK, 404 if given identifier does not exist
     */
    @PUT
    @Path("{path_id}")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updatePath(@PathParam("path_id") String id, final InputStream stream) {
        log.debug("Update path by identifier {}.", id);
        try {
            ObjectNode jsonTree = (ObjectNode) mapper().readTree(stream);
            JsonNode pathNode = jsonTree.get("path");
            PcePath path = codec(PcePath.class).decode((ObjectNode) pathNode, this);
            // Assign cost
            List<Constraint> constrntList = new LinkedList<>();
            //TODO: need to uncomment below lines once CostConstraint class is ready
            if (path.costConstraint() != null) {
                //CostConstraint.Type costType = CostConstraint.Type.values()[path.constraint().cost()];
                //constrntList.add(CostConstraint.of(costType));
            }

            // Assign bandwidth. Data rate unit is in BPS.
            if (path.bandwidthConstraint() != null) {
                //TODO: need to uncomment below lines once BandwidthConstraint class is ready
                //constrntList.add(LocalBandwidthConstraint
                //        .of(path.constraint().bandwidth(), DataRateUnit.valueOf("BPS")));
            }

            //TODO: need to uncomment below line once updatePath is added to PceService
            boolean result = true; // = (null != (get(PceService.class).updatePath(PcePathId.of(id), constrntList)))
                    //? true : false;
            return Response.status(OK).entity(String.valueOf(result)).build();
        } catch (IOException e) {
            // Pass the throwable itself so SLF4J records the full stack trace.
            log.error("Update path failed.", e);
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Release a specified path.
     *
     * @param id path id
     * @return 200 OK, 404 if given identifier does not exist
     */
    @Path("{path_id}")
    @DELETE
    public Response releasePath(@PathParam("path_id") String id) {
        log.debug("Deletes path by identifier {}.", id);
        //TODO: need to uncomment below lines once releasePath method is added to PceService
        boolean isSuccess = true; // = nullIsNotFound(get(PceService.class).releasePath(PcePathId.of(id)),
                //PCE_PATH_NOT_FOUND);
        if (!isSuccess) {
            log.debug("Path identifier {} does not exist", id);
        }
        return Response.status(OK).entity(String.valueOf(isSuccess)).build();
    }
}
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.test.fixtures.file;

import groovy.lang.Closure;
import org.gradle.api.GradleException;
import org.gradle.test.fixtures.ConcurrentTestUtil;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;

import java.io.IOException;
import java.util.Random;
import java.util.regex.Pattern;

/**
 * A JUnit rule which provides a unique temporary folder for the test.
 *
 * Note: to avoid 260 char path length limitation on Windows, we should keep the test dir path
 * as short as possible, ideally &lt; 90 chars (from repo root to test dir root, e.g.
 * "core/build/tmp/test files/{TestClass}/{testMethod}/qqlj8"),
 * or &lt; 40 chars for "{TestClass}/{testMethod}/qqlj8"
 */
abstract class AbstractTestDirectoryProvider implements TestRule, TestDirectoryProvider {
    protected final TestFile root;

    // Shortened test-class simple name; combined with the method name to form the dir prefix.
    private final String className;

    private static final Random RANDOM = new Random();
    private static final int ALL_DIGITS_AND_LETTERS_RADIX = 36;
    private static final int MAX_RANDOM_PART_VALUE = Integer.parseInt("zzzzz", ALL_DIGITS_AND_LETTERS_RADIX);
    // Windows device names (CON, PRN, AUX, NUL, COM1..9, LPT1..9) cannot be used as file names.
    private static final Pattern WINDOWS_RESERVED_NAMES = Pattern.compile("(con)|(prn)|(aux)|(nul)|(com\\d)|(lpt\\d)", Pattern.CASE_INSENSITIVE);

    private String prefix;
    private TestFile dir;
    private boolean cleanup = true;
    private boolean suppressCleanupErrors = false;

    protected AbstractTestDirectoryProvider(TestFile root, Class<?> testClass) {
        this.root = root;
        this.className = shortenPath(testClass.getSimpleName(), 16);
    }

    @Override
    public void suppressCleanup() {
        cleanup = false;
    }

    @Override
    public void suppressCleanupErrors() {
        suppressCleanupErrors = true;
    }

    public boolean isCleanup() {
        return cleanup;
    }

    /**
     * Deletes the test directory, retrying via ConcurrentTestUtil.poll until the delete
     * succeeds (files may still be briefly held open by the just-finished test).
     */
    public void cleanup() {
        if (cleanup && dir != null && dir.exists()) {
            ConcurrentTestUtil.poll(new Closure(null, null) {
                // doCall is invoked reflectively by the Groovy Closure machinery.
                @SuppressWarnings("UnusedDeclaration")
                void doCall() throws IOException {
                    dir.forceDeleteDir();
                }
            });
        }
    }

    @Override
    public Statement apply(final Statement base, Description description) {
        init(description.getMethodName());
        return new TestDirectoryCleaningStatement(base, description);
    }

    /** Runs the test, then cleans up the test directory (reporting or suppressing failures). */
    private class TestDirectoryCleaningStatement extends Statement {
        private final Statement base;
        private final Description description;

        TestDirectoryCleaningStatement(Statement base, Description description) {
            this.base = base;
            this.description = description;
        }

        @Override
        public void evaluate() throws Throwable {
            // implicitly don't clean up if this throws
            base.evaluate();

            try {
                cleanup();
            } catch (Exception e) {
                if (suppressCleanupErrors()) {
                    System.err.println(cleanupErrorMessage());
                    e.printStackTrace(System.err);
                } else {
                    throw new GradleException(cleanupErrorMessage(), e);
                }
            }
        }

        // Cleanup errors are suppressed when requested explicitly or when the test (class or
        // method) is annotated with @LeaksFileHandles.
        private boolean suppressCleanupErrors() {
            return suppressCleanupErrors
                || testClass().getAnnotation(LeaksFileHandles.class) != null
                || description.getAnnotation(LeaksFileHandles.class) != null;
        }

        private Class<?> testClass() {
            return description.getTestClass();
        }

        private String cleanupErrorMessage() {
            return "Couldn't delete test dir for `" + displayName() + "` (test is holding files open). "
                + "In order to find out which files are held open, you may find `org.gradle.integtests.fixtures.executer.GradleExecuter.withFileLeakDetection` useful.";
        }

        private String displayName() {
            return description.getDisplayName();
        }
    }

    /** Computes the "{className}/{methodName}" prefix; idempotent once set. */
    protected void init(String methodName) {
        if (methodName == null) {
            // must be a @ClassRule; use the rule's class name instead
            methodName = getClass().getSimpleName();
        }
        if (prefix == null) {
            String safeMethodName = shortenPath(methodName.replaceAll("[^\\w]", "_"), 16);
            prefix = String.format("%s/%s", className, safeMethodName);
        }
    }

    /*
     Shorten a long name to at most {expectedMaxLength}, replace middle characters with ".".
     */
    private String shortenPath(String longName, int expectedMaxLength) {
        if (longName.length() <= expectedMaxLength) {
            return longName;
        } else {
            // Keep the head and the last 4 chars, joined by "." — total == expectedMaxLength.
            return longName.substring(0, expectedMaxLength - 5) + "." + longName.substring(longName.length() - 4);
        }
    }

    @Override
    public TestFile getTestDirectory() {
        if (dir == null) {
            dir = createUniqueTestDirectory();
        }
        return dir;
    }

    private TestFile createUniqueTestDirectory() {
        while (true) {
            // Use a random prefix to avoid reusing test directories
            String randomPrefix = Integer.toString(RANDOM.nextInt(MAX_RANDOM_PART_VALUE), ALL_DIGITS_AND_LETTERS_RADIX);
            if (WINDOWS_RESERVED_NAMES.matcher(randomPrefix).matches()) {
                continue;
            }
            TestFile dir = root.file(getPrefix(), randomPrefix);
            // mkdirs() returning false means the dir already existed — try another random name.
            if (dir.mkdirs()) {
                return dir;
            }
        }
    }

    private String getPrefix() {
        if (prefix == null) {
            // This can happen if this is used in a constructor or a @Before method. It also happens when using
            // @RunWith(SomeRunner) when the runner does not support rules.
            prefix = className;
        }
        return prefix;
    }

    public TestFile file(Object... path) {
        return getTestDirectory().file(path);
    }

    public TestFile createFile(Object... path) {
        return file(path).createFile();
    }

    public TestFile createDir(Object... path) {
        return file(path).createDir();
    }

    public TestFile getRoot() {
        return root;
    }
}
package anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.gui.controller; import java.util.ArrayList; import java.util.Random; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.gui.Main; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.handler.DialogHandler; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.libaries.ExternalApplicationModule; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.libaries.WindowModule; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.objects.MusicVideoRandomElement; import anonymerniklasistanonym.karaokemusicvideomanager.desktopclient.translations.Internationalization; import javafx.fxml.FXML; import javafx.scene.control.Button; import javafx.scene.control.Label; /** * The controller class for the random music video window. * * @author AnonymerNiklasistanonym <niklas.mikeler@gmail.com> | <a href= * "https://github.com/AnonymerNiklasistanonym">https://github.com/AnonymerNiklasistanonym</a> */ public class RandomWindowController { // FXML views /** * Random music video text label #1 of 5 */ @FXML private Label randomLable1; /** * Random music video text label #2 of 5 */ @FXML private Label randomLable2; /** * Random music video text label #3 of 5 */ @FXML private Label randomLable3; /** * Random music video text label #4 of 5 */ @FXML private Label randomLable4; /** * Random music video text label #5 of 5 */ @FXML private Label randomLable5; /** * Random music video add button #1 of 5 */ @FXML private Button randomAdd1; /** * Random music video add button #2 of 5 */ @FXML private Button randomAdd2; /** * Random music video add button #3 of 5 */ @FXML private Button randomAdd3; /** * Random music video add button #4 of 5 */ @FXML private Button randomAdd4; /** * Random music video add button #5 of 5 */ @FXML private Button randomAdd5; /** * Random music video play button #1 of 5 */ @FXML private Button randomPlay1; /** * 
Random music video play button #2 of 5 */ @FXML private Button randomPlay2; /** * Random music video play button #3 of 5 */ @FXML private Button randomPlay3; /** * Random music video play button #4 of 5 */ @FXML private Button randomPlay4; /** * Random music video play button #5 of 5 */ @FXML private Button randomPlay5; /** * Add all random music videos to the playlist */ @FXML private Button buttonAddAll; /** * Refresh the current random music videos */ @FXML private Button buttonRefresh; /** * All random music videos */ private MusicVideoRandomElement[] labelContent; /** * All random music video text labels */ private Label[] allLabels; /** * The main class to interact with the playlist */ private Main mainClass; /** * The main window controller class to update the playlist table */ private MainWindowController mainWindowController; /** * Setup for the window [controller] RandomWindow[Controller] * * @param mainClass * (Main | The main class) * @param mainWindowController * (MainWindowController | The main window controller) */ public void setWindowController(Main mainClass, MainWindowController mainWindowController) { // set the main class and main window controller this.mainClass = mainClass; this.mainWindowController = mainWindowController; // add all labels to an array this.allLabels = new Label[] { this.randomLable1, this.randomLable2, this.randomLable3, this.randomLable4, this.randomLable5 }; // create an array that has as many elements as the labels this.labelContent = new MusicVideoRandomElement[this.allLabels.length]; // now refresh/create the random music video list refreshRandom(); translateText(); } /** * Window text that should be translated on language change/load */ private void translateText() { this.randomPlay1.setText(Internationalization.translate("play")); this.randomPlay2.setText(Internationalization.translate("play")); this.randomPlay3.setText(Internationalization.translate("play")); 
this.randomPlay4.setText(Internationalization.translate("play")); this.randomPlay5.setText(Internationalization.translate("play")); this.randomAdd1.setText(Internationalization.translate("add")); this.randomAdd2.setText(Internationalization.translate("add")); this.randomAdd3.setText(Internationalization.translate("add")); this.randomAdd4.setText(Internationalization.translate("add")); this.randomAdd5.setText(Internationalization.translate("add")); this.buttonRefresh.setText(Internationalization.translate("Refresh")); this.buttonAddAll.setText(Internationalization.translate("Add all to Playlist")); } /** * This method get's called when the FXML file get's loaded */ @FXML private void initialize() { /** * Set icons */ // "add" symbol buttons final Button[] allAddButtons = new Button[] { this.randomAdd1, this.randomAdd2, this.randomAdd3, this.randomAdd4, this.randomAdd5, this.buttonAddAll }; for (int i = 0; i < allAddButtons.length; i++) { allAddButtons[i].setGraphic(WindowModule.createMenuIcon("add")); } // "play" symbol buttons final Button[] allPlayButtons = new Button[] { this.randomPlay1, this.randomPlay2, this.randomPlay3, this.randomPlay4, this.randomPlay5 }; for (int i = 0; i < allPlayButtons.length; i++) { allPlayButtons[i].setGraphic(WindowModule.createMenuIcon("play")); } // "refresh" button this.buttonRefresh.setGraphic(WindowModule.createMenuIcon("refresh")); } /** * Add an entry in the random list to the playlist */ private void addVideoMain(int position, boolean addAll) { // open a dialog to input name and comment final String[] authorComment = DialogHandler.createPlaylistEntry(this.mainWindowController.getNameOfAuthor()); // if at least the author wasn't null if (authorComment != null && authorComment[0] != null) { // save the new name in the main controller class this.mainWindowController.setNameOfAuthor(authorComment[0]); // set the comment to "" if it was null if (authorComment[1] == null) { authorComment[1] = ""; } // if all should be added if 
(addAll) { // iterate through this loop through all labels for (int i = 0; i < this.labelContent.length; i++) { // and add each element this.mainClass.getMusicVideohandler().addMusicVideoToPlaylist(this.labelContent[i].getIndex(), authorComment[0], authorComment[1]); } } else { // else only add the one at the given position this.mainClass.getMusicVideohandler().addMusicVideoToPlaylist(this.labelContent[position].getIndex(), authorComment[0], authorComment[1]); } // last but not least refresh the playlist table in the main window this.mainWindowController.refreshMusicVideoPlaylistTable(); } } /** * Add an entry in the random list to the playlist */ private void addVideo(int position) { addVideoMain(position, false); } /** * Add an entry in the random list to the playlist */ private void addAllVideos() { addVideoMain(0, true); } /** * Add the 1. entry to the playlist */ @FXML private void addVideo1() { addVideo(0); } /** * Add the 2. entry to the playlist */ @FXML private void addVideo2() { addVideo(1); } /** * Add the 3. entry to the playlist */ @FXML private void addVideo3() { addVideo(2); } /** * Add the 4. entry to the playlist */ @FXML private void addVideo4() { addVideo(3); } /** * Add the 5. entry to the playlist */ @FXML private void addVideo5() { addVideo(4); } /** * Add all entries to the playlist */ @FXML private void addVideoAll() { addAllVideos(); } /** * Play the music video from the entry position * * @param position * (Integer) */ private void playVideo(int position) { ExternalApplicationModule.openFile(this.labelContent[position].getMusicVideo().getPath().toFile()); } /** * Add the music video of the 1. entry */ @FXML private void playVideo1() { playVideo(0); } /** * Add the music video of the 2. entry */ @FXML private void playVideo2() { playVideo(1); } /** * Add the music video of the 3. entry */ @FXML private void playVideo3() { playVideo(2); } /** * Add the music video of the 4. 
entry */ @FXML private void playVideo4() { playVideo(3); } /** * Add the music video of the 5. entry */ @FXML private void playVideo5() { playVideo(4); } /** * Create an array with random number where no number is doubled contained * * @param minNumber * (Integer | minimal number in array) * @param maxNumber * (Integer | maximal number in array) * @param sizeOfArray * (Integer | how many numbers should be in the array) * @return Integer[] */ public Integer[] arrayWithRandomNumbersNoDuplicates(int minNumber, int maxNumber, int sizeOfArray) { ArrayList<Integer> randomNonDuplicateInteger = new ArrayList<Integer>(); Random randomnumberGenerator = new Random(); while (randomNonDuplicateInteger.size() < sizeOfArray) { int random = randomnumberGenerator.nextInt((maxNumber - minNumber) + 1) + minNumber; if (!randomNonDuplicateInteger.contains(random)) { randomNonDuplicateInteger.add(random); } } return randomNonDuplicateInteger.toArray(new Integer[0]); } /** * Update/Create the random music video table */ @FXML private void refreshRandom() { Integer[] randomNumbers = arrayWithRandomNumbersNoDuplicates(0, this.mainClass.getMusicVideohandler().getMusicVideoList().length - 1, this.labelContent.length); // do for every label for (int i = 0; i < this.labelContent.length; i++) { // create a random music video element with this number and save it this.labelContent[i] = new MusicVideoRandomElement( this.mainClass.getMusicVideohandler().getMusicVideoList()[randomNumbers[i]], randomNumbers[i]); // set the text to each label of this entry this.allLabels[i].setText(this.labelContent[i].getMusicVideo().getTitle() + " " + Internationalization.translate("from") + " " + this.labelContent[i].getMusicVideo().getArtist()); } } }
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.internal.operators;

import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.*;

import rx.*;
import rx.Completable.OnSubscribe;
import rx.Observable;
import rx.exceptions.CompositeException;
import rx.plugins.RxJavaHooks;
import rx.subscriptions.CompositeSubscription;

/**
 * Subscribes to the Completables emitted by the source Observable, running up to
 * {@code maxConcurrency} of them at once, and completes/errors the downstream
 * CompletableSubscriber when all of them (and the source) have terminated.
 * With {@code delayErrors}, errors are collected and reported only at the end.
 */
public final class CompletableOnSubscribeMerge implements OnSubscribe {
    final Observable<Completable> source;
    final int maxConcurrency;
    final boolean delayErrors;

    @SuppressWarnings("unchecked")
    public CompletableOnSubscribeMerge(Observable<? extends Completable> source, int maxConcurrency, boolean delayErrors) {
        // Safe variance cast: Completables are only consumed from the source.
        this.source = (Observable<Completable>)source;
        this.maxConcurrency = maxConcurrency;
        this.delayErrors = delayErrors;
    }

    @Override
    public void call(CompletableSubscriber s) {
        CompletableMergeSubscriber parent = new CompletableMergeSubscriber(s, maxConcurrency, delayErrors);
        // Hand the parent to downstream for unsubscription before subscribing upstream.
        s.onSubscribe(parent);
        source.unsafeSubscribe(parent);
    }

    static final class CompletableMergeSubscriber
    extends Subscriber<Completable> {
        final CompletableSubscriber actual;
        final CompositeSubscription set;
        final boolean delayErrors;

        // Set once the source Observable has terminated; no further inner subscriptions.
        volatile boolean done;

        // Lazily-created error queue; creation is raced via CAS in getOrCreateErrors().
        final AtomicReference<Queue<Throwable>> errors;

        // Guards that the terminal onError is delivered downstream at most once.
        final AtomicBoolean once;

        // Counts the source (1) plus each active inner Completable; termination fires
        // when this drops to zero.
        final AtomicInteger wip;

        public CompletableMergeSubscriber(CompletableSubscriber actual, int maxConcurrency, boolean delayErrors) {
            this.actual = actual;
            this.delayErrors = delayErrors;
            this.set = new CompositeSubscription();
            this.wip = new AtomicInteger(1);
            this.once = new AtomicBoolean();
            this.errors = new AtomicReference<Queue<Throwable>>();
            if (maxConcurrency == Integer.MAX_VALUE) {
                // Unbounded concurrency: request everything up front.
                request(Long.MAX_VALUE);
            } else {
                // Bounded: request one more only as inner Completables terminate.
                request(maxConcurrency);
            }
        }

        /** Returns the shared error queue, creating it atomically on first use. */
        Queue<Throwable> getOrCreateErrors() {
            Queue<Throwable> q = errors.get();

            if (q != null) {
                return q;
            }

            q = new ConcurrentLinkedQueue<Throwable>();
            if (errors.compareAndSet(null, q)) {
                return q;
            }
            // Lost the CAS race: another thread installed the queue first.
            return errors.get();
        }

        @Override
        public void onNext(Completable t) {
            if (done) {
                return;
            }

            // Track this inner Completable in wip before subscribing to it.
            wip.getAndIncrement();

            t.unsafeSubscribe(new CompletableSubscriber() {
                Subscription d;
                boolean innerDone;
                @Override
                public void onSubscribe(Subscription d) {
                    this.d = d;
                    set.add(d);
                }

                @Override
                public void onError(Throwable e) {
                    if (innerDone) {
                        // Terminal event already seen for this inner; route to the hook.
                        RxJavaHooks.onError(e);
                        return;
                    }
                    innerDone = true;
                    set.remove(d);
                    // Error must be queued before terminate() so terminate() sees it.
                    getOrCreateErrors().offer(e);

                    terminate();
                    // When delaying errors, keep the merge going by requesting a replacement.
                    if (delayErrors && !done) {
                        request(1);
                    }
                }

                @Override
                public void onCompleted() {
                    if (innerDone) {
                        return;
                    }
                    innerDone = true;
                    set.remove(d);

                    terminate();
                    if (!done) {
                        request(1);
                    }
                }
            });
        }

        @Override
        public void onError(Throwable t) {
            if (done) {
                RxJavaHooks.onError(t);
                return;
            }
            getOrCreateErrors().offer(t);
            done = true;
            terminate();
        }

        @Override
        public void onCompleted() {
            if (done) {
                return;
            }
            done = true;
            terminate();
        }

        /**
         * Decrements wip; on reaching zero emits the terminal event downstream.
         * In eager-error mode (!delayErrors) also emits any queued error immediately,
         * without waiting for the remaining inners.
         */
        void terminate() {
            if (wip.decrementAndGet() == 0) {
                Queue<Throwable> q = errors.get();
                if (q == null || q.isEmpty()) {
                    actual.onCompleted();
                } else {
                    Throwable e = collectErrors(q);
                    if (once.compareAndSet(false, true)) {
                        actual.onError(e);
                    } else {
                        // Downstream already received a terminal error; inform the hook.
                        RxJavaHooks.onError(e);
                    }
                }
            } else
            if (!delayErrors) {
                Queue<Throwable> q = errors.get();
                if (q != null && !q.isEmpty()) {
                    Throwable e = collectErrors(q);
                    if (once.compareAndSet(false, true)) {
                        actual.onError(e);
                    } else {
                        RxJavaHooks.onError(e);
                    }
                }
            }
        }
    }

    /**
     * Collects the Throwables from the queue, adding subsequent Throwables as suppressed to
     * the first Throwable and returns it.
     * @param q the queue to drain
     * @return the Throwable containing all other Throwables as suppressed
     */
    public static Throwable collectErrors(Queue<Throwable> q) {
        List<Throwable> list = new ArrayList<Throwable>();

        Throwable t;
        while ((t = q.poll()) != null) {
            list.add(t);
        }

        if (list.isEmpty()) {
            return null;
        }
        if (list.size() == 1) {
            return list.get(0);
        }
        // Multiple errors are aggregated into a single CompositeException.
        return new CompositeException(list);
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kie.workbench.common.dmn.client.editors.expressions.types.function;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import javax.enterprise.event.Event;

import com.ait.lienzo.shared.core.types.EventPropagationMode;
import org.jboss.errai.common.client.api.IsElement;
import org.kie.workbench.common.dmn.api.definition.HasExpression;
import org.kie.workbench.common.dmn.api.definition.HasName;
import org.kie.workbench.common.dmn.api.definition.v1_1.Expression;
import org.kie.workbench.common.dmn.api.definition.v1_1.FunctionDefinition;
import org.kie.workbench.common.dmn.api.definition.v1_1.InformationItem;
import org.kie.workbench.common.dmn.api.property.dmn.Name;
import org.kie.workbench.common.dmn.api.property.dmn.QName;
import org.kie.workbench.common.dmn.client.commands.expressions.types.function.AddParameterCommand;
import org.kie.workbench.common.dmn.client.commands.expressions.types.function.SetKindCommand;
import org.kie.workbench.common.dmn.client.commands.general.SetCellValueCommand;
import org.kie.workbench.common.dmn.client.editors.expressions.types.ExpressionEditorDefinition;
import org.kie.workbench.common.dmn.client.editors.expressions.types.ExpressionEditorDefinitions;
import org.kie.workbench.common.dmn.client.editors.expressions.types.ExpressionType;
import org.kie.workbench.common.dmn.client.editors.expressions.types.context.ExpressionCellValue;
import org.kie.workbench.common.dmn.client.editors.expressions.types.context.ExpressionEditorColumn;
import org.kie.workbench.common.dmn.client.events.ExpressionEditorSelectedEvent;
import org.kie.workbench.common.dmn.client.widgets.grid.BaseExpressionGrid;
import org.kie.workbench.common.dmn.client.widgets.grid.columns.factory.TextBoxSingletonDOMElementFactory;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridRow;
import org.kie.workbench.common.dmn.client.widgets.grid.model.GridCellTuple;
import org.kie.workbench.common.dmn.client.widgets.grid.model.GridCellValueTuple;
import org.kie.workbench.common.dmn.client.widgets.layer.DMNGridLayer;
import org.kie.workbench.common.dmn.client.widgets.panel.DMNGridPanel;
import org.kie.workbench.common.stunner.core.client.api.SessionManager;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.command.SessionCommandManager;
import org.uberfire.ext.wires.core.grids.client.model.GridColumn;
import org.uberfire.ext.wires.core.grids.client.widget.layer.impl.GridLayerRedrawManager;

/**
 * Grid-based editor for a DMN {@link FunctionDefinition} expression. The grid has a
 * single column/row holding the function body's nested expression editor; the header
 * shows the function name and its formal parameters. The "Kind" (FEEL/JAVA/PMML) and
 * the nested expression type are driven through {@link FunctionGridControls}.
 */
public class FunctionGrid extends BaseExpressionGrid<FunctionDefinition, FunctionUIModelMapper> implements FunctionGridControls.Presenter {

    private final FunctionGridControls controls;
    // Suppliers break a circular dependency: the definitions themselves reference grids.
    private final Supplier<ExpressionEditorDefinitions> expressionEditorDefinitionsSupplier;
    private final Supplier<ExpressionEditorDefinitions> supplementaryEditorDefinitionsSupplier;

    public FunctionGrid(final GridCellTuple parent,
                        final HasExpression hasExpression,
                        final Optional<FunctionDefinition> expression,
                        final Optional<HasName> hasName,
                        final DMNGridPanel gridPanel,
                        final DMNGridLayer gridLayer,
                        final SessionManager sessionManager,
                        final SessionCommandManager<AbstractCanvasHandler> sessionCommandManager,
                        final Supplier<ExpressionEditorDefinitions> expressionEditorDefinitionsSupplier,
                        final Supplier<ExpressionEditorDefinitions> supplementaryEditorDefinitionsSupplier,
                        final Event<ExpressionEditorSelectedEvent> editorSelectedEvent,
                        final FunctionGridControls controls,
                        final boolean nested) {
        super(parent,
              hasExpression,
              expression,
              hasName,
              gridPanel,
              gridLayer,
              new FunctionGridRenderer(nested),
              sessionManager,
              sessionCommandManager,
              editorSelectedEvent);
        this.controls = controls;
        this.expressionEditorDefinitionsSupplier = expressionEditorDefinitionsSupplier;
        this.supplementaryEditorDefinitionsSupplier = supplementaryEditorDefinitionsSupplier;

        setEventPropagationMode(EventPropagationMode.NO_ANCESTORS);

        // Deferred from the superclass constructor; see doInitialisation() below.
        super.doInitialisation();

        this.controls.init(this);
        this.controls.initKinds(Arrays.asList(FunctionDefinition.Kind.values()));
        this.controls.initExpressionTypes(expressionEditorDefinitionsSupplier.get().stream().map(ExpressionEditorDefinition::getType).collect(Collectors.toList()));
    }

    @Override
    protected void doInitialisation() {
        // Defer initialisation until after the constructor completes as
        // makeUiModelMapper needs expressionEditorDefinitionsSupplier to have been set
    }

    @Override
    public FunctionUIModelMapper makeUiModelMapper() {
        return new FunctionUIModelMapper(this::getModel,
                                         () -> expression,
                                         expressionEditorDefinitionsSupplier,
                                         supplementaryEditorDefinitionsSupplier);
    }

    @Override
    protected void initialiseUiColumns() {
        // Header text-box factory wired into the session command stack so edits are undoable.
        final TextBoxSingletonDOMElementFactory headerFactory = new TextBoxSingletonDOMElementFactory(gridPanel,
                                                                                                      gridLayer,
                                                                                                      this,
                                                                                                      sessionManager,
                                                                                                      sessionCommandManager,
                                                                                                      newHeaderHasNoValueCommand(),
                                                                                                      newHeaderHasValueCommand());
        // Two-row header: function name, then "(parameters)".
        final GridColumn expressionColumn = new ExpressionEditorColumn(Arrays.asList(new FunctionColumnNameHeaderMetaData(() -> hasName.orElse(HasName.NOP).getName().getValue(),
                                                                                                                          (s) -> hasName.orElse(HasName.NOP).getName().setValue(s),
                                                                                                                          headerFactory),
                                                                                     new FunctionColumnParametersHeaderMetaData(this::extractExpressionLanguage,
                                                                                                                                this::extractFormalParameters)),
                                                                       this);

        model.appendColumn(expressionColumn);

        // On the selection layer only this column needs rendering.
        getRenderer().setColumnRenderConstraint((isSelectionLayer, gridColumn) -> !isSelectionLayer || gridColumn.equals(expressionColumn));
    }

    @Override
    protected void initialiseUiModel() {
        expression.ifPresent(e -> {
            // One row whose single cell holds the function body's nested editor.
            model.appendRow(new DMNGridRow());
            uiModelMapper.fromDMNModel(0,
                                       0);
        });
    }

    @Override
    public Optional<IsElement> getEditorControls() {
        // Disable first, then re-enable according to the current Kind.
        controls.enableKind(false);
        controls.enableExpressionType(false);
        expression.ifPresent(e -> {
            final FunctionDefinition.Kind kind = extractExpressionLanguage();
            controls.initSelectedKind(kind);
            controls.enableKind(true);
            switch (kind) {
                case FEEL:
                    // Only FEEL functions allow choosing the nested expression type.
                    final Optional<ExpressionType> type = extractExpressionType();
                    type.ifPresent(t -> {
                        controls.initSelectedExpressionType(t);
                        controls.enableExpressionType(true);
                    });
                    break;
                case JAVA:
                case PMML:
                    // Supplementary (JAVA/PMML) editors have a fixed expression type.
            }
        });
        return Optional.of(controls);
    }

    /** Reads the function Kind from the model's additional attributes; defaults to FEEL. */
    private FunctionDefinition.Kind extractExpressionLanguage() {
        if (expression.isPresent()) {
            final FunctionDefinition function = expression.get();
            final Map<QName, String> attributes = function.getAdditionalAttributes();
            final String code = attributes.getOrDefault(FunctionDefinition.KIND_QNAME,
                                                        FunctionDefinition.Kind.FEEL.code());
            return FunctionDefinition.Kind.determineFromString(code);
        } else {
            return FunctionDefinition.Kind.FEEL;
        }
    }

    /** The function's formal parameters, or an empty list when no expression is set. */
    private List<InformationItem> extractFormalParameters() {
        if (expression.isPresent()) {
            final FunctionDefinition function = expression.get();
            return function.getFormalParameter();
        }
        return Collections.emptyList();
    }

    /** Resolves the editor type for the function's nested body expression, if any. */
    private Optional<ExpressionType> extractExpressionType() {
        if (expression.isPresent()) {
            final Expression e = expression.get().getExpression();
            final Optional<ExpressionEditorDefinition<Expression>> definition = expressionEditorDefinitionsSupplier.get().getExpressionEditorDefinition(Optional.ofNullable(e));
            if (definition.isPresent()) {
                return Optional.of(definition.get().getType());
            }
        }
        return Optional.empty();
    }

    @Override
    public void addFormalParameter() {
        expression.ifPresent(e -> {
            final InformationItem parameter = new InformationItem();
            // Auto-name the new parameter "p<index>".
            parameter.setName(new Name("p" + e.getFormalParameter().size()));
            sessionCommandManager.execute((AbstractCanvasHandler) sessionManager.getCurrentSession().getCanvasHandler(),
                                          new AddParameterCommand(e,
                                                                  parameter,
                                                                  gridLayer::batch));
        });
    }

    @Override
    public void setKind(final FunctionDefinition.Kind kind) {
        expression.ifPresent(function -> {
            switch (kind) {
                case FEEL:
                    // FEEL bodies default to a Literal Expression editor.
                    doSetKind(kind,
                              function,
                              expressionEditorDefinitionsSupplier.get().getExpressionEditorDefinition(ExpressionType.LITERAL_EXPRESSION));
                    break;
                case JAVA:
                    doSetKind(kind,
                              function,
                              supplementaryEditorDefinitionsSupplier.get().getExpressionEditorDefinition(ExpressionType.FUNCTION_JAVA));
                    break;
                case PMML:
                    doSetKind(kind,
                              function,
                              supplementaryEditorDefinitionsSupplier.get().getExpressionEditorDefinition(ExpressionType.FUNCTION_PMML));
            }
        });
    }

    /** Builds the nested editor for the chosen Kind, then delegates to the command-executing overload. */
    private void doSetKind(final FunctionDefinition.Kind kind,
                           final FunctionDefinition function,
                           final Optional<ExpressionEditorDefinition<Expression>> oDefinition) {
        oDefinition.ifPresent(definition -> {
            final GridCellTuple expressionParent = new GridCellTuple(0,
                                                                     0,
                                                                     model);
            final Optional<Expression> expression = definition.getModelClass();
            final Optional<BaseExpressionGrid> gridWidget = definition.getEditor(expressionParent,
                                                                                 hasExpression,
                                                                                 expression,
                                                                                 hasName,
                                                                                 true);
            doSetKind(kind,
                      function,
                      expression,
                      gridWidget);
        });
    }

    // Package/default visibility — presumably to allow tests to call it directly.
    void doSetKind(final FunctionDefinition.Kind kind,
                   final FunctionDefinition function,
                   final Optional<Expression> expression,
                   final Optional<BaseExpressionGrid> editor) {
        final GridCellValueTuple gcv = new GridCellValueTuple<>(0,
                                                                0,
                                                                model,
                                                                new ExpressionCellValue(editor));
        sessionCommandManager.execute((AbstractCanvasHandler) sessionManager.getCurrentSession().getCanvasHandler(),
                                      new SetKindCommand(gcv,
                                                         function,
                                                         kind,
                                                         expression,
                                                         () -> synchroniseViewWhenExpressionEditorChanged(editor)));
    }

    @Override
    public void setExpressionType(final ExpressionType type) {
        final Optional<ExpressionEditorDefinition<Expression>> expressionEditorDefinition = expressionEditorDefinitionsSupplier.get().getExpressionEditorDefinition(type);
        expressionEditorDefinition.ifPresent(ed -> {
            final Optional<Expression> expression = ed.getModelClass();
            final GridCellTuple expressionParent = new GridCellTuple(0,
                                                                     0,
                                                                     model);
            final Optional<BaseExpressionGrid> editor = ed.getEditor(expressionParent,
                                                                     hasExpression,
                                                                     expression,
                                                                     hasName,
                                                                     true);
            final GridCellValueTuple gcv = new GridCellValueTuple<>(0,
                                                                    0,
                                                                    model,
                                                                    new ExpressionCellValue(editor));
            sessionCommandManager.execute((AbstractCanvasHandler) sessionManager.getCurrentSession().getCanvasHandler(),
                                          new SetCellValueCommand(gcv,
                                                                  () -> uiModelMapper,
                                                                  () -> synchroniseViewWhenExpressionEditorChanged(editor)));
        });
    }

    /** Re-lays-out panel and layer after the nested editor changed, then re-selects it. */
    void synchroniseViewWhenExpressionEditorChanged(final Optional<BaseExpressionGrid> oEditor) {
        parent.onResize();
        gridPanel.refreshScrollPosition();
        gridPanel.updatePanelSize();
        gridLayer.batch(new GridLayerRedrawManager.PrioritizedCommand(0) {
            @Override
            public void execute() {
                gridLayer.draw();
                oEditor.ifPresent(gridLayer::select);
            }
        });
        // Refresh the Kind/Type controls to reflect the new state.
        getEditorControls();
    }
}
/**
 */
package eclassxmlschemacommon_2_0Simplified.impl;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

import eclassxmlschemacommon_2_0Simplified.Eclassxmlschemacommon_2_0SimplifiedPackage;
import eclassxmlschemacommon_2_0Simplified.UNITMAPPING;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>UNITMAPPING</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link eclassxmlschemacommon_2_0Simplified.impl.UNITMAPPINGImpl#getSourceUnit <em>Source Unit</em>}</li>
 *   <li>{@link eclassxmlschemacommon_2_0Simplified.impl.UNITMAPPINGImpl#getTargetUnit <em>Target Unit</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE(review): EMF-generated code. Methods tagged @generated are overwritten on
// model regeneration — do not hand-edit them; change the Ecore model instead.
public class UNITMAPPINGImpl extends MinimalEObjectImpl.Container implements UNITMAPPING {
	/**
	 * The default value of the '{@link #getSourceUnit() <em>Source Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSourceUnit()
	 * @generated
	 * @ordered
	 */
	protected static final String SOURCE_UNIT_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getSourceUnit() <em>Source Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSourceUnit()
	 * @generated
	 * @ordered
	 */
	protected String sourceUnit = SOURCE_UNIT_EDEFAULT;

	/**
	 * The default value of the '{@link #getTargetUnit() <em>Target Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTargetUnit()
	 * @generated
	 * @ordered
	 */
	protected static final String TARGET_UNIT_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getTargetUnit() <em>Target Unit</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTargetUnit()
	 * @generated
	 * @ordered
	 */
	protected String targetUnit = TARGET_UNIT_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected UNITMAPPINGImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return Eclassxmlschemacommon_2_0SimplifiedPackage.Literals.UNITMAPPING;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getSourceUnit() {
		return sourceUnit;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setSourceUnit(String newSourceUnit) {
		String oldSourceUnit = sourceUnit;
		sourceUnit = newSourceUnit;
		// Notify EMF adapters/observers only when someone is listening.
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__SOURCE_UNIT, oldSourceUnit, sourceUnit));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getTargetUnit() {
		return targetUnit;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTargetUnit(String newTargetUnit) {
		String oldTargetUnit = targetUnit;
		targetUnit = newTargetUnit;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__TARGET_UNIT, oldTargetUnit, targetUnit));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__SOURCE_UNIT:
				return getSourceUnit();
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__TARGET_UNIT:
				return getTargetUnit();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__SOURCE_UNIT:
				setSourceUnit((String)newValue);
				return;
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__TARGET_UNIT:
				setTargetUnit((String)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__SOURCE_UNIT:
				setSourceUnit(SOURCE_UNIT_EDEFAULT);
				return;
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__TARGET_UNIT:
				setTargetUnit(TARGET_UNIT_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__SOURCE_UNIT:
				return SOURCE_UNIT_EDEFAULT == null ? sourceUnit != null : !SOURCE_UNIT_EDEFAULT.equals(sourceUnit);
			case Eclassxmlschemacommon_2_0SimplifiedPackage.UNITMAPPING__TARGET_UNIT:
				return TARGET_UNIT_EDEFAULT == null ? targetUnit != null : !TARGET_UNIT_EDEFAULT.equals(targetUnit);
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (sourceUnit: ");
		result.append(sourceUnit);
		result.append(", targetUnit: ");
		result.append(targetUnit);
		result.append(')');
		return result.toString();
	}

} //UNITMAPPINGImpl
/*
 * This file is part of wegenenverkeer common-resteasy.
 * Copyright (c) AWV Agentschap Wegen en Verkeer, Vlaamse Gemeenschap
 * The program is available in open source according to the Apache License, Version 2.0.
 * For full licensing details, see LICENSE.txt in the project root.
 */

package be.wegenenverkeer.common.resteasy.logging;

import be.eliwan.profiling.api.ProfilingSink;
import be.wegenenverkeer.common.resteasy.exception.AbstractRestException;
import be.wegenenverkeer.common.resteasy.exception.ExceptionUtil;
import be.wegenenverkeer.common.resteasy.exception.ServiceException;
import be.wegenenverkeer.common.resteasy.json.InputStreamSerializer;
import be.wegenenverkeer.common.resteasy.json.RestJsonMapper;
import org.jboss.resteasy.annotations.interception.ServerInterceptor;
import org.jboss.resteasy.core.ResourceMethodInvoker;
import org.jboss.resteasy.core.ServerResponse;
import org.jboss.resteasy.spi.Failure;
import org.jboss.resteasy.spi.HttpRequest;
import org.jboss.resteasy.spi.interception.MessageBodyReaderContext;
import org.jboss.resteasy.spi.interception.MessageBodyReaderInterceptor;
import org.jboss.resteasy.spi.interception.PostProcessInterceptor;
import org.jboss.resteasy.spi.interception.PreProcessInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.util.List;
import java.util.Map;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.Provider;

/**
 * RESTEasy interceptor which is also exposed as a Spring bean. The interceptor hooks itself
 * before and after every call and provides thorough logging of each call, trying to keep the
 * data from the different stages of the execution of a single request together even in the
 * face of concurrent logs. The interceptor can also be used as logger for service code: the
 * user's log output is then included in the output of this logger.
 *
 * <p>State for the current request is kept in {@link ThreadLocal}s which are cleared at the
 * end of the call in {@code finishCall}, so the bean is safe for concurrent requests.</p>
 */
@Provider
@Component("loggerInterceptor")
@ServerInterceptor
public class PreProcessLoggingInterceptor
        implements InitializingBean, PreProcessInterceptor, MessageBodyReaderInterceptor, PostProcessInterceptor {

    private static final Logger LOG = LoggerFactory.getLogger(PreProcessLoggingInterceptor.class);
    private static final RestJsonMapper MAPPER = new RestJsonMapper();
    private static final String NEWLINE = "\n";
    private static final String INDENT = "\n\t";
    private static final String ARROW = " -> ";

    // Accumulates the log message for the current request; cleared in finishCall().
    private static final ThreadLocal<StringBuilder> STRING_BUILDER = new ThreadLocal<>();

    /** String indicating the grouping for the profiling. Each service is handled independently. */
    public static final ThreadLocal<String> PROFILE_GROUP = new ThreadLocal<>();

    /** Start moment of the current request (epoch millis), used to compute the call duration. */
    public static final ThreadLocal<Long> START_MOMENT = new ThreadLocal<>();

    // Optional profiling sink; defaults to a no-op when no "restProfilingRegistrar" bean exists.
    @Autowired(required = false)
    @Qualifier("restProfilingRegistrar")
    private ProfilingSink profilingContainer = (group, duration) -> {
        // do nothing
    };

    @Override
    public void afterPropertiesSet() throws Exception {
        // InputStream bodies cannot be serialized to JSON for logging; use a placeholder serializer.
        MAPPER.addClassSerializer(InputStream.class, new InputStreamSerializer());
    }

    /**
     * Implementation of the PreProcessInterceptor interface. Starts the timing and builds the
     * first part of the log message (method, URI, headers, cookies, query and path parameters).
     *
     * @param request the request
     * @param method the invoked resource method
     * @return always {@code null} so the invocation continues normally
     * @throws Failure failure exception
     * @throws WebApplicationException web application exception
     */
    @Override
    public ServerResponse preProcess(HttpRequest request, ResourceMethodInvoker method)
            throws Failure, WebApplicationException {
        START_MOMENT.set(System.currentTimeMillis());
        PROFILE_GROUP.set(method.getMethod().getDeclaringClass().getSimpleName() + ":" + method.getMethod().getName());
        STRING_BUILDER.set(new StringBuilder("Service: "));
        StringBuilder sb = STRING_BUILDER.get();
        sb.append(request.getHttpMethod());
        sb.append(' ');
        sb.append(request.getUri().getAbsolutePath().toASCIIString());
        // log HTTP request headers
        sb.append("\nHTTP request headers:");
        for (Map.Entry<String, List<String>> entry : request.getHttpHeaders().getRequestHeaders().entrySet()) {
            sb.append("\n ").append(entry.getKey()).append(": ");
            String sep = "";
            for (String s : entry.getValue()) {
                sb.append(sep);
                sep = ", ";
                sb.append(s);
            }
        }
        if (null != method.getConsumes()) {
            sb.append("\nRequest types");
            for (MediaType mediaType : method.getConsumes()) {
                sb.append(' ').append(mediaType.toString());
            }
        }
        if (null != method.getProduces()) {
            sb.append("\nResponse types");
            for (MediaType mediaType : method.getProduces()) {
                sb.append(' ').append(mediaType.toString());
            }
        }
        sb.append("\nCookies: ");
        Map<String, Cookie> cookies = request.getHttpHeaders().getCookies();
        for (Map.Entry<String, Cookie> entry : cookies.entrySet()) {
            sb.append(INDENT);
            sb.append(entry.getKey());
            sb.append(ARROW);
            sb.append(entry.getValue());
        }
        sb.append("\nQuery Parameters: ");
        MultivaluedMap<String, String> params = request.getUri().getQueryParameters();
        for (Map.Entry<String, List<String>> entry : params.entrySet()) {
            sb.append(INDENT);
            sb.append(entry.getKey());
            sb.append(ARROW);
            sb.append(entry.getValue());
        }
        sb.append("\nPath parameters: ");
        MultivaluedMap<String, String> pathParams = request.getUri().getPathParameters();
        for (Map.Entry<String, List<String>> entry : pathParams.entrySet()) {
            sb.append(INDENT);
            sb.append(entry.getKey());
            sb.append(ARROW);
            sb.append(entry.getValue());
        }
        return null;
    }

    /**
     * Implementation of the MessageBodyReaderInterceptor interface. Logs the request body.
     *
     * @param context the service context
     * @return this method simply returns the result of the next reader in the chain
     * @throws IOException when thrown by the next reader in the chain
     */
    @Override
    public Object read(MessageBodyReaderContext context) throws IOException {
        Object result = context.proceed();
        StringBuilder sb = STRING_BUILDER.get();
        // Fix: guard against a missing builder (reader invoked without preProcess) and against a
        // null body — the original dereferenced both unconditionally and could NPE.
        if (null != sb && null != result) {
            sb.append("\nDocument body type: ").append(result.getClass().toString());
            sb.append("\nDocument content:\n");
            if (result.getClass().isAnnotationPresent(DoNotLog.class)) {
                sb.append("<Not serialized ").append(result.getClass().toString()).append(">");
            } else if (result.getClass().isAnnotationPresent(LogUsingToString.class)) {
                sb.append(result.toString());
            } else {
                sb.append(MAPPER.writeValueAsString(result));
            }
        }
        return result;
    }

    /**
     * Implementation of the PostProcessInterceptor interface. Logs the response entity and
     * finishes the call (duration + actual log output).
     *
     * @param response server response
     */
    @Override
    public void postProcess(ServerResponse response) {
        StringBuilder sb = STRING_BUILDER.get();
        if (null == sb) {
            sb = new StringBuilder();
            STRING_BUILDER.set(sb);
        }
        Object result = response.getEntity();
        if (result != null) {
            sb.append("\nReply type: ");
            sb.append(result.getClass().toString());
            sb.append("\nOutput document:\n");
            try {
                if (result.getClass().isAnnotationPresent(DoNotLog.class)) {
                    sb.append("<Not serialized ").append(result.getClass().toString()).append(">");
                } else if (contains(response.getAnnotations(), DoNotLogResponse.class)) {
                    // Fix: the format string was missing the closing quote after %s.
                    sb.append(String.format("<Not serialized response from method '%s'>", PROFILE_GROUP.get()));
                } else if (result.getClass().isAnnotationPresent(LogUsingToString.class)) {
                    sb.append(result.toString());
                } else if (result instanceof String) {
                    sb.append(result);
                } else {
                    String output = MAPPER.writeValueAsString(result);
                    sb.append(output);
                }
            } catch (IOException e) {
                // Parameterized logging; the throwable as last argument keeps the stack trace.
                LOG.warn("JSON probleem met {}", result, e);
            }
        }
        finishCall(false);
    }

    /**
     * Closing log output in case of an error.
     *
     * @param exception fault to log
     * @param msg message
     */
    public void postProcessError(Exception exception, String msg) {
        StringBuilder sb = STRING_BUILDER.get();
        if (null == sb) {
            sb = new StringBuilder();
            STRING_BUILDER.set(sb);
        }
        sb.append("\nOOPS: ").append(msg).append(NEWLINE);
        ExceptionUtil eu = new ExceptionUtil(exception);
        if (exception instanceof AbstractRestException && !(exception instanceof ServiceException)) {
            // no stack trace, log at info level
            finishCall(false);
        } else {
            sb.append(eu.getStackTrace());
            finishCall(true);
        }
    }

    /**
     * Appends the call duration, emits the accumulated log message at the proper level and
     * clears all per-request ThreadLocal state.
     *
     * @param isError log at error level when true, info level otherwise
     */
    private void finishCall(boolean isError) {
        StringBuilder sb = STRING_BUILDER.get();
        long now = System.currentTimeMillis();
        Long start = START_MOMENT.get();
        if (null != start) {
            long time = now - start;
            profilingContainer.register(PROFILE_GROUP.get(), time);
            sb.append(String.format("%nDuur: %.3fs", time / 1000.0));
        } else {
            sb.append("\nDuur: Onbekend, kan starttijd niet bepalen.");
        }
        if (isError) {
            LOG.error(sb.toString());
        } else {
            LOG.info(sb.toString());
        }
        // Always clear the ThreadLocals so pooled request threads do not leak state.
        PROFILE_GROUP.remove();
        START_MOMENT.remove();
        STRING_BUILDER.remove();
    }

    /**
     * Does the annotation array contain an annotation of the given type?
     *
     * @param list annotations to scan, may be null
     * @param annotation annotation type to look for
     * @return true when present
     */
    private boolean contains(Annotation[] list, Class<?> annotation) {
        if (null != list) {
            for (Annotation test : list) {
                if (annotation.isAssignableFrom(test.getClass())) {
                    return true;
                }
            }
        }
        return false;
    }
}
/** * Copyright 2011-2013 FoundationDB, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* The original from which this derives bore the following: */ /* Derby - Class org.apache.derby.impl.sql.compile.ResultColumn Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.foundationdb.sql.parser; import com.foundationdb.sql.StandardException; import com.foundationdb.sql.types.DataTypeDescriptor; /** * A ResultColumn represents a result column in a SELECT, INSERT, or UPDATE * statement. In a SELECT statement, the result column just represents an * expression in a row being returned to the client. For INSERT and UPDATE * statements, the result column represents an column in a stored table. 
* So, a ResultColumn has to be bound differently depending on the type of * statement it appears in. * <P> * The type of the ResultColumn can differ from its underlying expression, * for example in certain joins the ResultColumn can be nullable even if * its underlying column is not. In an INSERT or UPDATE the ResultColumn * will represent the type of the column in the table, the type of * the underlying expresion will be the type of the source of the * value to be insert or updated. The method columnTypeAndLengthMatch() * can be used to detect when normalization is required between * the expression and the tyoe of ResultColumn. This class does * not implement any type normalization (conversion), this is * typically handled by a NormalizeResultSetNode. * */ public class ResultColumn extends ValueNode { /* name and exposedName should point to the same string, unless there is a * derived column list, in which case name will point to the underlying name * and exposedName will point to the name from the derived column list. */ private String name; private String exposedName; private String tableName; private ValueNode expression; private boolean defaultColumn; // tells us if this ResultColumn represents an autoincrement column in a // base table. private boolean autoincrement; private ColumnReference reference; // used to verify quals at bind time, if given. /* virtualColumnId is the ResultColumn's position (1-based) within the ResultSet */ private int virtualColumnId; private boolean isNameGenerated; // tells us whether the user provided the alias names along with the query or // it is internally assigning the column name as the alias name by default. // If the value is true then the user has provided the alias name // along with the query. private boolean havingAlias; public boolean isHavingAlias(){ return this.havingAlias; } /** * Different types of initializer parameters indicate different * types of initialization. 
Parameters may be: * * <ul> * <li>arg1 The name of the column, if any.</li> * <li>arg2 The expression this result column represents</li> * </ul> * * <p> * - OR - * </p> * * <ul> * <li>arg1 a column reference node</li> * <li>arg2 The expression this result column represents</li> * </ul> * * <p> * - OR - * </p> * * <ul> * <li>arg1 The column descriptor.</li> * <li>arg2 The expression this result column represents</li> * </ul> * * <p> * - OR - * </p> * * <ul> * <li>dtd The type of the column</li> * <li>expression The expression this result column represents</li> * </ul> */ public void init(Object arg1, Object arg2) throws StandardException { // RESOLVE: This is something of a hack - it is not obvious that // the first argument being null means it should be treated as // a String. if ((arg1 instanceof String) || (arg1 == null)) { this.name = (String)arg1; this.exposedName = this.name; if(null==arg1 || ((String)arg1).equals("")){ this.havingAlias = false; }else{ this.havingAlias = true; } setExpression((ValueNode)arg2); } else if (arg1 instanceof ColumnReference) { ColumnReference ref = (ColumnReference)arg1; this.name = ref.getColumnName(); this.exposedName = ref.getColumnName(); /* when we bind, we'll want to make sure the reference has the right table name. */ this.reference = ref; setExpression((ValueNode)arg2); } else { setType((DataTypeDescriptor)arg1); setExpression((ValueNode)arg2); if (arg2 instanceof ColumnReference) { reference = (ColumnReference)arg2; } } /* this result column represents a <default> keyword in an insert or * update statement */ if (expression != null && expression.isInstanceOf(NodeTypes.DEFAULT_NODE)) defaultColumn = true; } /** * Fill this node with a deep copy of the given node. 
*/ public void copyFrom(QueryTreeNode node) throws StandardException { super.copyFrom(node); ResultColumn other = (ResultColumn)node; this.name = other.name; this.exposedName = other.exposedName; this.tableName = other.tableName; this.expression = (ValueNode)getNodeFactory().copyNode(other.expression, getParserContext()); this.defaultColumn = other.defaultColumn; this.autoincrement = other.autoincrement; this.reference = (ColumnReference)getNodeFactory().copyNode(other.reference, getParserContext()); this.virtualColumnId = other.virtualColumnId; this.isNameGenerated = other.isNameGenerated; } /** * Returns TRUE if the ResultColumn is standing in for a DEFAULT keyword in * an insert/update statement. */ public boolean isDefaultColumn() { return defaultColumn; } public void setDefaultColumn(boolean value) { defaultColumn = value; } /** * Return TRUE if this result column matches the provided column name. * * This function is used by ORDER BY column resolution. For the * ORDER BY clause, Derby will prefer to match on the column's * alias (exposedName), but will also successfully match on the * underlying column name. Thus the following statements are * treated equally: * select name from person order by name; * select name as person_name from person order by name; * select name as person_name from person order by person_name; * See DERBY-2351 for more discussion. */ boolean columnNameMatches(String columnName) { return columnName.equals(exposedName) || columnName.equals(name) || columnName.equals(getSourceColumnName()); } /** * Returns the underlying source column name, if this ResultColumn * is a simple direct reference to a table column, or NULL otherwise. */ String getSourceColumnName() { if (expression instanceof ColumnReference) return ((ColumnReference)expression).getColumnName(); return null; } /** * The following methods implement the ResultColumnDescriptor * interface. See the Language Module Interface for details. 
*/ public String getName() { return exposedName; } public String getSchemaName() throws StandardException { if (expression != null) return expression.getSchemaName(); else return null; } public String getTableName() { if (tableName != null) { return tableName; } else if (expression != null) return expression.getTableName(); else return null; } public int getColumnPosition() { return virtualColumnId; } /** * Set the expression in this ResultColumn. This is useful in those * cases where you don't know the expression in advance, like for * INSERT statements with column lists, where the column list and * SELECT or VALUES clause are parsed separately, and then have to * be hooked up. * * @param expression The expression to be set in this ResultColumn */ public void setExpression(ValueNode expression) { this.expression = expression; } /** * Get the expression in this ResultColumn. * * @return ValueNode this.expression */ public ValueNode getExpression() { return expression; } /** * Set the expression to a null node of the * correct type. * * @exception StandardException Thrown on error */ void setExpressionToNullNode() throws StandardException { setExpression(getNullNode(getType())); } /** * Set the name in this ResultColumn. This is useful when you don't * know the name at the time you create the ResultColumn, for example, * in an insert-select statement, where you want the names of the * result columns to match the table being inserted into, not the * table they came from. * * @param name The name to set in this ResultColumn */ public void setName(String name) { if (this.name == null) { this.name = name; this.havingAlias = false; } else { this.havingAlias = true; assert (reference == null || name.equals(reference.getColumnName())) : "don't change name from reference name"; } this.exposedName = name; } /** * Is the name for this ResultColumn generated? */ public boolean isNameGenerated() { return isNameGenerated; } /** * Set that this result column name is generated. 
*/ public void setNameGenerated(boolean value) { isNameGenerated = value; } /** * Adjust the virtualColumnId for this ResultColumn by the specified amount * * @param adjust The adjustment for the virtualColumnId */ public void adjustVirtualColumnId(int adjust) { virtualColumnId += adjust; } /** * Set the virtualColumnId for this ResultColumn * * @param id The virtualColumnId for this ResultColumn */ public void setVirtualColumnId(int id) { virtualColumnId = id; } /** * Get the virtualColumnId for this ResultColumn * * @return virtualColumnId for this ResultColumn */ public int getVirtualColumnId() { return virtualColumnId; } /** * Convert this object to a String. See comments in QueryTreeNode.java * for how this should be done for tree printing. * * @return This object as a String */ public String toString() { return "exposedName: " + exposedName + "\n" + "name: " + name + "\n" + "tableName: " + tableName + "\n" + "isDefaultColumn: " + defaultColumn + "\n" + super.toString(); } /** * Prints the sub-nodes of this object. See QueryTreeNode.java for * how tree printing is supposed to work. * * @param depth The depth of this node in the tree */ public void printSubNodes(int depth) { super.printSubNodes(depth); if (expression != null) { printLabel(depth, "expression: "); expression.treePrint(depth + 1); } if (reference != null) { printLabel(depth, "reference: "); reference.treePrint(depth + 1); } } /** * Accept the visitor for all visitable children of this node. 
* * @param v the visitor * * @exception StandardException on error */ void acceptChildren(Visitor v) throws StandardException { super.acceptChildren(v); if (expression != null) { setExpression((ValueNode)expression.accept(v)); } if (reference != null) { reference = (ColumnReference)reference.accept(v); } } public TableName getTableNameObject() { return null; } /* Get the wrapped reference if any */ public ColumnReference getReference() { return reference; } public boolean isEquivalent(ValueNode o) throws StandardException { if (o.getNodeType() == getNodeType()) { ResultColumn other = (ResultColumn)o; if (expression != null) { return expression.isEquivalent(other.expression); } } return false; } }
/*
 * Copyright 2012-2014 Dan Cioca
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.dci.intellij.dbn.ddl;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.JList;
import javax.swing.ListSelectionModel;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import com.dci.intellij.dbn.common.AbstractProjectComponent;
import com.dci.intellij.dbn.common.Constants;
import com.dci.intellij.dbn.common.thread.WriteActionRunner;
import com.dci.intellij.dbn.common.ui.ListUtil;
import com.dci.intellij.dbn.common.util.MessageUtil;
import com.dci.intellij.dbn.common.util.VirtualFileUtil;
import com.dci.intellij.dbn.connection.ConnectionHandler;
import com.dci.intellij.dbn.connection.ConnectionManager;
import com.dci.intellij.dbn.ddl.ui.AttachDDLFileDialog;
import com.dci.intellij.dbn.ddl.ui.DDLFileNameListCellRenderer;
import com.dci.intellij.dbn.ddl.ui.DetachDDLFileDialog;
import com.dci.intellij.dbn.object.DBSchema;
import com.dci.intellij.dbn.object.common.DBSchemaObject;
import com.dci.intellij.dbn.vfs.DatabaseEditableObjectFile;
import com.dci.intellij.dbn.vfs.DatabaseFileSystem;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.SelectFromListDialog;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMExternalizable;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileCopyEvent;
import com.intellij.openapi.vfs.VirtualFileEvent;
import com.intellij.openapi.vfs.VirtualFileListener;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFileMoveEvent;
import com.intellij.openapi.vfs.VirtualFilePropertyEvent;

/**
 * Project component which maintains the association between database schema objects and the
 * DDL files that edit them. Mappings are persisted (file path -> qualified object path with
 * connection id) via JDOMExternalizable; resolved objects are memoized in {@code cache}.
 */
public class DDLFileAttachmentManager extends AbstractProjectComponent implements VirtualFileListener, JDOMExternalizable {
    // file path -> "[connectionId]schema.objectName"
    private Map<String, String> mappings = new HashMap<String, String>();
    // memoized resolution of DDL file -> schema object (null values cached as "not resolvable")
    private Map<VirtualFile, DBSchemaObject> cache = new HashMap<VirtualFile, DBSchemaObject>();

    private DDLFileAttachmentManager(Project project) {
        super(project);
        VirtualFileManager.getInstance().addVirtualFileListener(this);
    }

    /**
     * Returns the valid DDL files currently bound to the given object, pruning mappings whose
     * files no longer exist. May return null when nothing (valid) is bound.
     */
    @Nullable
    public List<VirtualFile> getBoundDDLFiles(DBSchemaObject object) {
        List<String> filePaths = getBoundFilePaths(object);
        List<VirtualFile> virtualFiles = null;
        if (filePaths.size() > 0) {
            for (String filePath : filePaths) {
                VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(filePath);
                if (virtualFile == null || !virtualFile.isValid()) {
                    // stale mapping: the file disappeared from disk
                    mappings.remove(filePath);
                } else {
                    if (virtualFiles == null) virtualFiles = new ArrayList<VirtualFile>();
                    virtualFiles.add(virtualFile);
                }
            }
        }
        checkInvalidBoundFiles(virtualFiles, object);
        return virtualFiles;
    }

    /**
     * Resolves the schema object a DDL file is bound to by parsing the stored object path
     * ("[connectionId]schema.objectName"). Results (including failures) are cached.
     */
    @Nullable
    public DBSchemaObject getEditableObject(VirtualFile ddlFile) {
        if (cache.containsKey(ddlFile)) {
            return cache.get(ddlFile);
        }
        String objectPath = mappings.get(ddlFile.getPath());
        if (objectPath != null) {
            int startIndex = 1;
            int endIndex = objectPath.indexOf(']');
            String connectionId = objectPath.substring(startIndex, endIndex);
            ConnectionManager connectionManager = ConnectionManager.getInstance(getProject());
            ConnectionHandler connectionHandler = connectionManager.getConnectionHandler(connectionId);
            if (connectionHandler != null) {
                startIndex = endIndex + 1;
                endIndex = objectPath.indexOf('.', startIndex);
                String schemaName = objectPath.substring(startIndex, endIndex);
                DBSchema schema = connectionHandler.getObjectBundle().getSchema(schemaName);
                if (schema != null) {
                    startIndex = endIndex + 1;
                    endIndex = objectPath.length();
                    String objectName = objectPath.substring(startIndex, endIndex);
                    DBSchemaObject object = (DBSchemaObject) schema.getChildObject(objectName, false);
                    cache.put(ddlFile, object);
                    return object;
                }
            }
        }
        cache.put(ddlFile, null);
        return null;
    }

    /** Returns true when at least one DDL file is mapped to the given object. */
    public boolean hasBoundDDLFiles(DBSchemaObject object) {
        String objectPath = object.getQualifiedNameWithConnectionId();
        // entrySet avoids the keySet() + get() double lookup of the original
        for (Map.Entry<String, String> entry : mappings.entrySet()) {
            if (entry.getValue().equals(objectPath)) {
                return true;
            }
        }
        return false;
    }

    /** Detaches files whose extension no longer matches any DDL file type of the object. */
    private void checkInvalidBoundFiles(List<VirtualFile> virtualFiles, DBSchemaObject object) {
        if (virtualFiles != null && virtualFiles.size() > 0) {
            List<VirtualFile> obsolete = null;
            for (VirtualFile virtualFile : virtualFiles) {
                if (!virtualFile.isValid() || !isValidDDLFile(virtualFile, object)) {
                    if (obsolete == null) obsolete = new ArrayList<VirtualFile>();
                    obsolete.add(virtualFile);
                }
            }
            if (obsolete != null) {
                virtualFiles.removeAll(obsolete);
                for (VirtualFile virtualFile : obsolete) {
                    detachDDLFile(virtualFile);
                }
            }
        }
    }

    /** A DDL file is valid when its extension belongs to one of the object's DDL file types. */
    private boolean isValidDDLFile(VirtualFile virtualFile, DBSchemaObject object) {
        for (DDLFileType ddlFileType : object.getDDLFileTypes()) {
            if (ddlFileType.getExtensions().contains(virtualFile.getExtension())) {
                return true;
            }
        }
        return false;
    }

    public int showFileAttachDialog(DBSchemaObject object, List<VirtualFile> virtualFiles) {
        AttachDDLFileDialog dialog = new AttachDDLFileDialog(virtualFiles, object);
        dialog.show();
        return dialog.getExitCode();
    }

    public int showFileDetachDialog(DBSchemaObject object, List<VirtualFile> virtualFiles) {
        DetachDDLFileDialog dialog = new DetachDDLFileDialog(virtualFiles, object);
        dialog.show();
        return dialog.getExitCode();
    }

    public void bindDDLFile(DBSchemaObject object, VirtualFile virtualFile) {
        cache.put(virtualFile, object);
        mappings.put(virtualFile.getPath(), object.getQualifiedNameWithConnectionId());
    }

    public void detachDDLFile(VirtualFile virtualFile) {
        cache.remove(virtualFile);
        mappings.remove(virtualFile.getPath());
    }

    /** Finds files in the module (or project) whose name matches "<object>.<ddl extension>". */
    private List<VirtualFile> lookupApplicableDDLFiles(DBSchemaObject object) {
        Module module = object.getConnectionHandler().getModule();
        Project project = object.getConnectionHandler().getProject();
        List<VirtualFile> fileList = new ArrayList<VirtualFile>();
        for (DDLFileType ddlFileType : object.getDDLFileTypes()) {
            for (String extension : ddlFileType.getExtensions()) {
                String fileName = object.getName().toLowerCase() + "." + extension;
                if (module == null) {
                    VirtualFile[] files = VirtualFileUtil.lookupFilesForName(project, fileName);
                    fileList.addAll(Arrays.asList(files));
                } else {
                    VirtualFile[] files = VirtualFileUtil.lookupFilesForName(module, fileName);
                    fileList.addAll(Arrays.asList(files));
                }
            }
        }
        return fileList;
    }

    /** Applicable DDL files which are not yet bound to the given object. */
    public List<VirtualFile> lookupUnboundDDLFiles(DBSchemaObject object) {
        List<String> filePaths = getBoundFilePaths(object);
        List<VirtualFile> virtualFiles = lookupApplicableDDLFiles(object);
        List<VirtualFile> unboundVirtualFiles = new ArrayList<VirtualFile>();
        for (VirtualFile virtualFile : virtualFiles) {
            if (!filePaths.contains(virtualFile.getPath())) {
                unboundVirtualFiles.add(virtualFile);
            }
        }
        return unboundVirtualFiles;
    }

    /**
     * Prompts the user for a location and DDL file type, creates the file, binds it to the
     * object and reopens the object editor.
     */
    public void createDDLFile(final DBSchemaObject object) {
        ConnectionHandler connectionHandler = object.getConnectionHandler();
        final Project project = object.getProject();
        FileChooserDescriptor descriptor = new FileChooserDescriptor(false, true, false, false, false, false);
        descriptor.setTitle("Select new ddl-file location");
        VirtualFile[] contentRoots;

        Module module = connectionHandler.getModule();
        if (module == null) {
            ProjectRootManager rootManager = ProjectRootManager.getInstance(project);
            contentRoots = rootManager.getContentRoots();
        } else {
            ModuleRootManager rootManager = ModuleRootManager.getInstance(module);
            contentRoots = rootManager.getContentRoots();
        }
        descriptor.setIsTreeRootVisible(contentRoots.length == 1);
        descriptor.setRoots(contentRoots);
        DDLFileNameProvider fileNameProvider = getDDLFileNameProvider(object);
        if (fileNameProvider != null) {
            VirtualFile[] selectedDirectories = FileChooser.chooseFiles(descriptor, project, null);
            if (selectedDirectories.length > 0) {
                final String fileName = fileNameProvider.getFileName();
                final VirtualFile parentDirectory = selectedDirectories[0];
                new WriteActionRunner() {
                    @Override
                    public void run() {
                        try {
                            VirtualFile virtualFile = parentDirectory.createChildData(this, fileName);
                            bindDDLFile(object, virtualFile);
                            DatabaseEditableObjectFile databaseFile = object.getVirtualFile();
                            databaseFile.updateDDLFiles();
                            DatabaseFileSystem.getInstance().reopenEditor(object);
                        } catch (IOException e) {
                            MessageUtil.showErrorDialog("Could not create file " + parentDirectory + File.separator + fileName + ".", e);
                        }
                    }
                }.start();
            }
        }
    }

    /**
     * Offers the unbound applicable DDL files for attachment; when none exist, informs the
     * user (listing files that are already bound) and optionally creates a new file.
     */
    public void bindDDLFiles(DBSchemaObject object) {
        Project project = object.getProject();
        List<VirtualFile> virtualFiles = lookupUnboundDDLFiles(object);
        if (virtualFiles.size() == 0) {
            Module module = object.getConnectionHandler().getModule();
            List<String> boundFiles = getBoundFilePaths(object);

            StringBuilder message = new StringBuilder();
            message.append(boundFiles.size() == 0 ?
                    "No DDL Files were found in " :
                    "No additional DDL Files were found in ");
            if (module == null) {
                message.append("project scope.");
            } else {
                message.append("scope of module\"");
                message.append(module.getName());
                message.append("\".");
            }

            if (boundFiles.size() > 0) {
                message.append("\n\nFollowing files are already bound to ");
                message.append(object.getQualifiedNameWithType());
                message.append(":");
                for (String boundFile : boundFiles) {
                    message.append("\n");
                    message.append(boundFile);
                }
            }

            String[] options = {"OK", "Create new..."};
            int optionIndex = Messages.showDialog(project,
                    message.toString(),
                    Constants.DBN_TITLE_PREFIX + "No DDL Files found",
                    options, 0,
                    Messages.getInformationIcon());
            if (optionIndex == 1) {
                createDDLFile(object);
            }
        } else {
            int exitCode = showFileAttachDialog(object, virtualFiles);
            if (exitCode != DialogWrapper.CANCEL_EXIT_CODE) {
                DatabaseFileSystem.getInstance().reopenEditor(object);
            }
        }
    }

    public void detachDDLFiles(DBSchemaObject object) {
        List<VirtualFile> virtualFiles = getBoundDDLFiles(object);
        // Fix: getBoundDDLFiles is @Nullable — the original passed null straight into the
        // detach dialog. Nothing to detach means nothing to show.
        if (virtualFiles == null) {
            return;
        }
        int exitCode = showFileDetachDialog(object, virtualFiles);
        if (exitCode != DialogWrapper.CANCEL_EXIT_CODE) {
            DatabaseFileSystem.getInstance().reopenEditor(object);
        }
    }

    /**
     * Determines the file name provider for a new DDL file; when several file type/extension
     * combinations apply, the user picks one from a list dialog. Returns null on cancel.
     */
    private DDLFileNameProvider getDDLFileNameProvider(DBSchemaObject object) {
        DDLFileType[] ddlFileTypes = object.getDDLFileTypes();
        if (ddlFileTypes.length == 1 && ddlFileTypes[0].getExtensions().size() == 1) {
            DDLFileType ddlFileType = ddlFileTypes[0];
            return new DDLFileNameProvider(object, ddlFileType, ddlFileType.getExtensions().get(0));
        } else {
            List<DDLFileNameProvider> fileNameProviders = new ArrayList<DDLFileNameProvider>();
            for (DDLFileType ddlFileType : ddlFileTypes) {
                for (String extension : ddlFileType.getExtensions()) {
                    DDLFileNameProvider fileNameProvider = new DDLFileNameProvider(object, ddlFileType, extension);
                    fileNameProviders.add(fileNameProvider);
                }
            }

            SelectFromListDialog fileTypeDialog = new SelectFromListDialog(
                    object.getProject(),
                    fileNameProviders.toArray(),
                    ListUtil.BASIC_TO_STRING_ASPECT,
                    "Select DDL file type",
                    ListSelectionModel.SINGLE_SELECTION);
            JList list = (JList) fileTypeDialog.getPreferredFocusedComponent();
            list.setCellRenderer(new DDLFileNameListCellRenderer());
            fileTypeDialog.show();
            Object[] selectedFileTypes = fileTypeDialog.getSelection();
            if (selectedFileTypes != null) {
                return (DDLFileNameProvider) selectedFileTypes[0];
            }
        }
        return null;
    }

    /** All file paths currently mapped to the given object. */
    private List<String> getBoundFilePaths(DBSchemaObject object) {
        String objectPath = object.getQualifiedNameWithConnectionId();
        List<String> filePaths = new ArrayList<String>();
        for (Map.Entry<String, String> entry : mappings.entrySet()) {
            if (entry.getValue().equals(objectPath)) {
                filePaths.add(entry.getKey());
            }
        }
        return filePaths;
    }

    private String getObjectPath(String filePath) {
        return mappings.get(filePath);
    }

    /***************************************
     *            ProjectComponent         *
     ***************************************/
    public static DDLFileAttachmentManager getInstance(Project project) {
        return project.getComponent(DDLFileAttachmentManager.class);
    }

    @NonNls
    @NotNull
    public String getComponentName() {
        return "DBNavigator.Project.DDLFileAttachmentManager";
    }

    public void disposeComponent() {
        mappings.clear();
        cache.clear();
        super.disposeComponent();
    }

    /************************************************
     *              VirtualFileListener             *
     ************************************************/
    @Override
    public void propertyChanged(VirtualFilePropertyEvent event) {
    }

    @Override
    public void contentsChanged(VirtualFileEvent event) {
    }

    @Override
    public void fileCreated(VirtualFileEvent event) {
    }

    @Override
    public void fileDeleted(VirtualFileEvent event) {
        // Deleting a bound DDL file detaches it and refreshes the object editor.
        DBSchemaObject object = cache.get(event.getFile());
        if (object != null) {
            detachDDLFile(event.getFile());
            DatabaseFileSystem.getInstance().reopenEditor(object);
        }
    }

    @Override
    public void fileMoved(VirtualFileMoveEvent event) {
    }

    @Override
    public void fileCopied(VirtualFileCopyEvent event) {
    }

    @Override
    public void beforePropertyChange(VirtualFilePropertyEvent event) {
    }

    @Override
    public void beforeContentsChange(VirtualFileEvent event) {
    }

    @Override
    public void beforeFileDeletion(VirtualFileEvent event) {
    }

    @Override
    public void beforeFileMovement(VirtualFileMoveEvent event) {
    }

    /************************************************
     *               JDOMExternalizable             *
     ************************************************/
    public void readExternal(Element element) throws InvalidDataException {
        for (Object child : element.getChildren()) {
            Element childElement = (Element) child;
            String file = childElement.getAttributeValue("file");
            String object = childElement.getAttributeValue("object");
            mappings.put(file, object);
        }
    }

    public void writeExternal(Element element) throws WriteExternalException {
        for (Map.Entry<String, String> entry : mappings.entrySet()) {
            Element childElement = new Element("mapping");
            childElement.setAttribute("file", entry.getKey());
            childElement.setAttribute("object", entry.getValue());
            element.addContent(childElement);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import org.apache.phoenix.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.PTable.ImmutableStorageScheme; import org.apache.phoenix.schema.PTable.QualifierEncodingScheme; import org.apache.phoenix.util.EncodedColumnsUtil; import org.apache.phoenix.util.EnvironmentEdgeManager; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Statement; import java.util.Collection; import java.util.List; import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * End-to-end tests for the PHOENIX_ROW_TIMESTAMP() built-in, executed for every
 * combination of column qualifier encoding scheme and immutable storage scheme.
 */
@Category(ParallelStatsDisabledTest.class)
@RunWith(Parameterized.class)
public class PhoenixRowTimestampFunctionIT extends ParallelStatsDisabledIT {

    private final boolean encoded;
    private final boolean optimized;
    private final String tableDDLOptions;
    private static final int NUM_ROWS = 5;
    // Offset (ms) used to place the PK2 date column in the past or future relative to "now".
    private static final long TS_OFFSET = 120000;

    public PhoenixRowTimestampFunctionIT(QualifierEncodingScheme encoding,
            ImmutableStorageScheme storage) {
        StringBuilder optionBuilder = new StringBuilder();
        this.optimized = storage == ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS;
        // We cannot have non encoded column names if the storage type is single cell,
        // so single-cell storage forces an encoded scheme.
        this.encoded =
                encoding != QualifierEncodingScheme.NON_ENCODED_QUALIFIERS || this.optimized;
        if (this.optimized && encoding == QualifierEncodingScheme.NON_ENCODED_QUALIFIERS) {
            optionBuilder.append(" COLUMN_ENCODED_BYTES = "
                    + QualifierEncodingScheme.ONE_BYTE_QUALIFIERS.ordinal());
        } else {
            optionBuilder.append(" COLUMN_ENCODED_BYTES = " + encoding.ordinal());
        }
        optionBuilder.append(", IMMUTABLE_STORAGE_SCHEME = " + storage.toString());
        this.tableDDLOptions = optionBuilder.toString();
    }

    @Parameterized.Parameters(name = "encoding={0},storage={1}")
    public static synchronized Collection<Object[]> data() {
        List<Object[]> list = Lists.newArrayList();
        for (QualifierEncodingScheme encoding : QualifierEncodingScheme.values()) {
            for (ImmutableStorageScheme storage : ImmutableStorageScheme.values()) {
                list.add(new Object[] { encoding, storage });
            }
        }
        return list;
    }

    /**
     * Asserts that every row returned by {@code rs} (column 1 = PHOENIX_ROW_TIMESTAMP())
     * matches the HBase cell timestamp of the empty key-value column, read directly
     * through the HBase client API. Assumes the scan and the query return rows in the
     * same order.
     */
    private void verifyHbaseAllRowsTimestamp(String tableName, ResultSet rs,
            int expectedRowCount) throws Exception {
        Scan scan = new Scan();
        byte[] emptyKVQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(this.encoded).getFirst();
        // fix: Table and ResultScanner were previously never closed (resource leak);
        // both are AutoCloseable in the HBase client API.
        try (org.apache.hadoop.hbase.client.Connection hconn =
                        ConnectionFactory.createConnection(config);
                Table table = hconn.getTable(TableName.valueOf(tableName));
                ResultScanner resultScanner = table.getScanner(scan)) {
            int rowCount = 0;
            while (rs.next()) {
                Result result = resultScanner.next();
                long timeStamp = result.getColumnLatestCell(
                        QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES,
                        emptyKVQualifier).getTimestamp();
                assertEquals(rs.getDate(1).getTime(), timeStamp);
                ++rowCount;
            }
            assertEquals(expectedRowCount, rowCount);
        }
    }

    /**
     * Asserts that the empty key-value cell of the HBase row identified by
     * {@code rowKey} carries exactly {@code expectedTimestamp}.
     */
    private void verifyHbaseRowTimestamp(String tableName, String rowKey,
            Date expectedTimestamp) throws Exception {
        byte[] emptyKVQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(this.encoded).getFirst();
        // fix: Table was previously never closed (resource leak).
        try (org.apache.hadoop.hbase.client.Connection hconn =
                        ConnectionFactory.createConnection(config);
                Table table = hconn.getTable(TableName.valueOf(tableName))) {
            Get get = new Get(Bytes.toBytesBinary(rowKey));
            Result result = table.get(get);
            assertFalse(result.isEmpty());
            long timeStamp = result.getColumnLatestCell(
                    QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES,
                    emptyKVQualifier).getTimestamp();
            assertEquals(expectedTimestamp.getTime(), timeStamp);
        }
    }

    /**
     * Creates a fresh table (PK1 INTEGER, PK2 DATE, KV1/KV2 VARCHAR) and upserts
     * {@code numRows} rows, all sharing the same PK2 value of {@code rowTimestamp}.
     *
     * @return the generated table name
     */
    private String createTestData(long rowTimestamp, int numRows) throws Exception {
        String tableName = generateUniqueName();
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            // Create a test table.
            try (Statement stmt = conn.createStatement()) {
                String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                        + " (PK1 INTEGER NOT NULL, PK2 DATE NOT NULL, KV1 VARCHAR, KV2 VARCHAR"
                        + " CONSTRAINT PK PRIMARY KEY(PK1, PK2))" + this.tableDDLOptions;
                stmt.execute(ddl);
            }
            // Upsert data into the test table.
            String dml = "UPSERT INTO " + tableName + " (PK1, PK2, KV1, KV2) VALUES (?, ?, ?, ?)";
            try (PreparedStatement stmt = conn.prepareStatement(dml)) {
                Date rowTimestampDate = new Date(rowTimestamp);
                // fix: removed redundant locals (count, idValue) that aliased numRows/id
                for (int id = 0; id < numRows; ++id) {
                    stmt.setInt(1, id);
                    stmt.setDate(2, rowTimestampDate);
                    stmt.setString(3, "KV1_" + id);
                    stmt.setString(4, "KV2_" + id);
                    stmt.executeUpdate();
                }
            }
            conn.commit();
        }
        return tableName;
    }

    @Test
    public void testRowTimestampDefault() throws Exception {
        // Only meaningful for the non-encoded, non-optimized combination.
        if (encoded || optimized) {
            return;
        }
        String tableName = generateUniqueName();
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                    + " (PK INTEGER NOT NULL PRIMARY KEY, KV1 VARCHAR, KV2 VARCHAR)"
                    + this.tableDDLOptions;
            conn.createStatement().execute(ddl);
            String dml = "UPSERT INTO " + tableName + " (PK, KV1, KV2) VALUES (?, ?, ?)";
            try (PreparedStatement stmt = conn.prepareStatement(dml)) {
                for (int id = 0; id < NUM_ROWS; ++id) {
                    stmt.setInt(1, id);
                    stmt.setString(2, "KV1_" + id);
                    stmt.setString(3, "KV2_" + id);
                    stmt.executeUpdate();
                }
            } finally {
                conn.commit();
            }
            // verify row timestamp returned by the query matches the empty column cell timestamp
            String dql = "SELECT PHOENIX_ROW_TIMESTAMP() FROM " + tableName;
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(dql);
                verifyHbaseAllRowsTimestamp(tableName, rs, NUM_ROWS);
            }
            // update one row
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("UPSERT INTO " + tableName + " (PK, KV1) VALUES (2, 'KV1_foo')");
            } finally {
                conn.commit();
            }
            // verify again after update
            // verify row timestamp returned by the query matches the empty column cell timestamp
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(dql);
                verifyHbaseAllRowsTimestamp(tableName, rs, NUM_ROWS);
            }
            dql = "SELECT ROWKEY_BYTES_STRING(), PHOENIX_ROW_TIMESTAMP() FROM " + tableName
                    + " WHERE PK >= 1 AND PK <=3 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(dql);
                while (rs.next()) {
                    verifyHbaseRowTimestamp(tableName, rs.getString(1), rs.getDate(2));
                }
            }
        }
    }

    @Test
    public void testRowTimestampColumn() throws Exception {
        String tableName = generateUniqueName();
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
                    + " (PK1 INTEGER NOT NULL, PK2 DATE NOT NULL, KV1 VARCHAR, KV2 VARCHAR"
                    + " CONSTRAINT PK PRIMARY KEY(PK1, PK2 ROW_TIMESTAMP))" + this.tableDDLOptions;
            conn.createStatement().execute(ddl);
            String dml = "UPSERT INTO " + tableName + " (PK1, PK2, KV1, KV2) VALUES (?, ?, ?, ?)";
            long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis();
            Date rowTimestampDate = new Date(rowTimestamp);
            try (PreparedStatement stmt = conn.prepareStatement(dml)) {
                for (int id = 0; id < NUM_ROWS; ++id) {
                    stmt.setInt(1, id);
                    stmt.setDate(2, rowTimestampDate);
                    stmt.setString(3, "KV1_" + id);
                    stmt.setString(4, "KV2_" + id);
                    stmt.executeUpdate();
                }
            } finally {
                conn.commit();
            }
            // With a ROW_TIMESTAMP PK column, PHOENIX_ROW_TIMESTAMP() equals the PK2 value.
            String dql = "SELECT PHOENIX_ROW_TIMESTAMP() FROM " + tableName;
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(dql);
                while (rs.next()) {
                    assertEquals(rs.getDate(1), rowTimestampDate);
                }
            }
        }
    }

    @Test
    // case: No rows should have the phoenix_row_timestamp() = date column
    // Since we used a future date for column PK2
    public void testRowTimestampFunctionAndEqualPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() + TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        // With phoenix_row_timestamp function only in projection
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP() FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() = PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                assertFalse(rs.next());
                rs.close();
            }
        }
        // With phoenix_row_timestamp function and additional columns in projection
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP(), KV1 FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() = PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                assertFalse(rs.next());
                rs.close();
            }
        }
    }

    @Test
    // case: All rows selected should have the phoenix_row_timestamp() < date column
    // Since we used a future date for column PK2
    public void testRowTimestampFunctionOnlyWithLessThanPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() + TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP() FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() < PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                int actualCount = 0;
                while (rs.next()) {
                    assertTrue(rs.getDate(1).before(new Date(rowTimestamp)));
                    actualCount++;
                }
                assertEquals(NUM_ROWS, actualCount);
                rs.close();
            }
        }
    }

    @Test
    // case: All rows selected should have the phoenix_row_timestamp() < date column
    // Since we used a future date for column PK2
    // Additional columns should return non null values.
    public void testRowTimestampFunctionAndAdditionalColsWithLessThanPredicate()
            throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() + TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP(), KV2 FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() < PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                int actualCount = 0;
                while (rs.next()) {
                    assertTrue(rs.getDate(1).before(new Date(rowTimestamp)));
                    rs.getString(2);
                    assertFalse(rs.wasNull());
                    actualCount++;
                }
                assertEquals(NUM_ROWS, actualCount);
                rs.close();
            }
        }
    }

    @Test
    // case: All rows selected should have the phoenix_row_timestamp() > date column
    // Since we used a past date for column PK2
    public void testRowTimestampFunctionOnlyWithGreaterThanPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP() FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() > PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                int actualCount = 0;
                while (rs.next()) {
                    assertTrue(rs.getDate(1).after(new Date(rowTimestamp)));
                    actualCount++;
                }
                assertEquals(NUM_ROWS, actualCount);
                rs.close();
            }
        }
    }

    @Test
    // case: All rows selected should have the phoenix_row_timestamp() > date column
    // Since we used a past date for column PK2
    // Additional columns should return non null values.
    public void testRowTimestampFunctionAndColsWithGreaterThanPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT PHOENIX_ROW_TIMESTAMP(), KV1 FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() > PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                int actualCount = 0;
                while (rs.next()) {
                    assertTrue(rs.getDate(1).after(new Date(rowTimestamp)));
                    rs.getString(2);
                    assertFalse(rs.wasNull());
                    actualCount++;
                }
                assertEquals(NUM_ROWS, actualCount);
                rs.close();
            }
        }
    }

    @Test
    // case: All rows selected should have the phoenix_row_timestamp() > date column
    // Since we used a past date for column PK2
    // Projected columns should return non null and expected values.
    public void testSimpleSelectColsWithPhoenixRowTimestampPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT KV1 FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() > PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                int actualCount = 0;
                while (rs.next()) {
                    String kv1Value = rs.getString(1);
                    assertFalse(rs.wasNull());
                    // fix: the prefix length was taken from the unrelated "KV2_" constant;
                    // same numeric value, but the wrong constant obscured the intent.
                    assertTrue(kv1Value.substring(0, "KV1_".length())
                            .compareToIgnoreCase("KV1_") == 0);
                    actualCount++;
                }
                assertEquals(NUM_ROWS, actualCount);
                rs.close();
            }
        }
    }

    @Test
    // case: Aggregate SQLs work with PhoenixRowTimestamp predicate.
    public void testSelectCountWithPhoenixRowTimestampPredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT COUNT(*) FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() > PK2 ";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                while (rs.next()) {
                    int rowCount = rs.getInt(1);
                    assertFalse(rs.wasNull());
                    assertTrue(rowCount == NUM_ROWS);
                }
                rs.close();
            }
        }
    }

    @Test
    // case: Select with non primary keys in where clause.
    public void testSelectWithMultiplePredicates() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String sql = "SELECT COUNT(*) FROM " + tableName
                    + " WHERE PHOENIX_ROW_TIMESTAMP() > PK2 AND KV1 = 'KV1_1'";
            try (Statement stmt = conn.createStatement()) {
                ResultSet rs = stmt.executeQuery(sql);
                while (rs.next()) {
                    int rowCount = rs.getInt(1);
                    assertFalse(rs.wasNull());
                    assertTrue(rowCount == 1);
                }
                rs.close();
            }
        }
    }

    @Test
    // case: Comparision with TO_TIME()
    public void testTimestampComparePredicate() throws Exception {
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            // Both predicates true: the fixed TO_TIME bound is far in the past.
            try (Statement stmt = conn.createStatement()) {
                String sql = "SELECT COUNT(*) FROM " + tableName
                        + " WHERE ((PHOENIX_ROW_TIMESTAMP() > PK2) AND "
                        + " (PHOENIX_ROW_TIMESTAMP() > TO_TIME('2005-10-01 14:03:22.559')))";
                ResultSet rs = stmt.executeQuery(sql);
                while (rs.next()) {
                    int rowCount = rs.getInt(1);
                    assertFalse(rs.wasNull());
                    assertTrue(rowCount == NUM_ROWS);
                }
                rs.close();
            }
            // Second predicate false for every row: expect a zero count.
            try (Statement stmt = conn.createStatement()) {
                String sql = "SELECT COUNT(*) FROM " + tableName
                        + " WHERE ((PHOENIX_ROW_TIMESTAMP() > PK2) AND "
                        + " (PHOENIX_ROW_TIMESTAMP() < TO_TIME('2005-10-01 14:03:22.559')))";
                ResultSet rs = stmt.executeQuery(sql);
                while (rs.next()) {
                    int rowCount = rs.getInt(1);
                    assertFalse(rs.wasNull());
                    assertTrue(rowCount == 0);
                }
                rs.close();
            }
        }
    }

    @Test
    // case: PHOENIX_ROW_TIMESTAMP() in select clause when aggregating should fail.
    public void testPhoenixRowTimestampWhenAggShouldFail() throws Exception {
        // Do not need to run for all test combinations
        if (encoded || !optimized) {
            return;
        }
        long rowTimestamp = EnvironmentEdgeManager.currentTimeMillis() - TS_OFFSET;
        String tableName = createTestData(rowTimestamp, NUM_ROWS);
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            try (Statement stmt = conn.createStatement()) {
                String sql = "SELECT PHOENIX_ROW_TIMESTAMP(), PK1, COUNT(*) FROM " + tableName
                        + " WHERE ((PHOENIX_ROW_TIMESTAMP() > PK2) AND "
                        + " (PHOENIX_ROW_TIMESTAMP() > TO_TIME('2005-10-01 14:03:22.559')))"
                        + " GROUP BY PHOENIX_ROW_TIMESTAMP(), PK1";
                try {
                    // fix: dropped the unused local ResultSet; only the thrown error matters
                    stmt.executeQuery(sql);
                    fail();
                } catch (Exception e) {
                    Assert.assertTrue(e.getMessage().contains("ERROR 1018 (42Y27"));
                }
            }
        }
    }

    @Test
    public void testPhoenixRowTimestampWithWildcard() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String dataTableName = generateUniqueName();
            conn.createStatement().execute("create table " + dataTableName
                    + " (pk1 integer not null primary key, x.v1 float, y.v2 float, z.v3 float)"
                    + this.tableDDLOptions);
            conn.createStatement().execute("upsert into " + dataTableName
                    + " values(rand() * 100000000, rand(), rand(), rand())");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery("SELECT v1 from " + dataTableName);
            assertTrue(rs.next());
            float v1 = rs.getFloat(1);
            // Wildcard projection ordered by phoenix_row_timestamp() must surface the same data.
            rs = conn.createStatement().executeQuery(
                    "SELECT * from " + dataTableName + " order by phoenix_row_timestamp()");
            assertTrue(rs.next());
            // fix: removed leftover debug System.out.println(v1)
            assertTrue(v1 == rs.getFloat(2));
        }
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.eas.client.forms.menu;

import com.eas.client.forms.HasChildren;
import com.eas.client.forms.HasComponentEvents;
import com.eas.client.forms.HasJsName;
import com.eas.client.forms.Widget;
import com.eas.client.forms.events.ActionEvent;
import com.eas.client.forms.events.ComponentEvent;
import com.eas.client.forms.events.MouseEvent;
import com.eas.client.forms.events.rt.ControlEventsIProxy;
import com.eas.client.forms.layouts.MarginLayout;
import com.eas.design.Undesignable;
import com.eas.script.AlreadyPublishedException;
import com.eas.script.EventMethod;
import com.eas.script.HasPublished;
import com.eas.script.NoPublisherException;
import com.eas.script.ScriptFunction;
import com.eas.script.Scripts;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Font;
import java.awt.event.FocusEvent;
import java.awt.event.KeyEvent;
import javax.swing.Icon;
import javax.swing.JComponent;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import jdk.nashorn.api.scripting.JSObject;

/**
 * Scriptable menu item widget: a Swing {@link JMenuItem} exposed to the Nashorn
 * scripting runtime. Most accessors below delegate straight to the Swing superclass;
 * the {@code @ScriptFunction} annotations (and their embedded jsDoc strings) define
 * the API published to JavaScript, and {@link ControlEventsIProxy} routes Swing
 * events to script handlers.
 *
 * @author mg
 */
public class MenuItem extends JMenuItem implements HasPublished, HasComponentEvents, HasJsName, Widget {

    public MenuItem(String aText, Icon aIcon) {
        this(aText, aIcon, null);
    }
    // jsDoc published verbatim to script callers of the constructor below.
    private static final String CONSTRUCTOR_JSDOC = ""
            + "/**\n"
            + "* A menu item that can be selected or deselected.\n"
            + "* @param text the text of the component (optional).\n"
            + "* @param icon the icon of the component (optional).\n"
            + "* @param actionPerformed the function for the action performed handler (optional).\n"
            + "*/";

    @ScriptFunction(jsDoc = CONSTRUCTOR_JSDOC, params = {"text", "icon", "actionPerformed"})
    public MenuItem(String aText, Icon aIcon, JSObject aActionPerformedHandler) {
        super(aText, aIcon);
        // NOTE(review): calls an overridable method from a constructor; safe only while
        // subclasses do not override setOnActionPerformed.
        setOnActionPerformed(aActionPerformedHandler);
    }

    public MenuItem(String aText) {
        this(aText, null);
    }

    public MenuItem() {
        this(null, null);
    }

    // ---- Generic widget properties (thin delegates to Swing) ----

    @ScriptFunction(jsDoc = JS_NAME_DOC)
    @Override
    public String getName() { return super.getName(); }

    @ScriptFunction
    @Override
    public void setName(String name) { super.setName(name); }

    // NOTE(review): JComponent.get/setNextFocusableComponent are deprecated Swing APIs,
    // kept here because they back the published script property.
    @ScriptFunction(jsDoc = GET_NEXT_FOCUSABLE_COMPONENT_JSDOC)
    @Override
    public JComponent getNextFocusableComponent() { return (JComponent) super.getNextFocusableComponent(); }

    @ScriptFunction
    @Override
    public void setNextFocusableComponent(JComponent aValue) { super.setNextFocusableComponent(aValue); }

    // Backing store for the script-visible "error" property; only stored and returned
    // here — presumably rendered by the forms runtime elsewhere (TODO confirm).
    protected String errorMessage;

    @ScriptFunction(jsDoc = ERROR_JSDOC)
    @Override
    public String getError() { return errorMessage; }

    @ScriptFunction
    @Override
    public void setError(String aValue) { errorMessage = aValue; }

    @ScriptFunction(jsDoc = BACKGROUND_JSDOC)
    @Override
    public Color getBackground() { return super.getBackground(); }

    @ScriptFunction
    @Override
    public void setBackground(Color aValue) { super.setBackground(aValue); }

    @ScriptFunction(jsDoc = FOREGROUND_JSDOC)
    @Override
    public Color getForeground() { return super.getForeground(); }

    @ScriptFunction
    @Override
    public void setForeground(Color aValue) { super.setForeground(aValue); }

    // Swing exposes these as isVisible/isFocusable/isEnabled/isOpaque; the script API
    // uses get-prefixed names, hence the renamed delegates below.
    @ScriptFunction(jsDoc = VISIBLE_JSDOC)
    @Override
    public boolean getVisible() { return super.isVisible(); }

    @ScriptFunction
    @Override
    public void setVisible(boolean aValue) { super.setVisible(aValue); }

    @ScriptFunction(jsDoc = FOCUSABLE_JSDOC)
    @Override
    public boolean getFocusable() { return super.isFocusable(); }

    @ScriptFunction
    @Override
    public void setFocusable(boolean aValue) { super.setFocusable(aValue); }

    @ScriptFunction(jsDoc = ENABLED_JSDOC)
    @Override
    public boolean getEnabled() { return super.isEnabled(); }

    @ScriptFunction
    @Override
    public void setEnabled(boolean aValue) { super.setEnabled(aValue); }

    @ScriptFunction(jsDoc = TOOLTIP_TEXT_JSDOC)
    @Override
    public String getToolTipText() { return super.getToolTipText(); }

    @ScriptFunction
    @Override
    public void setToolTipText(String aValue) { super.setToolTipText(aValue); }

    @ScriptFunction(jsDoc = OPAQUE_TEXT_JSDOC)
    @Override
    public boolean getOpaque() { return super.isOpaque(); }

    @ScriptFunction
    @Override
    public void setOpaque(boolean aValue) { super.setOpaque(aValue); }

    @ScriptFunction(jsDoc = COMPONENT_POPUP_MENU_JSDOC)
    @Override
    public JPopupMenu getComponentPopupMenu() { return super.getComponentPopupMenu(); }

    @ScriptFunction
    @Override
    public void setComponentPopupMenu(JPopupMenu aMenu) { super.setComponentPopupMenu(aMenu); }

    @ScriptFunction(jsDoc = FONT_JSDOC)
    @Override
    public Font getFont() { return super.getFont(); }

    @ScriptFunction
    @Override
    public void setFont(Font aFont) { super.setFont(aFont); }

    @ScriptFunction(jsDoc = CURSOR_JSDOC)
    @Override
    public Cursor getCursor() { return super.getCursor(); }

    @ScriptFunction
    @Override
    public void setCursor(Cursor aCursor) { super.setCursor(aCursor); }

    // ---- Geometry: left/top also keep MarginLayout margins in sync when applicable ----

    @ScriptFunction(jsDoc = LEFT_JSDOC)
    @Override
    public int getLeft() { return super.getLocation().x; }

    @ScriptFunction
    @Override
    public void setLeft(int aValue) {
        if (super.getParent() != null && super.getParent().getLayout() instanceof MarginLayout) {
            // Keep the layout's stored left margin consistent with the new location.
            MarginLayout.ajustLeft(this, aValue);
        }
        super.setLocation(aValue, getTop());
    }

    @ScriptFunction(jsDoc = TOP_JSDOC)
    @Override
    public int getTop() { return super.getLocation().y; }

    @ScriptFunction
    @Override
    public void setTop(int aValue) {
        if (super.getParent() != null && super.getParent().getLayout() instanceof MarginLayout) {
            // Keep the layout's stored top margin consistent with the new location.
            MarginLayout.ajustTop(this, aValue);
        }
        super.setLocation(getLeft(), aValue);
    }

    @ScriptFunction(jsDoc = WIDTH_JSDOC)
    @Override
    public int getWidth() { return super.getWidth(); }

    @ScriptFunction
    @Override
    public void setWidth(int aValue) { Widget.setWidth(this, aValue); }

    @ScriptFunction(jsDoc = HEIGHT_JSDOC)
    @Override
    public int getHeight() { return super.getHeight(); }

    @ScriptFunction
    @Override
    public void setHeight(int aValue) { Widget.setHeight(this, aValue); }

    @ScriptFunction(jsDoc = FOCUS_JSDOC)
    @Override
    public void focus() { super.requestFocus(); }

    @Override
    public String toString() {
        return String.format("%s [%s]", super.getName() != null ? super.getName() : "", getClass().getSimpleName());
    }
    // Native API

    @ScriptFunction(jsDoc = NATIVE_COMPONENT_JSDOC)
    @Override
    public JComponent getComponent() { return this; }

    // Always null for this Swing widget; the "element" concept presumably belongs to a
    // non-Swing (e.g. HTML) client implementation of the Widget API — TODO confirm.
    @ScriptFunction(jsDoc = NATIVE_ELEMENT_JSDOC)
    @Override
    public Object getElement() { return null; }

    @ScriptFunction(name = "parent", jsDoc = PARENT_JSDOC)
    @Override
    public Widget getParentWidget() {
        java.awt.Container parent = super.getParent();
        // A menu item inside a sub-menu has a JPopupMenu as its Swing parent; report the
        // invoking JMenu instead so the logical widget hierarchy is preserved.
        if (parent instanceof JPopupMenu && ((JPopupMenu) parent).getInvoker() instanceof JMenu) {
            parent = (java.awt.Container) ((JPopupMenu) parent).getInvoker();
        }
        return parent instanceof HasChildren ? (Widget) parent : null;
    }
    private static final String TEXT_JSDOC = ""
            + "/**\n"
            + "* The menu item text.\n"
            + "*/";

    @ScriptFunction(jsDoc = TEXT_JSDOC)
    @Override
    public String getText() { return super.getText(); }

    @ScriptFunction
    @Override
    public void setText(String aValue) { super.setText(aValue); }
    private static final String ICON_JSDOC = ""
            + "/**\n"
            + "* Image picture for the menu item.\n"
            + "*/";

    @ScriptFunction(jsDoc = ICON_JSDOC)
    @Override
    public Icon getIcon() { return super.getIcon(); }

    @ScriptFunction
    @Override
    public void setIcon(Icon aValue) { super.setIcon(aValue); }
    private static final String HORIZONTAL_TEXT_POSITION_JSDOC = ""
            + "/**\n"
            + "* Horizontal position of the text relative to the icon.\n"
            + "*/";

    @ScriptFunction(jsDoc = HORIZONTAL_TEXT_POSITION_JSDOC)
    @Override
    public int getHorizontalTextPosition() { return super.getHorizontalTextPosition(); }

    @ScriptFunction
    @Override
    public void setHorizontalTextPosition(int aValue) { super.setHorizontalTextPosition(aValue); }
    private static final String VERTICAL_TEXT_POSITION_JSDOC = ""
            + "/**\n"
            + "* Vertical position of the text relative to the icon.\n"
            + "*/";

    @ScriptFunction(jsDoc = VERTICAL_TEXT_POSITION_JSDOC)
    @Override
    public int getVerticalTextPosition() { return super.getVerticalTextPosition(); }

    @ScriptFunction
    @Override
    public void setVerticalTextPosition(int aValue) { super.setVerticalTextPosition(aValue); }

    // ---- HasPublished: lazily-created JavaScript wrapper for this widget ----

    // The script-side wrapper object; created on first access via the space's publisher.
    protected JSObject published;

    @Override
    public JSObject getPublished() {
        if (published == null) {
            JSObject publisher = Scripts.getSpace().getPublisher(this.getClass().getName());
            if (publisher == null || !publisher.isFunction()) {
                throw new NoPublisherException();
            }
            published = (JSObject) publisher.call(null, new Object[]{this});
        }
        return published;
    }

    @Override
    public void setPublished(JSObject aValue) {
        // A widget may be published exactly once.
        if (published != null) {
            throw new AlreadyPublishedException();
        }
        published = aValue;
    }

    // ---- Event handlers: JSObject callbacks stored in the proxy, keyed by event id ----
    protected ControlEventsIProxy eventsProxy = new ControlEventsIProxy(this);

    @ScriptFunction(jsDoc = ON_MOUSE_CLICKED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseClicked() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseClicked); }

    @ScriptFunction
    @Override
    public void setOnMouseClicked(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseClicked, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_DRAGGED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseDragged() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseDragged); }

    @ScriptFunction
    @Override
    public void setOnMouseDragged(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseDragged, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_ENTERED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseEntered() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseEntered); }

    @ScriptFunction
    @Override
    public void setOnMouseEntered(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseEntered, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_EXITED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseExited() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseExited); }

    @ScriptFunction
    @Override
    public void setOnMouseExited(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseExited, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_MOVED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseMoved() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseMoved); }

    @ScriptFunction
    @Override
    public void setOnMouseMoved(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseMoved, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_PRESSED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMousePressed() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mousePressed); }

    @ScriptFunction
    @Override
    public void setOnMousePressed(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mousePressed, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_RELEASED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseReleased() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseReleased); }

    @ScriptFunction
    @Override
    public void setOnMouseReleased(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseReleased, aValue); }

    @ScriptFunction(jsDoc = ON_MOUSE_WHEEL_MOVED_JSDOC)
    @EventMethod(eventClass = MouseEvent.class)
    @Undesignable
    @Override
    public JSObject getOnMouseWheelMoved() { return eventsProxy.getHandlers().get(ControlEventsIProxy.mouseWheelMoved); }

    @ScriptFunction
    @Override
    public void setOnMouseWheelMoved(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.mouseWheelMoved, aValue); }

    @ScriptFunction(jsDoc = ON_ACTION_PERFORMED_JSDOC)
    @EventMethod(eventClass = ActionEvent.class)
    @Undesignable
    @Override
    public JSObject getOnActionPerformed() { return eventsProxy.getHandlers().get(ControlEventsIProxy.actionPerformed); }

    @ScriptFunction
    @Override
    public void setOnActionPerformed(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.actionPerformed, aValue); }

    @ScriptFunction(jsDoc = ON_COMPONENT_HIDDEN_JSDOC)
    @EventMethod(eventClass = ComponentEvent.class)
    @Undesignable
    @Override
    public JSObject getOnComponentHidden() { return eventsProxy.getHandlers().get(ControlEventsIProxy.componentHidden); }

    @ScriptFunction
    @Override
    public void setOnComponentHidden(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.componentHidden, aValue); }

    @ScriptFunction(jsDoc = ON_COMPONENT_MOVED_JSDOC)
    @EventMethod(eventClass = ComponentEvent.class)
    @Undesignable
    @Override
    public JSObject getOnComponentMoved() { return eventsProxy.getHandlers().get(ControlEventsIProxy.componentMoved); }

    @ScriptFunction
    @Override
    public void setOnComponentMoved(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.componentMoved, aValue); }

    @ScriptFunction(jsDoc = ON_COMPONENT_RESIZED_JSDOC)
    @EventMethod(eventClass = ComponentEvent.class)
    @Undesignable
    @Override
    public JSObject getOnComponentResized() { return eventsProxy.getHandlers().get(ControlEventsIProxy.componentResized); }

    @ScriptFunction
    @Override
    public void setOnComponentResized(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.componentResized, aValue); }

    @ScriptFunction(jsDoc = ON_COMPONENT_SHOWN_JSDOC)
    @EventMethod(eventClass = ComponentEvent.class)
    @Undesignable
    @Override
    public JSObject getOnComponentShown() { return eventsProxy.getHandlers().get(ControlEventsIProxy.componentShown); }

    @ScriptFunction
    @Override
    public void setOnComponentShown(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.componentShown, aValue); }

    @ScriptFunction(jsDoc = ON_FOCUS_GAINED_JSDOC)
    @EventMethod(eventClass = FocusEvent.class)
    @Undesignable
    @Override
    public JSObject getOnFocusGained() { return eventsProxy.getHandlers().get(ControlEventsIProxy.focusGained); }

    @ScriptFunction
    @Override
    public void setOnFocusGained(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.focusGained, aValue); }

    // NOTE(review): this is the only getter that guards against eventsProxy == null —
    // inconsistent with its siblings; eventsProxy is initialized inline, so the guard
    // appears redundant (TODO confirm no serialization path leaves it null).
    @ScriptFunction(jsDoc = ON_FOCUS_LOST_JSDOC)
    @EventMethod(eventClass = FocusEvent.class)
    @Undesignable
    @Override
    public JSObject getOnFocusLost() { return eventsProxy != null ? eventsProxy.getHandlers().get(ControlEventsIProxy.focusLost) : null; }

    @ScriptFunction
    @Override
    public void setOnFocusLost(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.focusLost, aValue); }

    @ScriptFunction(jsDoc = ON_KEY_PRESSED_JSDOC)
    @EventMethod(eventClass = KeyEvent.class)
    @Undesignable
    @Override
    public JSObject getOnKeyPressed() { return eventsProxy.getHandlers().get(ControlEventsIProxy.keyPressed); }

    @ScriptFunction
    @Override
    public void setOnKeyPressed(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.keyPressed, aValue); }

    @ScriptFunction(jsDoc = ON_KEY_RELEASED_JSDOC)
    @EventMethod(eventClass = KeyEvent.class)
    @Undesignable
    @Override
    public JSObject getOnKeyReleased() { return eventsProxy.getHandlers().get(ControlEventsIProxy.keyReleased); }

    @ScriptFunction
    @Override
    public void setOnKeyReleased(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.keyReleased, aValue); }

    @ScriptFunction(jsDoc = ON_KEY_TYPED_JSDOC)
    @EventMethod(eventClass = KeyEvent.class)
    @Undesignable
    @Override
    public JSObject getOnKeyTyped() { return eventsProxy.getHandlers().get(ControlEventsIProxy.keyTyped); }

    @ScriptFunction
    @Override
    public void setOnKeyTyped(JSObject aValue) { eventsProxy.getHandlers().put(ControlEventsIProxy.keyTyped, aValue); }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.wink.json4j.compat.tests;

import java.io.StringWriter;

import junit.framework.TestCase;

import org.apache.wink.json4j.compat.JSONArray;
import org.apache.wink.json4j.compat.JSONFactory;
import org.apache.wink.json4j.compat.JSONObject;
import org.apache.wink.json4j.compat.JSONWriter;

/**
 * Tests for the basic Java JSONWriter.
 *
 * <p>Positive tests declare {@code throws Exception} so an unexpected failure surfaces
 * directly in the JUnit report (instead of the old swallow-and-assert-null pattern,
 * which reduced every failure to "assertTrue(ex == null)").  Error-condition tests
 * wrap only the call expected to fail.
 */
public class ApacheJSONWriterTest extends TestCase {

    /**
     * Selects the Apache JSON4J factory implementation (via system property) and
     * returns a factory instance.  Shared by every test; previously this setup was
     * copy/pasted into each method.
     */
    private JSONFactory newFactory() {
        System.setProperty("org.apache.wink.common.model.json.factory.impl",
                "org.apache.wink.json4j.compat.impl.ApacheJSONFactory");
        return JSONFactory.newInstance();
    }

    /** Asserts the written text re-parses as a JSONObject and matches the expected literal. */
    private void assertObjectText(JSONFactory factory, StringWriter w, String expected) throws Exception {
        String str = w.toString();
        JSONObject parsed = factory.createJSONObject(str); // must re-parse cleanly
        assertNotNull(parsed);
        assertEquals(expected, str);
    }

    /** Asserts the written text re-parses as a JSONArray and matches the expected literal. */
    private void assertArrayText(JSONFactory factory, StringWriter w, String expected) throws Exception {
        String str = w.toString();
        JSONArray parsed = factory.createJSONArray(str); // must re-parse cleanly
        assertNotNull(parsed);
        assertEquals(expected, str);
    }

    /**
     * Test the constructor.
     */
    public void test_new() {
        StringWriter w = new StringWriter();
        JSONWriter jWriter = newFactory().createJSONWriter(w);
        assertNotNull(jWriter);
    }

    /**
     * Test writing an empty object via object()/endObject().
     */
    public void test_WriteEmptyObject() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.endObject();
        assertObjectText(factory, w, "{}");
    }

    /**
     * Test that close() after object() implicitly terminates the open object.
     */
    public void test_WriteEmptyObjectClose() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.close();
        assertObjectText(factory, w, "{}");
    }

    /**
     * Test writing an empty array via array()/endArray().
     */
    public void test_WriteEmptyArray() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.array();
        jWriter.endArray();
        assertArrayText(factory, w, "[]");
    }

    /**
     * Test that close() after array() implicitly terminates the open array.
     */
    public void test_WriteEmptyArrayClose() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.array();
        jWriter.close();
        assertArrayText(factory, w, "[]");
    }

    /**
     * Test a simple object with a key + value of string.
     */
    public void test_WriteObjectAttrString() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value("bar");
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":\"bar\"}");
    }

    /**
     * Test a simple object with a key + value of int.
     */
    public void test_WriteObjectAttrInt() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value(1);
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":1}");
    }

    /**
     * Test a simple object with a key + value of long.
     */
    public void test_WriteObjectAttrLong() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value((long) 1);
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":1}");
    }

    /**
     * Test a simple object with a key + value of short.
     */
    public void test_WriteObjectAttrShort() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value((short) 1);
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":1}");
    }

    /**
     * Test a simple object with a key + value of double.
     */
    public void test_WriteObjectAttrDouble() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value((double) 100.959);
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":100.959}");
    }

    /**
     * Test a simple object with a key + value of boolean.
     */
    public void test_WriteObjectAttrBoolean() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.value(true);
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":true}");
    }

    /**
     * Test a simple object with a nested object written via object()/endObject().
     */
    public void test_WriteObjectAttrObject() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.object();
        jWriter.key("foo");
        jWriter.value(true);
        jWriter.endObject();
        jWriter.endObject();
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":{\"foo\":true}}");
    }

    /**
     * Test a simple object with a nested array written via array()/endArray().
     */
    public void test_WriteObjectAttrArray() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        jWriter.array();
        jWriter.value(true);
        jWriter.endArray();
        jWriter.endObject();
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":[true]}");
    }

    /**
     * Test that a pre-built JSONObject can be passed directly to value().
     */
    public void test_WriteObjectAttrJSONObject() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        // Verify we can put a JSONObject into the stream!
        JSONObject jObj = factory.createJSONObject();
        jObj.put("foo", true);
        jWriter.value(jObj);
        jWriter.endObject();
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":{\"foo\":true}}");
    }

    /**
     * Test that a pre-built JSONArray can be passed directly to value().
     */
    public void test_WriteObjectAttrJSONArray() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("foo");
        // Verify we can put a JSONArray into the stream!
        JSONArray jArray = factory.createJSONArray();
        jArray.put(true);
        jWriter.value(jArray);
        jWriter.endObject();
        jWriter.close();
        assertObjectText(factory, w, "{\"foo\":[true]}");
    }

    /**
     * Test an object with multiple keys of varying types, including nested
     * object and array values.
     */
    public void test_WriteObjectComplex() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.object();
        jWriter.key("string");
        jWriter.value("String1");
        jWriter.key("bool");
        jWriter.value(false);
        jWriter.key("number");
        jWriter.value(1);
        // Place an object
        jWriter.key("object");
        jWriter.object();
        jWriter.key("string");
        jWriter.value("String2");
        jWriter.endObject();
        // Place an array
        jWriter.key("array");
        jWriter.array();
        jWriter.value(1);
        jWriter.value((double) 2);
        jWriter.value((short) 3);
        jWriter.endArray();
        // Close top object.
        jWriter.endObject();
        jWriter.close();
        assertObjectText(factory, w,
                "{\"string\":\"String1\",\"bool\":false,\"number\":1,\"object\":{\"string\":\"String2\"},\"array\":[1,2.0,3]}");
    }

    /**
     * Test an array with multiple entries of varying types, including nested
     * object and array values.
     */
    public void test_WriteArrayComplex() throws Exception {
        StringWriter w = new StringWriter();
        JSONFactory factory = newFactory();
        JSONWriter jWriter = factory.createJSONWriter(w);
        jWriter.array();
        jWriter.value("String1");
        jWriter.value(false);
        jWriter.value(1);
        // Place an object
        jWriter.object();
        jWriter.key("string");
        jWriter.value("String2");
        jWriter.endObject();
        // Place an array
        jWriter.array();
        jWriter.value(1);
        jWriter.value((double) 2);
        jWriter.value((short) 3);
        jWriter.endArray();
        // Close top array.
        jWriter.endArray();
        jWriter.close();
        assertArrayText(factory, w,
                "[\"String1\",false,1,{\"string\":\"String2\"},[1,2.0,3]]");
    }

    /*******************************/
    /*All the error condition tests*/
    /*******************************/

    /**
     * Test that setting a value in an object without defining its key fails.
     */
    public void test_ObjectNoKeyValueFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        jWriter.object();
        try {
            jWriter.value(true);
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // a value inside an object must be preceded by a key
        }
    }

    /**
     * Test that setting a value without a key (after another key/value was set) fails.
     */
    public void test_ObjectKeyValueNoKeyValueFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        jWriter.object();
        jWriter.key("foo");
        jWriter.value(true);
        try {
            // This should die with IllegalStateException...
            jWriter.value(false);
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // second value needs its own key
        }
    }

    /**
     * Test that setting a key while not in an object fails.
     */
    public void test_NoObjectKeyFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        try {
            // This should die.
            jWriter.key("foo");
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // keys are only legal inside an object
        }
    }

    /**
     * Test that setting a value while not in an array or object fails.
     */
    public void test_NoObjectValueFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        try {
            // This should die.
            jWriter.value("foo");
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // values are only legal inside an object or array
        }
    }

    /**
     * Test that trying to set keys while in an array fails.
     */
    public void test_ArrayKeyFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        jWriter.array();
        try {
            // This should die.
            jWriter.key("foo");
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // keys are not legal inside an array
        }
    }

    /**
     * Test that trying operations after the writer has been closed fails.
     */
    public void test_OptsAfterCloseFail() throws Exception {
        JSONWriter jWriter = newFactory().createJSONWriter(new StringWriter());
        jWriter.array();
        jWriter.close();
        try {
            // This should die.
            jWriter.endArray();
            fail("Expected IllegalStateException");
        } catch (IllegalStateException expected) {
            // writer is closed; all operations must fail
        }
    }
}
package au.com.codeka.planetrender;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import au.com.codeka.common.Colour;
import au.com.codeka.common.Image;
import au.com.codeka.common.Vector3;

/**
 * The {@see PlanetGenerator} uses this class to render a single planet image. It may (or may not)
 * then combine multiple planet images into on (e.g. for asteroids).
 */
public class SinglePlanetGenerator {
    private double mPlanetRadius;
    private Vector3 mPlanetOrigin;
    private double mAmbient;
    private Vector3 mSunOrigin;
    private TextureGenerator mTexture;
    private Vector3 mNorth;
    private List<Atmosphere> mAtmospheres;
    private RayWarper mRayWarper;

    public SinglePlanetGenerator(Template.PlanetTemplate tmpl, Random rand) {
        // The planet origin is a random interpolation between the template's from/to bounds.
        mPlanetOrigin = Vector3.pool.borrow().reset(tmpl.getOriginFrom());
        Vector3.interpolate(mPlanetOrigin, tmpl.getOriginTo(), rand.nextDouble());

        Template.WarpTemplate warpTemplate = tmpl.getParameter(Template.WarpTemplate.class);
        if (warpTemplate != null) {
            mRayWarper = new RayWarper(warpTemplate, rand);
        }

        // Without a texture template there is nothing to render: leave mTexture null and
        // bail out early; render() checks for this.
        Template.TextureTemplate textureTemplate = tmpl.getParameter(Template.TextureTemplate.class);
        if (textureTemplate == null) {
            return;
        }
        mTexture = new TextureGenerator(textureTemplate, rand);

        mSunOrigin = tmpl.getSunLocation();
        mAmbient = tmpl.getAmbient();
        mPlanetRadius = tmpl.getPlanetSize();

        List<Template.AtmosphereTemplate> atmosphereTemplates =
                tmpl.getParameters(Template.AtmosphereTemplate.class);
        if (atmosphereTemplates != null && atmosphereTemplates.size() > 0) {
            mAtmospheres = new ArrayList<Atmosphere>();
            for (Template.AtmosphereTemplate atmosphereTemplate : atmosphereTemplates) {
                Atmosphere.getAtmospheres(mAtmospheres, atmosphereTemplate, rand);
            }
        }

        // "North" axis (randomly interpolated, then normalized) orients the texture mapping.
        mNorth = Vector3.pool.borrow().reset(tmpl.getNorthFrom());
        Vector3.interpolate(mNorth, tmpl.getNorthTo(), rand.nextDouble());
        mNorth.normalize();
    }

    /**
     * Renders a planet into the given \c Image.
     */
    public void render(Image img) {
        if (mTexture == null) {
            // No texture template was supplied to the constructor; nothing to draw.
            return;
        }
        for (int y = 0; y < img.getHeight(); y++) {
            for (int x = 0; x < img.getWidth(); x++) {
                // Map pixel coordinates into the (-0.5, +0.5) range getPixelColour expects.
                double nx = ((double) x / (double) img.getWidth()) - 0.5;
                double ny = ((double) y / (double) img.getHeight()) - 0.5;

                Colour c = getPixelColour(nx, ny);
                img.setPixelColour(x, y, c);
                Colour.pool.release(c);
            }
        }
    }

    /**
     * Computes the colour of the pixel at (x,y) where each coordinate is defined to
     * be in the range (-0.5, +0.5).
     *
     * @param x The x-coordinate, between -0.5 and +0.5.
     * @param y The y-coordinate, between -0.5 and +0.5.
     * @return The colour at the given pixel. The caller is responsible for releasing
     *         the returned \c Colour back to its pool.
     */
    public Colour getPixelColour(double x, double y) {
        Colour c = Colour.pool.borrow().reset(Colour.TRANSPARENT);

        // Cast a ray from the eye (origin) through the pixel; optionally warp it first.
        Vector3 ray = Vector3.pool.borrow().reset(x, -y, 1.0);
        if (mRayWarper != null) {
            mRayWarper.warp(ray, x, y);
        }
        ray.normalize();

        Vector3 intersection = raytrace(ray);
        if (intersection != null) {
            // we intersected with the planet. Now we need to work out the colour at this point
            // on the planet.
            Colour t = queryTexture(intersection);
            double intensity = lightSphere(intersection);
            c.reset(1.0, t.r * intensity, t.g * intensity, t.b * intensity);
            Colour.pool.release(t);

            if (mAtmospheres != null) {
                Vector3 surfaceNormal = Vector3.pool.borrow().reset(intersection);
                surfaceNormal.subtract(mPlanetOrigin);
                surfaceNormal.normalize();

                Vector3 sunDirection = Vector3.pool.borrow().reset(mSunOrigin);
                sunDirection.subtract(intersection);
                sunDirection.normalize();

                final int numAtmospheres = mAtmospheres.size();
                for (int i = 0; i < numAtmospheres; i++) {
                    final Atmosphere atmosphere = mAtmospheres.get(i);
                    Colour atmosphereColour = atmosphere.getInnerPixelColour(x + 0.5, y + 0.5,
                            intersection, surfaceNormal, sunDirection, mNorth);
                    if (atmosphere.getBlendMode() == Template.AtmosphereTemplate.BlendMode.Alpha) {
                        Colour.blend(c, atmosphereColour);
                    } else {
                        Colour.add(c, atmosphereColour);
                    }
                    Colour.pool.release(atmosphereColour);
                }

                Vector3.pool.release(surfaceNormal);
                Vector3.pool.release(sunDirection);
            }
        } else if (mAtmospheres != null) {
            // if we're rendering an atmosphere, we need to work out the distance of this ray
            // to the planet's surface
            double u = Vector3.dot(mPlanetOrigin, ray);
            Vector3 closest = Vector3.pool.borrow().reset(ray);
            closest.scale(u);

            double distance = (Vector3.distanceBetween(closest, mPlanetOrigin) - mPlanetRadius);

            Vector3 surfaceNormal = Vector3.pool.borrow().reset(closest);
            surfaceNormal.subtract(mPlanetOrigin);
            surfaceNormal.normalize();

            Vector3 sunDirection = Vector3.pool.borrow().reset(mSunOrigin);
            sunDirection.subtract(closest);
            sunDirection.normalize();

            final int numAtmospheres = mAtmospheres.size();
            for (int i = 0; i < numAtmospheres; i++) {
                final Atmosphere atmosphere = mAtmospheres.get(i);
                Colour atmosphereColour = atmosphere.getOuterPixelColour(x + 0.5, y + 0.5,
                        surfaceNormal, distance, sunDirection, mNorth);
                if (atmosphere.getBlendMode() == Template.AtmosphereTemplate.BlendMode.Alpha) {
                    Colour.blend(c, atmosphereColour);
                } else {
                    Colour.add(c, atmosphereColour);
                }
                Colour.pool.release(atmosphereColour);
            }

            Vector3.pool.release(closest);
            Vector3.pool.release(surfaceNormal);
            Vector3.pool.release(sunDirection);
        }

        Vector3.pool.release(ray);
        if (intersection != null) {
            // Fix: only hand the vector back to the pool when raytrace() actually produced
            // one; previously null was passed to release() for every non-planet pixel.
            Vector3.pool.release(intersection);
        }
        return c;
    }

    /**
     * Query the texture for the colour at the given intersection (in 3D space).
     *
     * Standard sphere-to-UV mapping around the mNorth axis.
     * NOTE(review): when the intersection is exactly at a pole, sin(phi) == 0 and theta
     * becomes NaN -- presumably the texture generator tolerates this; confirm.
     */
    private Colour queryTexture(Vector3 intersection) {
        Vector3 Vn = mNorth;
        Vector3 Ve = Vector3.pool.borrow().reset(Vn.y, -Vn.x, 0.0); // (AKA Vn.cross(0, 0, 1))
        Vector3 Vp = Vector3.pool.borrow().reset(intersection);
        Vp.subtract(mPlanetOrigin);

        Ve.normalize();
        Vp.normalize();

        double phi = Math.acos(-1.0 * Vector3.dot(Vn, Vp));
        double v = phi / Math.PI;

        double theta = (Math.acos(Vector3.dot(Vp, Ve) / Math.sin(phi))) / (Math.PI * 2.0);
        double u;

        // Choose which half of the sphere we're on to disambiguate acos().
        Vector3 c = Vector3.cross(Vn, Ve);
        if (Vector3.dot(c, Vp) > 0) {
            u = theta;
        } else {
            u = 1.0 - theta;
        }
        Vector3.pool.release(c);
        Vector3.pool.release(Ve);
        Vector3.pool.release(Vp);

        return mTexture.getTexel(u, v);
    }

    /**
     * Calculates light intensity from the sun.
     *
     * @param intersection Point where the ray we're currently tracing intersects with the planet.
     * @return Intensity clamped to [mAmbient, 1.0].
     */
    private double lightSphere(Vector3 intersection) {
        Vector3 surfaceNormal = Vector3.pool.borrow().reset(intersection);
        surfaceNormal.subtract(mPlanetOrigin);
        surfaceNormal.normalize();

        double intensity = diffuse(surfaceNormal, intersection);
        // Never darker than the ambient level, never brighter than full.
        intensity = Math.max(mAmbient, Math.min(1.0, intensity));

        Vector3.pool.release(surfaceNormal);
        return intensity;
    }

    /**
     * Calculates diffuse lighting at the given point with the given normal.
     *
     * @param normal Normal at the point we're calculating diffuse lighting for.
     * @param point The point at which we're calculating diffuse lighting.
     * @return The diffuse factor of lighting (cosine of the angle to the light).
     */
    private double diffuse(Vector3 normal, Vector3 point) {
        Vector3 directionToLight = Vector3.pool.borrow().reset(mSunOrigin);
        directionToLight.subtract(point);
        directionToLight.normalize();
        double factor = Vector3.dot(normal, directionToLight);
        Vector3.pool.release(directionToLight);
        return factor;
    }

    /**
     * Traces a ray along the given direction. We assume the origin is (0,0,0) (i.e. the eye).
     *
     * @param direction The direction of the ray we're going to trace.
     * @return A \c Vector3 representing the point in space where we intersect with the planet,
     *         or \c null if there's no intersection.
     */
    private Vector3 raytrace(Vector3 direction) {
        // intersection of a sphere and a line: solve the quadratic a*t^2 + b*t + c = 0
        // for the ray parameter t.
        final double a = Vector3.dot(direction, direction);
        Vector3 blah = Vector3.pool.borrow().reset(mPlanetOrigin);
        blah.scale(-1);
        final double b = 2.0 * Vector3.dot(direction, blah);
        Vector3.pool.release(blah);
        final double c = Vector3.dot(mPlanetOrigin, mPlanetOrigin) - (mPlanetRadius * mPlanetRadius);
        final double d = (b * b) - (4.0 * a * c);

        if (d > 0.0) {
            // Pick the near or far root depending on whether the eye is inside the sphere.
            double sign = (c < -0.00001) ? 1.0 : -1.0;
            double distance = (-b + (sign * Math.sqrt(d))) / (2.0 * a);
            Vector3 intersection = Vector3.pool.borrow().reset(direction);
            intersection.scale(distance);
            return intersection;
        } else {
            return null;
        }
    }
}
/* 
 * Copyright 2015 Torridity. 
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"); 
 * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at 
 * 
 * http://www.apache.org/licenses/LICENSE-2.0 
 * 
 * Unless required by applicable law or agreed to in writing, software 
 * distributed under the License is distributed on an "AS IS" BASIS, 
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 * See the License for the specific language governing permissions and 
 * limitations under the License. 
 */
package de.tor.tribes.ui.renderer.map;

import de.tor.tribes.io.WorldDecorationHolder;
import de.tor.tribes.types.ext.BarbarianAlly;
import de.tor.tribes.types.ext.Barbarians;
import de.tor.tribes.types.Marker;
import de.tor.tribes.types.ext.Village;
import de.tor.tribes.ui.windows.DSWorkbenchMainFrame;
import de.tor.tribes.ui.panels.MapPanel;
import de.tor.tribes.util.Constants;
import de.tor.tribes.util.GlobalOptions;
import de.tor.tribes.util.ImageUtils;
import de.tor.tribes.util.ServerSettings;
import de.tor.tribes.util.Skin;
import de.tor.tribes.util.mark.MarkerManager;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.awt.image.BufferedImage;
import java.util.HashMap;

/**
 * Renders the base map layer (village sprites, terrain decoration and tribe/ally
 * marker colors) into an internal off-screen buffer.
 *
 * <p>The renderer is incremental: on a map move it copies the still-valid part of the
 * previous buffer ({@link #performCopy}) and only re-renders the newly exposed rows and
 * columns; a full re-render happens only when required (reset, size change). Per render
 * pass it also caches the on-buffer bounds of each already-drawn texture/marker so that
 * repeated fields can be reproduced via {@code Graphics2D.copyArea} instead of a fresh
 * sprite draw.
 *
 * <p>NOTE(review): the class keeps mutable render state ({@code mLayer}, {@code mapPos},
 * the bounds caches) without synchronization — presumably it is only used from the
 * rendering thread; confirm before calling from elsewhere.
 *
 * @author Torridity
 */
public class MapLayerRenderer extends AbstractBufferedLayerRenderer {

    // Off-screen buffer holding the fully rendered layer; null until first render
    // or after a reset invalidated it.
    private BufferedImage mLayer = null;
    // textureId -> bounds of the first field drawn with that texture in the current
    // pass; later fields with the same texture are copyArea'd from here.
    private HashMap<Integer, Rectangle> renderedSpriteBounds = null;
    // tribeId -> bounds of the first marker drawn for that tribe in the current pass.
    private HashMap<Integer, Rectangle> renderedMarkerBounds = null;
    // Map position (in village coordinates) the current buffer content corresponds to.
    private Point mapPos = null;
    // If true, markers are alpha-blended ON TOP of village sprites; otherwise the
    // marker layer is drawn first and villages on top of it.
    private boolean bMarkOnTop = false;
    // Set by reset(); triggers a full re-render on the next performRendering call.
    private boolean shouldReset = true;
    // Whether the map position changed since the last pass.
    private boolean moved = true;

    /**
     * Switches between mark-on-top mode (markers blended over villages) and the
     * default mode (villages drawn over the marker layer).
     *
     * @param pValue true to draw markers on top
     */
    public void setMarkOnTop(boolean pValue) {
        bMarkOnTop = pValue;
    }

    /** @return true if markers are rendered on top of village sprites */
    public boolean isMarkOnTop() {
        return bMarkOnTop;
    }

    /**
     * Renders this layer into {@code pG2d}.
     *
     * <p>Flow: (1) honor a pending reset — force a full render and drop the buffer if its
     * size no longer fits the map panel / field geometry; (2) detect whether the map
     * moved; (3) on move or full render, draw the required rows (and, for an incremental
     * update, columns) into {@code mLayer}, reusing the old buffer content via
     * {@link #performCopy}; (4) blit {@code mLayer} to the target and draw the
     * continent/sector grid over it.
     *
     * @param pSettings geometry and viewport info for this pass
     * @param pG2d      target graphics to draw the finished layer into
     */
    @Override
    public void performRendering(RenderSettings pSettings, Graphics2D pG2d) {
        if (shouldReset) {
            setFullRenderRequired(true);
            shouldReset = false;
            mapPos = null;
            // Drop the buffer if it is smaller than the panel, much larger than the
            // panel (more than 100px slack), or too small for the visible field grid.
            if (mLayer != null
                    && (MapPanel.getSingleton().getWidth() > mLayer.getWidth()
                    || MapPanel.getSingleton().getWidth() < mLayer.getWidth() - 100
                    || MapPanel.getSingleton().getHeight() > mLayer.getHeight()
                    || MapPanel.getSingleton().getHeight() < mLayer.getHeight() - 100
                    || MapPanel.getSingleton().getWidth() < pSettings.getFieldWidth() * pSettings.getVillagesInX()
                    || MapPanel.getSingleton().getHeight() < pSettings.getFieldHeight() * pSettings.getVillagesInY())) {
                mLayer.flush();
                mLayer = null;
            }
        }
        Graphics2D g2d = null;
        // Determine whether the map position changed since the buffer was filled.
        if (mapPos != null && mLayer != null && !isFullRenderRequired()) {
            Point newMapPos = new Point((int) Math.floor(pSettings.getMapBounds().getX()), (int) Math.floor(pSettings.getMapBounds().getY()));
            if (mapPos.distance(newMapPos) != 0) {
                moved = true;
            } else {
                moved = false;
            }
        } else {
            moved = true;
        }
        AffineTransform trans = AffineTransform.getTranslateInstance(0, 0);
        if (mapPos == null) {
            mapPos = new Point((int) Math.floor(pSettings.getMapBounds().getX()), (int) Math.floor(pSettings.getMapBounds().getY()));
        }
        BufferedImage img = null;
        if (moved || isFullRenderRequired()) {
            if (isFullRenderRequired()) {
                if (mLayer == null) {
                    try {
                        mLayer = ImageUtils.createCompatibleBufferedImage(pSettings.getVillagesInX() * pSettings.getFieldWidth(), pSettings.getVillagesInY() * pSettings.getFieldHeight(), Transparency.OPAQUE);
                    } catch (Exception e) {
                        // Buffer allocation failed (e.g. out of memory); skip this pass.
                        mLayer = null;
                        return;
                    }
                }
                g2d = (Graphics2D) mLayer.getGraphics();
                // Full render: every visible row must be rendered.
                pSettings.setRowsToRender(pSettings.getVillagesInY());
                mapPos = new Point((int) Math.floor(pSettings.getMapBounds().getX()), (int) Math.floor(pSettings.getMapBounds().getY()));
            } else {
                //copy existing data to new location
                g2d = (Graphics2D) mLayer.getGraphics();
                performCopy(pSettings, g2d);
            }
            ImageUtils.setupGraphics(g2d);
            // Fresh copy-rect caches for this pass.
            renderedSpriteBounds = new HashMap<Integer, Rectangle>();
            renderedMarkerBounds = new HashMap<Integer, Rectangle>();
            // Base image: villages if markers go on top, otherwise the marker layer.
            if (isMarkOnTop()) {
                img = renderVillageRows(pSettings);
            } else {
                img = renderMarkerRows(pSettings);
            }
            Graphics2D ig2d = (Graphics2D) img.getGraphics();
            // Marker transparency is a user setting in percent (default 80).
            int val = GlobalOptions.getProperties().getInt("map.marker.transparency", 80);
            float transparency = (float) val / 100.0f;
            if (isMarkOnTop()) {
                // Blend the marker layer over the villages with the configured alpha.
                Composite c = ig2d.getComposite();
                ig2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, transparency));
                ig2d.drawImage(renderMarkerRows(pSettings), 0, 0, null);
                ig2d.setComposite(c);
            } else {
                ig2d.drawImage(renderVillageRows(pSettings), 0, 0, null);
            }
            // Negative row count means the new rows appear at the bottom edge.
            if (pSettings.getRowsToRender() < 0) {
                trans.setToTranslation(0, (pSettings.getVillagesInY() + pSettings.getRowsToRender()) * pSettings.getFieldHeight());
            }
            g2d.drawRenderedImage(img, trans);
            img.flush();
            if (isFullRenderRequired()) {
                //everything was rendered, skip col rendering
                setFullRenderRequired(false);
            } else {
                // Incremental update: also render the newly exposed columns.
                renderedSpriteBounds = new HashMap<Integer, Rectangle>();
                renderedMarkerBounds = new HashMap<Integer, Rectangle>();
                if (isMarkOnTop()) {
                    img = renderVillageColumns(pSettings);
                } else {
                    img = renderMarkerColumns(pSettings);
                }
                ig2d = (Graphics2D) img.getGraphics();
                if (isMarkOnTop()) {
                    Composite c = ig2d.getComposite();
                    ig2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, transparency));
                    ig2d.drawImage(renderMarkerColumns(pSettings), 0, 0, null);
                    ig2d.setComposite(c);
                } else {
                    ig2d.drawImage(renderVillageColumns(pSettings), 0, 0, null);
                }
                trans = AffineTransform.getTranslateInstance(0, 0);
                // Negative column count means the new columns appear at the right edge.
                if (pSettings.getColumnsToRender() < 0) {
                    trans.setToTranslation((pSettings.getVillagesInX() + pSettings.getColumnsToRender()) * pSettings.getFieldWidth(), 0);
                }
                g2d.drawRenderedImage(img, trans);
            }
            ig2d.dispose();
            g2d.dispose();
            img.flush();
        }
        // Blit the (possibly unchanged) buffer at the sub-field scroll offset and
        // draw the continent/sector grid directly on the target.
        trans = AffineTransform.getTranslateInstance(pSettings.getDeltaX(), pSettings.getDeltaY());
        pG2d.drawRenderedImage(mLayer, trans);
        drawContinents(pSettings, pG2d);
        pSettings.setLayerVisible(true);
    }

    /** @return true if the map position changed during the last rendering pass */
    public boolean hasMoved() {
        return moved;
    }

    /**
     * Shifts the existing buffer content by the map movement delta so that only the
     * newly exposed rows/columns need re-rendering. Updates {@link #mapPos} to the
     * new position.
     *
     * @param pSettings provides the new map bounds and field geometry
     * @param pG2D      graphics of the layer buffer to shift
     */
    private void performCopy(RenderSettings pSettings, Graphics2D pG2D) {
        Point newMapPos = new Point((int) Math.floor(pSettings.getMapBounds().getX()), (int) Math.floor(pSettings.getMapBounds().getY()));
        // Movement in whole fields since the buffer was last aligned.
        int fieldsX = newMapPos.x - mapPos.x;
        int fieldsY = newMapPos.y - mapPos.y;
        mapPos = (Point) newMapPos.clone();
        //set new map position
        // Shift opposite to the movement direction, in pixels.
        pG2D.copyArea(0, 0, mLayer.getWidth(), mLayer.getHeight(), -fieldsX * pSettings.getFieldWidth(), -fieldsY * pSettings.getFieldHeight());
    }

    /**
     * Draws the sector grid (every 5 fields, thin black) and continent grid (every
     * {@code contSpacing} fields, thicker yellow) over the rendered layer, honoring
     * the "show.sectors" and "map.showcontinents" user settings.
     *
     * @param pSettings geometry/viewport info
     * @param pG2d      target graphics (the final output, not the buffer)
     */
    private void drawContinents(RenderSettings pSettings, Graphics2D pG2d) {
        boolean showSectors = false;
        try {
            showSectors = Boolean.parseBoolean(GlobalOptions.getProperty("show.sectors"));
        } catch (Exception e) {
            // Property missing/unreadable: default to hidden.
            showSectors = false;
        }
        boolean showContinents = false;
        try {
            showContinents = Boolean.parseBoolean(GlobalOptions.getProperty("map.showcontinents"));
        } catch (Exception e) {
            showContinents = false;
        }
        //draw continents and sectors
        if (mapPos == null) {
            return;
        }
        // Continent spacing depends on the server coordinate system (100 or 50 fields).
        int contSpacing = 100;
        if (ServerSettings.getSingleton().getCoordType() != 2) {
            contSpacing = 50;
        }
        int fieldHeight = pSettings.getFieldHeight();
        int fieldWidth = pSettings.getFieldWidth();
        //draw vertical borders
        for (int i = mapPos.x; i < mapPos.x + pSettings.getVillagesInX() + 1; i++) {
            if (i % 5 == 0) {
                boolean draw = true;
                if (i % contSpacing == 0) {
                    if (!showContinents) {
                        //draw = false;
                        //draw sector line instead
                        pG2d.setStroke(new BasicStroke(0.5f));
                        pG2d.setColor(Color.BLACK);
                    } else {
                        pG2d.setStroke(new BasicStroke(1.0f));
                        pG2d.setColor(Color.YELLOW);
                    }
                } else {
                    if (!showSectors) {
                        draw = false;
                    } else {
                        pG2d.setStroke(new BasicStroke(0.5f));
                        pG2d.setColor(Color.BLACK);
                    }
                }
                if (draw) {
                    pG2d.drawLine((i - mapPos.x) * fieldWidth + (int) Math.floor(pSettings.getDeltaX()), 0, (i - mapPos.x) * fieldWidth + (int) Math.floor(pSettings.getDeltaX()), pSettings.getVillagesInY() * (fieldHeight + 1));
                }
            }
        }
        //draw horizontal borders
        for (int i = mapPos.y; i < mapPos.y + pSettings.getVillagesInY() + 1; i++) {
            if (i % 5 == 0) {
                boolean draw = true;
                if (i % contSpacing == 0) {
                    if (!showContinents) {
                        //draw = false;
                        //draw sector line instead
                        pG2d.setStroke(new BasicStroke(0.5f));
                        pG2d.setColor(Color.BLACK);
                    } else {
                        //draw continent line
                        pG2d.setStroke(new BasicStroke(1.0f));
                        pG2d.setColor(Color.YELLOW);
                    }
                } else {
                    if (!showSectors) {
                        draw = false;
                    } else {
                        pG2d.setStroke(new BasicStroke(0.5f));
                        pG2d.setColor(Color.BLACK);
                    }
                }
                if (draw) {
                    pG2d.drawLine(0, (i - mapPos.y) * fieldHeight + (int) Math.floor(pSettings.getDeltaY()), (pSettings.getVillagesInX() + 1) * fieldWidth, (i - mapPos.y) * fieldHeight + (int) Math.floor(pSettings.getDeltaY()));
                }
            }
        }
    }

    /**
     * Renders the village-sprite layer for the horizontal strip of newly exposed rows
     * ({@code pSettings.getRowsToRender()}; a negative value selects rows at the
     * bottom of the viewport).
     *
     * @param pSettings geometry/viewport info
     * @return a BITMASK-transparency image covering the strip; caller must flush it
     */
    private BufferedImage renderVillageRows(RenderSettings pSettings) {
        //create new buffer for rendering
        BufferedImage newRows = ImageUtils.createCompatibleBufferedImage(pSettings.getVillagesInX() * pSettings.getFieldWidth(), Math.abs(pSettings.getRowsToRender()) * pSettings.getFieldHeight(), Transparency.BITMASK);
        //calculate first row that will be rendered
        int firstRow = (pSettings.getRowsToRender() > 0) ? 0 : pSettings.getVillagesInY() - Math.abs(pSettings.getRowsToRender());
        Graphics2D g2d = (Graphics2D) newRows.getGraphics();
        ImageUtils.setupGraphics(g2d);
        boolean showBarbarian = true;
        try {
            showBarbarian = Boolean.parseBoolean(GlobalOptions.getProperty("show.barbarian"));
        } catch (Exception e) {
            showBarbarian = true;
        }
        boolean markedOnly = false;
        //iterate through entire row
        int cnt = 0;
        boolean useDecoration = true;
        if (!WorldDecorationHolder.isValid()) {
            //use decoration if skin field size equals the world skin size
            useDecoration = false;
        }
        for (int x = 0; x < pSettings.getVillagesInX(); x++) {
            //iterate from first row for 'pRows' times
            for (int y = firstRow; y < firstRow + Math.abs(pSettings.getRowsToRender()); y++) {
                cnt++;
                Village v = pSettings.getVisibleVillage(x, y);
                int row = y - firstRow;
                int col = x;
                int globalCol = colToGlobalPosition(pSettings, col);
                int globalRow = rowToGlobalPosition(pSettings, y);
                renderVillageField(v, row, col, globalRow, globalCol, pSettings.getFieldWidth(), pSettings.getFieldHeight(), pSettings.getZoom(), useDecoration, showBarbarian, markedOnly, g2d);
            }
        }
        g2d.dispose();
        return newRows;
    }

    /**
     * Renders the marker-color layer for the strip of newly exposed rows. The image is
     * TRANSLUCENT in mark-on-top mode (it will be alpha-blended over the villages) and
     * OPAQUE otherwise (villages are drawn over it).
     *
     * @param pSettings geometry/viewport info
     * @return the marker strip image; caller must flush it
     */
    private BufferedImage renderMarkerRows(RenderSettings pSettings) {
        //create new buffer for rendering
        BufferedImage newRows = null;
        if (isMarkOnTop()) {
            newRows = ImageUtils.createCompatibleBufferedImage(pSettings.getVillagesInX() * pSettings.getFieldWidth(), Math.abs(pSettings.getRowsToRender()) * pSettings.getFieldHeight(), Transparency.TRANSLUCENT);
        } else {
            newRows = ImageUtils.createCompatibleBufferedImage(pSettings.getVillagesInX() * pSettings.getFieldWidth(), Math.abs(pSettings.getRowsToRender()) * pSettings.getFieldHeight(), Transparency.OPAQUE);
        }
        //calculate first row that will be rendered
        int firstRow = (pSettings.getRowsToRender() > 0) ? 0 : pSettings.getVillagesInY() - Math.abs(pSettings.getRowsToRender());
        Graphics2D g2d = (Graphics2D) newRows.getGraphics();
        ImageUtils.setupGraphics(g2d);
        //iterate through entire row
        int cnt = 0;
        boolean useDecoration = true;
        if (!WorldDecorationHolder.isValid()) {
            //use decoration if skin field size equals the world skin size
            useDecoration = false;
        }
        for (int x = 0; x < pSettings.getVillagesInX(); x++) {
            //iterate from first row for 'pRows' times
            for (int y = firstRow; y < firstRow + Math.abs(pSettings.getRowsToRender()); y++) {
                cnt++;
                Village v = pSettings.getVisibleVillage(x, y);
                int row = y - firstRow;
                int col = x;
                renderMarkerField(v, row, col, pSettings.getFieldWidth(), pSettings.getFieldHeight(), pSettings.getZoom(), useDecoration, g2d);
            }
        }
        g2d.dispose();
        return newRows;
    }

    /**
     * Renders the village-sprite layer for the vertical strip of newly exposed columns
     * ({@code pSettings.getColumnsToRender()}; a negative value selects columns at the
     * right edge of the viewport). Column counterpart of {@link #renderVillageRows}.
     *
     * @param pSettings geometry/viewport info
     * @return a BITMASK-transparency image covering the strip; caller must flush it
     */
    private BufferedImage renderVillageColumns(RenderSettings pSettings) {
        //create new buffer for rendering
        BufferedImage newColumns = ImageUtils.createCompatibleBufferedImage(Math.abs(pSettings.getColumnsToRender()) * pSettings.getFieldWidth(), pSettings.getVillagesInY() * pSettings.getFieldHeight(), Transparency.BITMASK);
        //calculate first row that will be rendered
        int firstCol = (pSettings.getColumnsToRender() > 0) ? 0 : pSettings.getVillagesInX() - Math.abs(pSettings.getColumnsToRender());
        Graphics2D g2d = (Graphics2D) newColumns.getGraphics();
        ImageUtils.setupGraphics(g2d);
        boolean showBarbarian = true;
        try {
            showBarbarian = Boolean.parseBoolean(GlobalOptions.getProperty("show.barbarian"));
        } catch (Exception e) {
            showBarbarian = true;
        }
        boolean markedOnly = false;
        //iterate through entire row
        int cnt = 0;
        boolean useDecoration = true;
        if (!WorldDecorationHolder.isValid()) {
            //use decoration if skin field size equals the world skin size
            useDecoration = false;
        }
        for (int x = firstCol; x < firstCol + Math.abs(pSettings.getColumnsToRender()); x++) {
            for (int y = 0; y < pSettings.getVillagesInY(); y++) {
                cnt++;
                //iterate from first row for 'pRows' times
                Village v = pSettings.getVisibleVillage(x, y);
                int row = y;
                int col = x - firstCol;
                int globalCol = colToGlobalPosition(pSettings, x);
                int globalRow = rowToGlobalPosition(pSettings, row);
                renderVillageField(v, row, col, globalRow, globalCol, pSettings.getFieldWidth(), pSettings.getFieldHeight(), pSettings.getZoom(), useDecoration, showBarbarian, markedOnly, g2d);
            }
        }
        g2d.dispose();
        return newColumns;
    }

    /**
     * Renders the marker-color layer for the strip of newly exposed columns. Column
     * counterpart of {@link #renderMarkerRows}; same TRANSLUCENT/OPAQUE choice based on
     * mark-on-top mode.
     *
     * @param pSettings geometry/viewport info
     * @return the marker strip image; caller must flush it
     */
    private BufferedImage renderMarkerColumns(RenderSettings pSettings) {
        //create new buffer for rendering
        BufferedImage newColumns = null;
        if (isMarkOnTop()) {
            newColumns = ImageUtils.createCompatibleBufferedImage(Math.abs(pSettings.getColumnsToRender()) * pSettings.getFieldWidth(), pSettings.getVillagesInY() * pSettings.getFieldHeight(), Transparency.TRANSLUCENT);
        } else {
            newColumns = ImageUtils.createCompatibleBufferedImage(Math.abs(pSettings.getColumnsToRender()) * pSettings.getFieldWidth(), pSettings.getVillagesInY() * pSettings.getFieldHeight(), Transparency.OPAQUE);
        }
        //calculate first row that will be rendered
        int firstCol = (pSettings.getColumnsToRender() > 0) ? 0 : pSettings.getVillagesInX() - Math.abs(pSettings.getColumnsToRender());
        Graphics2D g2d = (Graphics2D) newColumns.getGraphics();
        ImageUtils.setupGraphics(g2d);
        //iterate through entire row
        int cnt = 0;
        boolean useDecoration = true;
        if (!WorldDecorationHolder.isValid()) {
            //use decoration if skin field size equals the world skin size
            useDecoration = false;
        }
        for (int x = firstCol; x < firstCol + Math.abs(pSettings.getColumnsToRender()); x++) {
            for (int y = 0; y < pSettings.getVillagesInY(); y++) {
                cnt++;
                //iterate from first row for 'pRows' times
                Village v = pSettings.getVisibleVillage(x, y);
                int row = y;
                int col = x - firstCol;
                renderMarkerField(v, row, col, pSettings.getFieldWidth(), pSettings.getFieldHeight(), pSettings.getZoom(), useDecoration, g2d);
            }
        }
        g2d.dispose();
        return newColumns;
    }

    /**
     * Renders a single field of the village layer: either the village sprite (when a
     * visible village occupies the field) or the terrain/underground texture. Fields
     * whose texture was already drawn in this pass are reproduced with
     * {@code copyArea} from the cached bounds instead of a new sprite draw.
     *
     * @param v            village on this field, or null for empty terrain
     * @param row          strip-local row index
     * @param col          strip-local column index
     * @param globalRow    absolute map row (used for decoration texture lookup)
     * @param globalCol    absolute map column (used for decoration texture lookup)
     * @param pFieldWidth  field width in pixels
     * @param pFieldHeight field height in pixels
     * @param zoom         current zoom factor
     * @param useDecoration whether world decoration textures are available
     * @param showBarbarian whether barbarian villages are rendered as villages
     * @param markedOnly   currently always false in callers — TODO confirm intent
     * @param g2d          graphics of the strip image being rendered
     */
    private void renderVillageField(Village v, int row, int col, int globalRow, int globalCol, int pFieldWidth, int pFieldHeight, double zoom, boolean useDecoration, boolean showBarbarian, boolean markedOnly, Graphics2D g2d) {
        Rectangle copyRect = null;
        int textureId = -1;
        BufferedImage sprite = null;
        // Marker villageMarker = MarkerManager.getSingleton().getMarker(v);
        if (v != null && !(v.getTribe().equals(Barbarians.getSingleton()) && !showBarbarian)
                // && !(villageMarker == null && markedOnly && !v.getTribe().getName().equals(GlobalOptions.getSelectedProfile().getTribeName()))
                ) {
            //village field that has to be rendered
            v.setVisibleOnMap(true);
            if (GlobalOptions.getSkin().isMinimapSkin()) {
                // Minimap skins use a single village texture.
                textureId = Skin.ID_V1;
            } else {
                textureId = v.getGraphicsType();
            }
            copyRect = renderedSpriteBounds.get(textureId);
            if (copyRect == null) {
                // First occurrence of this texture in the pass: fetch the sprite.
                sprite = GlobalOptions.getSkin().getCachedImage(textureId, zoom);
            }
        } else {
            if (v != null) {
                // Village exists but is filtered out (hidden barbarian).
                v.setVisibleOnMap(false);
            }
            if (useDecoration) {
                // Decoration texture ids are offset by 100 from skin texture ids.
                textureId = WorldDecorationHolder.getTextureId(globalCol, globalRow) + 100;
            } else {
                textureId = Skin.ID_DEFAULT_UNDERGROUND;
            }
            copyRect = renderedSpriteBounds.get(textureId);
            if (copyRect == null && useDecoration) {
                sprite = WorldDecorationHolder.getCachedImage(globalCol, globalRow, zoom);
            } else if (copyRect == null && !useDecoration) {
                sprite = GlobalOptions.getSkin().getCachedImage(textureId, zoom);
            }
        }
        //render sprite or copy area if sprite is null
        int posX = col * pFieldWidth;
        int posY = row * pFieldHeight;
        if (sprite != null) {
            //render sprite
            if (isMarkOnTop()) {
                // Black background so translucent sprites don't blend with garbage.
                g2d.setColor(Color.BLACK);
                g2d.fillRect(posX, posY, pFieldWidth, pFieldHeight);
            }
            g2d.drawImage(sprite, posX, posY, null);
            // Remember where this texture was drawn for cheap duplication later.
            renderedSpriteBounds.put(textureId, new Rectangle(posX, posY, pFieldWidth, pFieldHeight));
        } else if (copyRect != null) {
            //copy from copy rect
            g2d.copyArea(copyRect.x, copyRect.y, copyRect.width, copyRect.height, posX - copyRect.x, posY - copyRect.y);
        }
    }

    /**
     * Renders a single field of the marker layer: the tribe/ally marker color for the
     * village on this field. Non-village (or hidden-barbarian) fields are skipped.
     * Markers for an already-seen tribe are reproduced with {@code copyArea}; the
     * current user's own village is never cached so its distinct white marker is not
     * reused for other villages of the same tribe.
     *
     * @param v            village on this field, or null
     * @param row          strip-local row index
     * @param col          strip-local column index
     * @param pFieldWidth  field width in pixels
     * @param pFieldHeight field height in pixels
     * @param zoom         current zoom factor (marker is scaled by 1/zoom)
     * @param useDecoration unused here — kept for signature symmetry with the village
     *                     renderer; TODO confirm it can stay
     * @param g2d          graphics of the strip image being rendered
     */
    private void renderMarkerField(Village v, int row, int col, int pFieldWidth, int pFieldHeight, double zoom, boolean useDecoration, Graphics2D g2d) {
        // Sentinel tribe id for "no marker cached".
        int tribeId = -666;
        BufferedImage sprite = null;
        Rectangle copyRect = null;
        boolean showBarbarian = true;
        Village currentUserVillage = DSWorkbenchMainFrame.getSingleton().getCurrentUserVillage();
        try {
            showBarbarian = Boolean.parseBoolean(GlobalOptions.getProperty("show.barbarian"));
        } catch (Exception e) {
            showBarbarian = true;
        }
        if (v != null && !(v.getTribe().equals(Barbarians.getSingleton()) && !showBarbarian)
                //&& !(MarkerManager.getSingleton().getMarker(v) == null && !v.getTribe().getName().equals(GlobalOptions.getSelectedProfile().getTribeName()))
                ) {
            v.setVisibleOnMap(true);
            tribeId = v.getTribeID();
            copyRect = renderedMarkerBounds.get(tribeId);
            if (copyRect == null) {
                // First marker for this tribe in the pass: build the marker image.
                sprite = getMarker(v);
            }
        } else {
            if (v != null) {
                v.setVisibleOnMap(false);
            }
            // Nothing to mark on empty/hidden fields.
            return;
        }
        //render sprite or copy area if sprite is null
        if (sprite != null) {
            //render sprite
            // The marker is built at base field size; scale it down by the zoom factor.
            AffineTransform t = AffineTransform.getTranslateInstance(col * pFieldWidth, row * pFieldHeight);
            t.scale(1.0 / zoom, 1.0 / zoom);
            g2d.drawRenderedImage(sprite, t);
            if (currentUserVillage == null || !v.equals(currentUserVillage)) {
                // Cache for copy-reuse — but never cache the user's own (white) marker.
                renderedMarkerBounds.put(tribeId, new Rectangle(col * pFieldWidth, row * pFieldHeight, pFieldWidth, pFieldHeight));
            }
            sprite.flush();
        } else if (copyRect != null) {
            g2d.copyArea(copyRect.x, copyRect.y, copyRect.width, copyRect.height, col * pFieldWidth - copyRect.x, row * pFieldHeight - copyRect.y);
        }
    }

    /**
     * Converts a viewport-relative row index to an absolute map row.
     *
     * @param pSettings provides the current map bounds
     * @param pRow      viewport-relative row
     * @return absolute map row
     */
    private int rowToGlobalPosition(RenderSettings pSettings, int pRow) {
        int yPos = (int) Math.floor(pSettings.getMapBounds().getY());
        return yPos + pRow;
    }

    /**
     * Converts a viewport-relative column index to an absolute map column.
     *
     * @param pSettings provides the current map bounds
     * @param pCol      viewport-relative column
     * @return absolute map column
     */
    private int colToGlobalPosition(RenderSettings pSettings, int pCol) {
        int xPos = (int) Math.floor(pSettings.getMapBounds().getX());
        return xPos + pCol;
    }

    /**
     * Builds the marker image (base field size, opaque, black border) for a village.
     * Priority: current user's own tribe (white for the selected village, yellow for
     * the others); otherwise tribe/ally markers from {@link MarkerManager} — both set
     * draws a diagonal two-color split, one set fills the whole field; otherwise the
     * configured default color ("default.mark": 0 = DS default, 1 = red, 2 = white),
     * or light gray for barbarian villages.
     *
     * @param pVillage village to build the marker for (must not be null)
     * @return a freshly allocated marker image; caller is responsible for flushing
     */
    private BufferedImage getMarker(Village pVillage) {
        int w = GlobalOptions.getSkin().getBasicFieldWidth();
        int h = GlobalOptions.getSkin().getBasicFieldHeight();
        BufferedImage image = ImageUtils.createCompatibleBufferedImage(w, h, Transparency.OPAQUE);
        Color markerColor = null;
        Marker tribeMarker = null;
        Marker allyMarker = null;
        Color DEFAULT = null;
        try {
            int mark = Integer.parseInt(GlobalOptions.getProperty("default.mark"));
            if (mark == 0) {
                DEFAULT = Constants.DS_DEFAULT_MARKER;
            } else if (mark == 1) {
                DEFAULT = Color.RED;
            } else if (mark == 2) {
                DEFAULT = Color.WHITE;
            }
        } catch (Exception e) {
            // Property missing/non-numeric: fall back to the DS default marker color.
            DEFAULT = Constants.DS_DEFAULT_MARKER;
        }
        Village currentUserVillage = DSWorkbenchMainFrame.getSingleton().getCurrentUserVillage();
        if (currentUserVillage != null && pVillage.getTribe() == currentUserVillage.getTribe()) {
            // Own tribe: the selected village is white, all other own villages yellow.
            if (pVillage.equals(currentUserVillage)) {
                markerColor = Color.WHITE;
            } else {
                markerColor = Color.YELLOW;
            }
        } else {
            if (pVillage.getTribe() != Barbarians.getSingleton()) {
                tribeMarker = MarkerManager.getSingleton().getMarker(pVillage.getTribe());
                if (tribeMarker != null && !tribeMarker.isShownOnMap()) {
                    // Marker exists but is disabled: treat as unmarked with default color.
                    tribeMarker = null;
                    markerColor = DEFAULT;
                }
                if (pVillage.getTribe().getAlly() != BarbarianAlly.getSingleton()) {
                    allyMarker = MarkerManager.getSingleton().getMarker(pVillage.getTribe().getAlly());
                    if (allyMarker != null && !allyMarker.isShownOnMap()) {
                        allyMarker = null;
                        markerColor = DEFAULT;
                    }
                }
            }
        }
        Graphics2D g2d = image.createGraphics();
        ImageUtils.setupGraphics(g2d);
        if (markerColor != null || tribeMarker != null || allyMarker != null) {
            if (tribeMarker != null && allyMarker != null) {
                //draw two-part marker
                // Lower-left triangle = tribe color.
                GeneralPath p = new GeneralPath();
                p.moveTo(0, 0);
                p.lineTo(w, h);
                p.lineTo(0, h);
                p.closePath();
                g2d.setColor(tribeMarker.getMarkerColor());
                g2d.fill(p);
                // Upper-right triangle = ally color.
                p = new GeneralPath();
                p.moveTo(0, 0);
                p.lineTo(w, 0);
                p.lineTo(w, h);
                p.closePath();
                g2d.setColor(allyMarker.getMarkerColor());
                g2d.fill(p);
            } else if (tribeMarker != null) {
                //draw tribe marker
                g2d.setColor(tribeMarker.getMarkerColor());
                g2d.fillRect(0, 0, w, h);
            } else if (allyMarker != null) {
                //draw ally marker
                g2d.setColor(allyMarker.getMarkerColor());
                g2d.fillRect(0, 0, w, h);
            } else {
                //draw misc marker
                g2d.setColor(markerColor);
                g2d.fillRect(0, 0, w, h);
            }
        } else {
            if (pVillage.getTribe() != Barbarians.getSingleton()) {
                if (DEFAULT != null) {
                    //no mark-on-top mode
                    g2d.setColor(DEFAULT);
                    g2d.fillRect(0, 0, w, h);
                }
            } else {
                //barbarian marker
                g2d.setColor(Color.LIGHT_GRAY);
                g2d.fillRect(0, 0, w, h);
            }
        }
        // Black 1px border around every marker.
        g2d.setColor(Color.BLACK);
        g2d.drawRect(0, 0, w, h);
        g2d.dispose();
        return image;
    }

    /**
     * Requests a full re-render (and buffer size re-validation) on the next
     * {@link #performRendering} call.
     */
    public void reset() {
        shouldReset = true;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.daemon.worker; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Supplier; import org.apache.storm.Config; import org.apache.storm.Constants; import org.apache.storm.StormTimer; import org.apache.storm.cluster.DaemonType; import org.apache.storm.cluster.IStateStorage; import org.apache.storm.cluster.IStormClusterState; import org.apache.storm.cluster.VersionedData; import org.apache.storm.daemon.StormCommon; import org.apache.storm.daemon.supervisor.AdvancedFSOps; import 
org.apache.storm.executor.IRunningExecutor; import org.apache.storm.generated.Assignment; import org.apache.storm.generated.DebugOptions; import org.apache.storm.generated.Grouping; import org.apache.storm.generated.InvalidTopologyException; import org.apache.storm.generated.NodeInfo; import org.apache.storm.generated.StormBase; import org.apache.storm.generated.StormTopology; import org.apache.storm.generated.StreamInfo; import org.apache.storm.generated.TopologyStatus; import org.apache.storm.grouping.Load; import org.apache.storm.grouping.LoadMapping; import org.apache.storm.hooks.IWorkerHook; import org.apache.storm.messaging.ConnectionWithStatus; import org.apache.storm.messaging.DeserializingConnectionCallback; import org.apache.storm.messaging.IConnection; import org.apache.storm.messaging.IConnectionCallback; import org.apache.storm.messaging.IContext; import org.apache.storm.messaging.TransportFactory; import org.apache.storm.messaging.netty.BackPressureStatus; import org.apache.storm.metrics2.StormMetricRegistry; import org.apache.storm.policy.IWaitStrategy; import org.apache.storm.security.auth.IAutoCredentials; import org.apache.storm.serialization.ITupleSerializer; import org.apache.storm.serialization.KryoTupleSerializer; import org.apache.storm.shade.com.google.common.collect.ImmutableMap; import org.apache.storm.shade.com.google.common.collect.Sets; import org.apache.storm.shade.org.apache.commons.lang.Validate; import org.apache.storm.task.WorkerTopologyContext; import org.apache.storm.tuple.AddressedTuple; import org.apache.storm.tuple.Fields; import org.apache.storm.utils.ConfigUtils; import org.apache.storm.utils.JCQueue; import org.apache.storm.utils.ObjectReader; import org.apache.storm.utils.SupervisorClient; import org.apache.storm.utils.SupervisorIfaceFactory; import org.apache.storm.utils.ThriftTopologyUtils; import org.apache.storm.utils.Utils; import org.apache.storm.utils.Utils.SmartThread; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; public class WorkerState { private static final Logger LOG = LoggerFactory.getLogger(WorkerState.class); private static final long LOAD_REFRESH_INTERVAL_MS = 5000L; private static long dropCount = 0; final Map<String, Object> conf; final IContext mqContext; final IConnection receiver; final String topologyId; final String assignmentId; private final Supplier<SupervisorIfaceFactory> supervisorIfaceSupplier; final int port; final String workerId; final IStateStorage stateStorage; final IStormClusterState stormClusterState; // when worker bootup, worker will start to setup initial connections to // other workers. When all connection is ready, we will count down this latch // and spout and bolt will be activated, assuming the topology is not deactivated. // used in worker only, keep it as a latch final CountDownLatch isWorkerActive; final AtomicBoolean isTopologyActive; final AtomicReference<Map<String, DebugOptions>> stormComponentToDebug; // local executors and localTaskIds running in this worker final Set<List<Long>> localExecutors; final ArrayList<Integer> localTaskIds; // [taskId]-> JCQueue : initialized after local executors are initialized final Map<Integer, JCQueue> localReceiveQueues = new HashMap<>(); final Map<String, Object> topologyConf; final StormTopology topology; final StormTopology systemTopology; final Map<Integer, String> taskToComponent; final Map<String, Map<String, Fields>> componentToStreamToFields; final Map<String, List<Integer>> componentToSortedTasks; final ConcurrentMap<String, Long> blobToLastKnownVersion; final ReentrantReadWriteLock endpointSocketLock; final AtomicReference<Map<Integer, NodeInfo>> cachedTaskToNodePort; final AtomicReference<Map<NodeInfo, IConnection>> cachedNodeToPortSocket; // executor id is in form [start_task_id end_task_id] final Map<List<Long>, JCQueue> executorReceiveQueueMap; final Map<Integer, JCQueue> taskToExecutorQueue; final Runnable suicideCallback; final Utils.UptimeComputer uptime; 
final Map<String, Object> defaultSharedResources; final Map<String, Object> userSharedResources; final LoadMapping loadMapping; final AtomicReference<Map<String, VersionedData<Assignment>>> assignmentVersions; // Timers final StormTimer heartbeatTimer = mkHaltingTimer("heartbeat-timer"); final StormTimer refreshLoadTimer = mkHaltingTimer("refresh-load-timer"); final StormTimer refreshConnectionsTimer = mkHaltingTimer("refresh-connections-timer"); final StormTimer refreshCredentialsTimer = mkHaltingTimer("refresh-credentials-timer"); final StormTimer checkForUpdatedBlobsTimer = mkHaltingTimer("check-for-updated-blobs-timer"); final StormTimer resetLogLevelsTimer = mkHaltingTimer("reset-log-levels-timer"); final StormTimer refreshActiveTimer = mkHaltingTimer("refresh-active-timer"); final StormTimer executorHeartbeatTimer = mkHaltingTimer("executor-heartbeat-timer"); final StormTimer flushTupleTimer = mkHaltingTimer("flush-tuple-timer"); final StormTimer userTimer = mkHaltingTimer("user-timer"); final StormTimer backPressureCheckTimer = mkHaltingTimer("backpressure-check-timer"); private final WorkerTransfer workerTransfer; private final BackPressureTracker bpTracker; private final List<IWorkerHook> deserializedWorkerHooks; // global variables only used internally in class private final Set<Integer> outboundTasks; private final AtomicLong nextLoadUpdate = new AtomicLong(0); private final boolean trySerializeLocal; private final Collection<IAutoCredentials> autoCredentials; private final StormMetricRegistry metricRegistry; public WorkerState(Map<String, Object> conf, IContext mqContext, String topologyId, String assignmentId, Supplier<SupervisorIfaceFactory> supervisorIfaceSupplier, int port, String workerId, Map<String, Object> topologyConf, IStateStorage stateStorage, IStormClusterState stormClusterState, Collection<IAutoCredentials> autoCredentials, StormMetricRegistry metricRegistry) throws IOException, InvalidTopologyException { this.metricRegistry = 
metricRegistry; this.autoCredentials = autoCredentials; this.conf = conf; this.supervisorIfaceSupplier = supervisorIfaceSupplier; this.localExecutors = new HashSet<>(readWorkerExecutors(stormClusterState, topologyId, assignmentId, port)); this.mqContext = (null != mqContext) ? mqContext : TransportFactory.makeContext(topologyConf); this.topologyId = topologyId; this.assignmentId = assignmentId; this.port = port; this.workerId = workerId; this.stateStorage = stateStorage; this.stormClusterState = stormClusterState; this.isWorkerActive = new CountDownLatch(1); this.isTopologyActive = new AtomicBoolean(false); this.stormComponentToDebug = new AtomicReference<>(); this.executorReceiveQueueMap = mkReceiveQueueMap(topologyConf, localExecutors); this.localTaskIds = new ArrayList<>(); this.taskToExecutorQueue = new HashMap<>(); this.blobToLastKnownVersion = new ConcurrentHashMap<>(); for (Map.Entry<List<Long>, JCQueue> entry : executorReceiveQueueMap.entrySet()) { List<Integer> taskIds = StormCommon.executorIdToTasks(entry.getKey()); for (Integer taskId : taskIds) { this.taskToExecutorQueue.put(taskId, entry.getValue()); } this.localTaskIds.addAll(taskIds); } Collections.sort(localTaskIds); this.topologyConf = topologyConf; this.topology = ConfigUtils.readSupervisorTopology(conf, topologyId, AdvancedFSOps.make(conf)); this.systemTopology = StormCommon.systemTopology(topologyConf, topology); this.taskToComponent = StormCommon.stormTaskInfo(topology, topologyConf); this.componentToStreamToFields = new HashMap<>(); for (String c : ThriftTopologyUtils.getComponentIds(systemTopology)) { Map<String, Fields> streamToFields = new HashMap<>(); for (Map.Entry<String, StreamInfo> stream : ThriftTopologyUtils.getComponentCommon(systemTopology, c).get_streams().entrySet()) { streamToFields.put(stream.getKey(), new Fields(stream.getValue().get_output_fields())); } componentToStreamToFields.put(c, streamToFields); } this.componentToSortedTasks = Utils.reverseMap(taskToComponent); 
this.componentToSortedTasks.values().forEach(Collections::sort); this.endpointSocketLock = new ReentrantReadWriteLock(); this.cachedNodeToPortSocket = new AtomicReference<>(new HashMap<>()); this.cachedTaskToNodePort = new AtomicReference<>(new HashMap<>()); this.suicideCallback = Utils.mkSuicideFn(); this.uptime = Utils.makeUptimeComputer(); this.defaultSharedResources = makeDefaultResources(); this.userSharedResources = makeUserResources(); this.loadMapping = new LoadMapping(); this.assignmentVersions = new AtomicReference<>(new HashMap<>()); this.outboundTasks = workerOutboundTasks(); this.trySerializeLocal = topologyConf.containsKey(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE) && (Boolean) topologyConf.get(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE); if (trySerializeLocal) { LOG.warn("WILL TRY TO SERIALIZE ALL TUPLES (Turn off {} for production", Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE); } int maxTaskId = getMaxTaskId(componentToSortedTasks); this.workerTransfer = new WorkerTransfer(this, topologyConf, maxTaskId); this.bpTracker = new BackPressureTracker(workerId, taskToExecutorQueue); this.deserializedWorkerHooks = deserializeWorkerHooks(); LOG.info("Registering IConnectionCallbacks for {}:{}", assignmentId, port); IConnectionCallback cb = new DeserializingConnectionCallback(topologyConf, getWorkerTopologyContext(), this::transferLocalBatch); Supplier<Object> newConnectionResponse = () -> { BackPressureStatus bpStatus = bpTracker.getCurrStatus(); LOG.info("Sending BackPressure status to new client. 
BPStatus: {}", bpStatus); return bpStatus; }; this.receiver = this.mqContext.bind(topologyId, port, cb, newConnectionResponse); } private static double getQueueLoad(JCQueue queue) { JCQueue.QueueMetrics queueMetrics = queue.getMetrics(); return ((double) queueMetrics.population()) / queueMetrics.capacity(); } public static boolean isConnectionReady(IConnection connection) { return !(connection instanceof ConnectionWithStatus) || ((ConnectionWithStatus) connection).status() == ConnectionWithStatus.Status.Ready; } private static int getMaxTaskId(Map<String, List<Integer>> componentToSortedTasks) { int maxTaskId = -1; for (List<Integer> integers : componentToSortedTasks.values()) { if (!integers.isEmpty()) { int tempMax = integers.stream().max(Integer::compareTo).get(); if (tempMax > maxTaskId) { maxTaskId = tempMax; } } } return maxTaskId; } public List<IWorkerHook> getDeserializedWorkerHooks() { return deserializedWorkerHooks; } public Map<String, Object> getConf() { return conf; } public IConnection getReceiver() { return receiver; } public String getTopologyId() { return topologyId; } public int getPort() { return port; } public String getWorkerId() { return workerId; } public IStateStorage getStateStorage() { return stateStorage; } public CountDownLatch getIsWorkerActive() { return isWorkerActive; } public AtomicBoolean getIsTopologyActive() { return isTopologyActive; } public AtomicReference<Map<String, DebugOptions>> getStormComponentToDebug() { return stormComponentToDebug; } public Set<List<Long>> getLocalExecutors() { return localExecutors; } public List<Integer> getLocalTaskIds() { return localTaskIds; } public Map<Integer, JCQueue> getLocalReceiveQueues() { return localReceiveQueues; } public Map<String, Object> getTopologyConf() { return topologyConf; } public StormTopology getTopology() { return topology; } public StormTopology getSystemTopology() { return systemTopology; } public Map<Integer, String> getTaskToComponent() { return taskToComponent; } 
/** @return per-component mapping of stream id to that stream's declared output fields. */
public Map<String, Map<String, Fields>> getComponentToStreamToFields() {
    return componentToStreamToFields;
}

/** @return per-component sorted list of task ids assigned to that component. */
public Map<String, List<Integer>> getComponentToSortedTasks() {
    return componentToSortedTasks;
}

public Map<String, Long> getBlobToLastKnownVersion() {
    return blobToLastKnownVersion;
}

/** @return atomically-swapped cache of remote node/port to its open connection. */
public AtomicReference<Map<NodeInfo, IConnection>> getCachedNodeToPortSocket() {
    return cachedNodeToPortSocket;
}

public Map<List<Long>, JCQueue> getExecutorReceiveQueueMap() {
    return executorReceiveQueueMap;
}

public Runnable getSuicideCallback() {
    return suicideCallback;
}

public Utils.UptimeComputer getUptime() {
    return uptime;
}

public Map<String, Object> getDefaultSharedResources() {
    return defaultSharedResources;
}

public Map<String, Object> getUserSharedResources() {
    return userSharedResources;
}

public LoadMapping getLoadMapping() {
    return loadMapping;
}

public AtomicReference<Map<String, VersionedData<Assignment>>> getAssignmentVersions() {
    return assignmentVersions;
}

public StormTimer getUserTimer() {
    return userTimer;
}

public SmartThread makeTransferThread() {
    return workerTransfer.makeTransferThread();
}

/**
 * Re-reads this topology's assignment and reconciles the cached remote connections with it:
 * opens connections to newly-needed remote nodes, refreshes the task-to-node/port routing map,
 * and closes/removes connections that are no longer needed.
 */
public void refreshConnections() {
    Assignment assignment = null;
    try {
        assignment = getLocalAssignment(stormClusterState, topologyId);
    } catch (Exception e) {
        // Best-effort: a missing assignment is expected while the topology is being torn down.
        LOG.warn("Failed to read assignment. This should only happen when topology is shutting down.", e);
    }
    Set<NodeInfo> neededConnections = new HashSet<>();
    Map<Integer, NodeInfo> newTaskToNodePort = new HashMap<>();
    if (null != assignment) {
        Map<Integer, NodeInfo> taskToNodePort = StormCommon.taskToNodeport(assignment.get_executor_node_port());
        for (Map.Entry<Integer, NodeInfo> taskToNodePortEntry : taskToNodePort.entrySet()) {
            Integer task = taskToNodePortEntry.getKey();
            // Only tasks this worker sends to matter; local tasks need no remote connection.
            if (outboundTasks.contains(task)) {
                newTaskToNodePort.put(task, taskToNodePortEntry.getValue());
                if (!localTaskIds.contains(task)) {
                    neededConnections.add(taskToNodePortEntry.getValue());
                }
            }
        }
    }
    Set<NodeInfo> currentConnections = cachedNodeToPortSocket.get().keySet();
    // NOTE: Sets.difference returns live views over the underlying sets.
    Set<NodeInfo> newConnections = Sets.difference(neededConnections, currentConnections);
    Set<NodeInfo> removeConnections = Sets.difference(currentConnections, neededConnections);
    Map<String, String> nodeHost = assignment != null ? assignment.get_node_host() : null;
    // Add new connections atomically
    cachedNodeToPortSocket.getAndUpdate(prev -> {
        Map<NodeInfo, IConnection> next = new HashMap<>(prev);
        for (NodeInfo nodeInfo : newConnections) {
            next.put(nodeInfo, mqContext.connect(
                topologyId,
                //nodeHost is not null here, as newConnections is only non-empty if assignment was not null above.
                nodeHost.get(nodeInfo.get_node()), // Host
                nodeInfo.get_port().iterator().next().intValue(), // Port
                workerTransfer.getRemoteBackPressureStatus()));
        }
        return next;
    });
    // Publish the new routing table under the write lock so readers see a consistent view.
    try {
        endpointSocketLock.writeLock().lock();
        cachedTaskToNodePort.set(newTaskToNodePort);
    } finally {
        endpointSocketLock.writeLock().unlock();
    }
    // Close stale connections before removing them from the cache.
    for (NodeInfo nodeInfo : removeConnections) {
        cachedNodeToPortSocket.get().get(nodeInfo).close();
    }
    // Remove old connections atomically
    cachedNodeToPortSocket.getAndUpdate(prev -> {
        Map<NodeInfo, IConnection> next = new HashMap<>(prev);
        removeConnections.forEach(next::remove);
        return next;
    });
}

/** Refreshes the active flag and re-schedules itself on the refresh-active timer. */
public void refreshStormActive() {
    refreshStormActive(() -> refreshActiveTimer.schedule(0, this::refreshStormActive));
}

/**
 * Reads the topology's StormBase and updates the local active flag and per-component
 * debug options (filling in defaults for unset sampling pct / enable flags).
 *
 * @param callback invoked by cluster state when the base changes again.
 */
public void refreshStormActive(Runnable callback) {
    StormBase base = stormClusterState.stormBase(topologyId, callback);
    isTopologyActive.set(
        (null != base)
        && (base.get_status() == TopologyStatus.ACTIVE));
    if (null != base) {
        Map<String, DebugOptions> debugOptionsMap = new HashMap<>(base.get_component_debug());
        for (DebugOptions debugOptions : debugOptionsMap.values()) {
            // Default to 10% sampling and disabled event debugging when unset.
            if (!debugOptions.is_set_samplingpct()) {
                debugOptions.set_samplingpct(10);
            }
            if (!debugOptions.is_set_enable()) {
                debugOptions.set_enable(false);
            }
        }
        stormComponentToDebug.set(debugOptionsMap);
        LOG.debug("Events debug options {}", stormComponentToDebug.get());
    }
}

/**
 * Recomputes local executor queue loads and pulls remote task loads from open connections,
 * then periodically (rate-limited by LOAD_REFRESH_INTERVAL_MS) publishes local load metrics.
 *
 * @param execs the executors running in this worker.
 */
public void refreshLoad(List<IRunningExecutor> execs) {
    Set<Integer> remoteTasks = Sets.difference(new HashSet<>(outboundTasks), new HashSet<>(localTaskIds));
    Map<Integer, Double> localLoad = new HashMap<>();
    for (IRunningExecutor exec : execs) {
        double receiveLoad = getQueueLoad(exec.getReceiveQueue());
        localLoad.put(exec.getExecutorId().get(0).intValue(), receiveLoad);
    }
    Map<Integer, Load> remoteLoad = new HashMap<>();
    cachedNodeToPortSocket.get().values().stream().forEach(conn -> remoteLoad.putAll(conn.getLoad(remoteTasks)));
    loadMapping.setLocal(localLoad);
    loadMapping.setRemote(remoteLoad);
    Long now = System.currentTimeMillis();
    if (now > nextLoadUpdate.get()) {
        receiver.sendLoadMetrics(localLoad);
        nextLoadUpdate.set(now + LOAD_REFRESH_INTERVAL_MS);
    }
}

// checks if the tasks which had back pressure are now free again. if so, sends an update to other workers
public void refreshBackPressureStatus() {
    LOG.debug("Checking for change in Backpressure status on worker's tasks");
    boolean bpSituationChanged = bpTracker.refreshBpTaskList();
    if (bpSituationChanged) {
        BackPressureStatus bpStatus = bpTracker.getCurrStatus();
        receiver.sendBackPressureStatus(bpStatus);
    }
}

/**
 * we will wait all connections to be ready and then activate the spout/bolt when the worker bootup.
 * Polls once per second via the refresh-active timer until all remote connections report Ready.
 */
public void activateWorkerWhenAllConnectionsReady() {
    int delaySecs = 0;
    int recurSecs = 1;
    refreshActiveTimer.schedule(delaySecs, () -> {
        if (areAllConnectionsReady()) {
            LOG.info("All connections are ready for worker {}:{} with id {}", assignmentId, port, workerId);
            isWorkerActive.countDown();
        } else {
            // Not ready yet: re-check after recurSecs.
            refreshActiveTimer.schedule(recurSecs, () -> activateWorkerWhenAllConnectionsReady(), false, 0);
        }
    }
    );
}

/* Not a Blocking call. If cannot emit, will add 'tuple' to pendingEmits and return 'false'. 'pendingEmits' can be null */
public boolean tryTransferRemote(AddressedTuple tuple, Queue<AddressedTuple> pendingEmits, ITupleSerializer serializer) {
    return workerTransfer.tryTransferRemote(tuple, pendingEmits, serializer);
}

public void flushRemotes() throws InterruptedException {
    workerTransfer.flushRemotes();
}

public boolean tryFlushRemotes() {
    return workerTransfer.tryFlushRemotes();
}

// Receives msgs from remote workers and feeds them to local executors. If any receiving local executor is under Back Pressure,
// informs other workers about back pressure situation. Runs in the NettyWorker thread.
private void transferLocalBatch(ArrayList<AddressedTuple> tupleBatch) {
    int lastOverflowCount = 0; // overflowQ size at the time the last BPStatus was sent
    for (int i = 0; i < tupleBatch.size(); i++) {
        AddressedTuple tuple = tupleBatch.get(i);
        JCQueue queue = taskToExecutorQueue.get(tuple.dest);

        // 1- try adding to main queue if its overflow is not empty
        if (queue.isEmptyOverflow()) {
            if (queue.tryPublish(tuple)) {
                continue;
            }
        }

        // 2- BP detected (i.e MainQ is full). So try adding to overflow
        int currOverflowCount = queue.getOverflowCount();
        if (bpTracker.recordBackPressure(tuple.dest)) {
            // First time BP was recorded for this task: notify remote workers immediately.
            receiver.sendBackPressureStatus(bpTracker.getCurrStatus());
            lastOverflowCount = currOverflowCount;
        } else {
            if (currOverflowCount - lastOverflowCount > 10000) {
                // resend BP status, in case prev notification was missed or reordered
                BackPressureStatus bpStatus = bpTracker.getCurrStatus();
                receiver.sendBackPressureStatus(bpStatus);
                lastOverflowCount = currOverflowCount;
                LOG.debug("Re-sent BackPressure Status. OverflowCount = {}, BP Status ID = {}. ", currOverflowCount, bpStatus.id);
            }
        }
        if (!queue.tryPublishToOverflow(tuple)) {
            // Overflow is also full beyond its limit; the tuple is dropped.
            dropMessage(tuple, queue);
        }
    }
}

// Records and logs a dropped tuple when a queue's overflow limit is exceeded.
private void dropMessage(AddressedTuple tuple, JCQueue queue) {
    ++dropCount;
    queue.recordMsgDrop();
    LOG.warn("Dropping message as overflow threshold has reached for Q = {}. OverflowCount = {}. Total Drop Count= {}, Dropped Message : {}",
             queue.getName(), queue.getOverflowCount(), dropCount, tuple);
}

// Forces serialization of the tuple when TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE is on (testing aid).
public void checkSerialize(KryoTupleSerializer serializer, AddressedTuple tuple) {
    if (trySerializeLocal) {
        serializer.serialize(tuple.getTuple());
    }
}

/**
 * Builds the WorkerTopologyContext for this worker from the system topology, config and
 * task/component maps. IO failures resolving storm dist/pid paths are wrapped unchecked.
 */
public final WorkerTopologyContext getWorkerTopologyContext() {
    try {
        String codeDir = ConfigUtils.supervisorStormResourcesPath(ConfigUtils.supervisorStormDistRoot(conf, topologyId));
        String pidDir = ConfigUtils.workerPidsRoot(conf, topologyId);
        return new WorkerTopologyContext(systemTopology, topologyConf, taskToComponent, componentToSortedTasks,
                                         componentToStreamToFields, topologyId, codeDir, pidDir, port, localTaskIds,
                                         defaultSharedResources, userSharedResources, cachedTaskToNodePort, assignmentId);
    } catch (IOException e) {
        throw Utils.wrapInRuntime(e);
    }
}

// Deserializes the worker hooks that were serialized into the topology definition (if any).
private List<IWorkerHook> deserializeWorkerHooks() {
    List<IWorkerHook> myHookList = new ArrayList<>();
    if (topology.is_set_worker_hooks()) {
        for (ByteBuffer hook : topology.get_worker_hooks()) {
            byte[] hookBytes = Utils.toByteArray(hook);
            IWorkerHook hookObject = Utils.javaDeserialize(hookBytes, IWorkerHook.class);
            myHookList.add(hookObject);
        }
    }
    return myHookList;
}

/** Invokes start() on every deserialized worker hook with this worker's context. */
public void runWorkerStartHooks() {
    WorkerTopologyContext workerContext = getWorkerTopologyContext();
    for (IWorkerHook hook : getDeserializedWorkerHooks()) {
        hook.start(topologyConf, workerContext);
    }
}

/** Invokes shutdown() on every deserialized worker hook. */
public void runWorkerShutdownHooks() {
    for (IWorkerHook hook : getDeserializedWorkerHooks()) {
        hook.shutdown();
    }
}

/** Shuts down the default shared executor service. */
public void closeResources() {
    LOG.info("Shutting down default resources");
    ((ExecutorService) defaultSharedResources.get(WorkerTopologyContext.SHARED_EXECUTOR)).shutdownNow();
    LOG.info("Shut down default resources");
}

/** @return true when every cached remote connection reports Ready (vacuously true with none). */
public boolean areAllConnectionsReady() {
    return cachedNodeToPortSocket.get().values()
                                 .stream()
                                 .map(WorkerState::isConnectionReady)
                                 .reduce((left, right) -> left && right)
                                 .orElse(true);
}

public Collection<IAutoCredentials> getAutoCredentials() {
    return this.autoCredentials;
}

/**
 * Reads which executors are assigned to this worker (matching node and port),
 * always including the system executor id.
 */
private List<List<Long>> readWorkerExecutors(IStormClusterState stormClusterState, String topologyId, String assignmentId,
                                             int port) {
    LOG.info("Reading assignments");
    List<List<Long>> executorsAssignedToThisWorker = new ArrayList<>();
    executorsAssignedToThisWorker.add(Constants.SYSTEM_EXECUTOR_ID);
    Map<List<Long>, NodeInfo> executorToNodePort =
        getLocalAssignment(stormClusterState, topologyId).get_executor_node_port();
    for (Map.Entry<List<Long>, NodeInfo> entry : executorToNodePort.entrySet()) {
        NodeInfo nodeInfo = entry.getValue();
        if (nodeInfo.get_node().equals(assignmentId) && nodeInfo.get_port().iterator().next() == port) {
            executorsAssignedToThisWorker.add(entry.getKey());
        }
    }
    return executorsAssignedToThisWorker;
}

/**
 * Fetches this topology's assignment from the local supervisor, falling back to
 * ZooKeeper state when the supervisor client fails for any reason.
 */
private Assignment getLocalAssignment(IStormClusterState stormClusterState, String topologyId) {
    try (SupervisorIfaceFactory fac = supervisorIfaceSupplier.get()) {
        return fac.getIface().getLocalAssignmentForStorm(topologyId);
    } catch (Throwable e) {
        //if any error/exception thrown, fetch it from zookeeper
        Assignment assignment = stormClusterState.remoteAssignmentInfo(topologyId, null);
        if (assignment == null) {
            throw new RuntimeException("Failed to read worker assignment."
                                       + " Supervisor client threw exception, and assignment in Zookeeper was null", e);
        }
        return assignment;
    }
}

/**
 * Creates one receive JCQueue per executor, sized from topology config. Rejects a
 * producer batch size larger than half the receive buffer.
 */
private Map<List<Long>, JCQueue> mkReceiveQueueMap(Map<String, Object> topologyConf, Set<List<Long>> executors) {
    Integer recvQueueSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE));
    Integer recvBatchSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_PRODUCER_BATCH_SIZE));
    Integer overflowLimit = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_OVERFLOW_LIMIT));

    if (recvBatchSize > recvQueueSize / 2) {
        throw new IllegalArgumentException(Config.TOPOLOGY_PRODUCER_BATCH_SIZE + ":" + recvBatchSize
                                           + " is greater than half of " + Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE + ":" + recvQueueSize);
    }

    IWaitStrategy backPressureWaitStrategy = IWaitStrategy.createBackPressureWaitStrategy(topologyConf);
    Map<List<Long>, JCQueue> receiveQueueMap = new HashMap<>();
    for (List<Long> executor : executors) {
        // NOTE(review): this local is never read below (this.getPort() is called again) — looks vestigial.
        int port = this.getPort();
        receiveQueueMap.put(executor, new JCQueue("receive-queue" + executor.toString(),
                                                  recvQueueSize, overflowLimit, recvBatchSize, backPressureWaitStrategy,
                                                  this.getTopologyId(), Constants.SYSTEM_COMPONENT_ID, -1, this.getPort(), metricRegistry));
    }
    return receiveQueueMap;
}

// Builds the default shared resources: a fixed-size thread pool under SHARED_EXECUTOR.
private Map<String, Object> makeDefaultResources() {
    int threadPoolSize = ObjectReader.getInt(conf.get(Config.TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE));
    return ImmutableMap.of(WorkerTopologyContext.SHARED_EXECUTOR, Executors.newFixedThreadPool(threadPoolSize));
}

private Map<String, Object> makeUserResources() {
    /* TODO: need to invoke a hook provided by the topology, giving it a chance to create user resources.
     * this would be part of the initialization hook
     * need to separate workertopologycontext into WorkerContext and WorkerUserContext.
     * actually just do it via interfaces. just need to make sure to hide setResource from tasks
     */
    return new HashMap<>();
}

// Creates a timer whose error handler kills the whole worker process (fail-fast).
private StormTimer mkHaltingTimer(String name) {
    return new StormTimer(name, (thread, exception) -> {
        LOG.error("Error when processing event", exception);
        Utils.exitProcess(20, "Error when processing an event");
    });
}

/**
 * Get worker outbound tasks.
 *
 * @return seq of task ids that receive messages from this worker
 */
private Set<Integer> workerOutboundTasks() {
    WorkerTopologyContext context = getWorkerTopologyContext();
    Set<String> components = new HashSet<>();
    // Collect every component that any local task targets.
    for (Integer taskId : localTaskIds) {
        for (Map<String, Grouping> value : context.getTargets(context.getComponentId(taskId)).values()) {
            components.addAll(value.keySet());
        }
    }

    Set<Integer> outboundTasks = new HashSet<>();

    // Map those target components back to their task ids.
    for (Map.Entry<String, List<Integer>> entry : Utils.reverseMap(taskToComponent).entrySet()) {
        if (components.contains(entry.getKey())) {
            outboundTasks.addAll(entry.getValue());
        }
    }
    return outboundTasks;
}

public Set<Integer> getOutboundTasks() {
    return this.outboundTasks;
}

/**
 * Check if this worker has remote outbound tasks.
 *
 * @return true if this worker has remote outbound tasks; false otherwise.
 */
public boolean hasRemoteOutboundTasks() {
    Set<Integer> remoteTasks = Sets.difference(new HashSet<>(outboundTasks), new HashSet<>(localTaskIds));
    return !remoteTasks.isEmpty();
}

/**
 * If all the tasks are local tasks, the topology has only one worker.
 *
 * @return true if this worker is the single worker; false otherwise.
 */
public boolean isSingleWorker() {
    Set<Integer> nonLocalTasks = Sets.difference(getTaskToComponent().keySet(), new HashSet<>(localTaskIds));
    return nonLocalTasks.isEmpty();
}

public void haltWorkerTransfer() {
    workerTransfer.haltTransferThd();
}

public JCQueue getTransferQueue() {
    return workerTransfer.getTransferQueue();
}

public StormMetricRegistry getMetricRegistry() {
    return metricRegistry;
}

/** Callback invoked with each batch of tuples destined for local executors. */
public interface ILocalTransferCallback {
    void transfer(ArrayList<AddressedTuple> tupleBatch);
}
}
/**
 * WRML - Web Resource Modeling Language
 *  __     __ ______   __    __   __
 * /\ \  _ \ \ /\  == \ /\ "-./  \ /\ \
 * \ \ \/ ".\ \\ \  __< \ \ \-./\ \\ \ \____
 *  \ \__/".~\_\\ \_\ \_\\ \_\ \ \_\\ \_____\
 *   \/_/   \/_/ \/_/ /_/ \/_/  \/_/ \/_____/
 *
 * http://www.wrml.org
 *
 * Copyright (C) 2011 - 2013 Mark Masse <mark@wrml.org> (OSS project WRML.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wrml.runtime.rest;

import com.google.common.collect.ComparisonChain;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wrml.model.Model;
import org.wrml.model.rest.*;
import org.wrml.runtime.Context;
import org.wrml.runtime.Dimensions;
import org.wrml.runtime.Keys;
import org.wrml.runtime.schema.*;
import org.wrml.runtime.syntax.SyntaxLoader;

import java.net.URI;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;

/**
 * A runtime manifestation of a specific {@link Api}'s specific {@link ResourceTemplate} (REST API URI tree node).
 */
public class Resource implements Comparable<Resource> {

    private static final Logger LOGGER = LoggerFactory.getLogger(Resource.class);

    private static final String TO_STRING_FORMAT = "{\"Resource\" : { \"ResourceTemplateId\" : \"%s\",\"UriTemplate\" : %s,\"FullPath\" : \"%s\"}}";

    /**
     * The {@link ApiNavigator} that owns us.
     */
    private final ApiNavigator _ApiNavigator;

    // The Api metadata node that this runtime Resource was compiled from.
    private final ResourceTemplate _ResourceTemplate;

    // Compiled URI template for this resource (api URI + full path).
    private final UriTemplate _UriTemplate;

    // Parent node in the URI path tree; null for the docroot.
    private final Resource _ParentResource;

    private final String _FullPath;

    private final String _ParentPath;

    // Child resources keyed by literal path segment (e.g. "users").
    private final ConcurrentHashMap<String, Resource> _LiteralPathSubresources;

    // Child resources keyed by variable path segment (e.g. "{userId}").
    private final ConcurrentHashMap<String, Resource> _VariablePathSubresources;

    /**
     * The ways in which resources may reference us.
     */
    private final ConcurrentHashMap<URI, LinkTemplate> _ReferenceTemplates;

    private final Set<Method> _ReferenceMethods;

    private final ConcurrentHashMap<Method, Set<URI>> _ReferenceTemplateMethodToLinkRelationUrisMap;

    private final ConcurrentHashMap<Method, Set<URI>> _ReferenceTemplateMethodToRequestSchemaUrisMap;

    private final ConcurrentHashMap<Method, Set<URI>> _ReferenceTemplateMethodToResponseSchemaUriMap;

    /**
     * The ways in which we may link to (or reference) resources.
     */
    private final ConcurrentHashMap<URI, LinkTemplate> _LinkTemplates;

    /**
     * <p>
     * The {@link Resource} constructor compiles a "chunk" of the {@link Api} metadata; an individual
     * {@link ResourceTemplate}. It is part of a runtime-constructed tree structure that represents each URI path '/' as
     * a hierarchical tree of {@link Resource} nodes.
     * </p>
     * <p/>
     * <p>
     * If an {@link Api} were a regex input string, and an {@link ApiNavigator} was its corresponding Regex compilation;
     * then a {@link Resource} would be a subexpression, a nested component within the compiled (optimized) regex. The
     * {@link Resource} (along with the {@link ApiNavigator}) compile {@link Api} metadata so that it is ready to be
     * used by the runtime for "pattern matching" (request routing by the framework).
     * </p>
     */
    Resource(final ApiNavigator apiNavigator, final ResourceTemplate resourceTemplate, final Resource parentResource) {

        if (apiNavigator == null) {
            throw new ResourceException("The apiNavigator may not be null", null, this);
        }

        if (resourceTemplate == null) {
            throw new ResourceException("The resource template may not be null", null, this);
        }

        _ApiNavigator = apiNavigator;
        _ResourceTemplate = resourceTemplate;
        _ParentResource = parentResource;
        _FullPath = getFullPath(parentResource);

        if (_ParentResource != null) {
            _ParentPath = _ParentResource.getPathText();
        }
        else {
            _ParentPath = null;
        }

        final Api api = apiNavigator.getApi();
        final Context context = api.getContext();
        final ApiLoader apiLoader = context.getApiLoader();
        final SyntaxLoader syntaxLoader = context.getSyntaxLoader();

        final URI apiUri = api.getUri();
        final String uriTemplateString = StringUtils.join(apiUri.toString(), _FullPath);
        LOGGER.debug("creating resource with uriTemplateString={} and _FullPath={}", uriTemplateString, _FullPath);
        _UriTemplate = new UriTemplate(syntaxLoader, uriTemplateString);
        _LiteralPathSubresources = new ConcurrentHashMap<String, Resource>();
        _VariablePathSubresources = new ConcurrentHashMap<String, Resource>();
        _LinkTemplates = new ConcurrentHashMap<URI, LinkTemplate>();

        // The reference templates are API metadata that describe possible "request/link" types that may target this
        // resource as an endpoint.
        _ReferenceTemplates = new ConcurrentHashMap<URI, LinkTemplate>();
        _ReferenceMethods = Collections.newSetFromMap(new ConcurrentHashMap<Method, Boolean>());
        _ReferenceTemplateMethodToLinkRelationUrisMap = new ConcurrentHashMap<Method, Set<URI>>();
        _ReferenceTemplateMethodToRequestSchemaUrisMap = new ConcurrentHashMap<Method, Set<URI>>();
        _ReferenceTemplateMethodToResponseSchemaUriMap = new ConcurrentHashMap<Method, Set<URI>>();

        final UUID resourceTemplateId = _ResourceTemplate.getUniqueId();
        final List<LinkTemplate> linkTemplates = api.getLinkTemplates();
        for (final LinkTemplate linkTemplate : linkTemplates) {

            final URI linkRelationUri = linkTemplate.getLinkRelationUri();
            if (linkRelationUri == null) {
                continue;
            }

            final UUID endPointId = linkTemplate.getEndPointId();
            if (endPointId != null && endPointId.equals(resourceTemplateId)) {

                // This link template targets us: index it as an inbound reference.
                _ReferenceTemplates.put(linkRelationUri, linkTemplate);

                final LinkTemplate reference = linkTemplate;

                // Each reference has an associate link relation which is it's "metafunction".
                final SchemaLoader schemaLoader = context.getSchemaLoader();
                final URI documentSchemaUriConstant = schemaLoader.getDocumentSchemaUri();
                final Keys relKeys = apiLoader.buildDocumentKeys(linkRelationUri, schemaLoader.getLinkRelationSchemaUri());
                final Dimensions relDimensions = apiNavigator.getLinkRelationDimensions();
                final LinkRelation rel = context.getModel(relKeys, relDimensions);

                if (rel == null) {
                    throw new ResourceException("The link relation: " + linkRelationUri + " was not found", null, this);
                }

                // The interaction method associated with the link relation matches the parameter.
                final Method requestMethod = rel.getMethod();
                _ReferenceMethods.add(requestMethod);

                if (!_ReferenceTemplateMethodToLinkRelationUrisMap.containsKey(requestMethod)) {
                    _ReferenceTemplateMethodToLinkRelationUrisMap.put(requestMethod, new LinkedHashSet<URI>());
                }

                final Set<URI> linkRelationUris = _ReferenceTemplateMethodToLinkRelationUrisMap.get(requestMethod);
                linkRelationUris.add(linkRelationUri);

                if (!_ReferenceTemplateMethodToRequestSchemaUrisMap.containsKey(requestMethod)) {
                    _ReferenceTemplateMethodToRequestSchemaUrisMap.put(requestMethod, new LinkedHashSet<URI>());
                }

                final Set<URI> requestSchemaUris = _ReferenceTemplateMethodToRequestSchemaUrisMap.get(requestMethod);

                // The API's reference template may have defined its own API-specific argument type
                final URI referenceRequestSchemaUri = reference.getRequestSchemaUri();
                if (referenceRequestSchemaUri != null) {
                    requestSchemaUris.add(referenceRequestSchemaUri);
                }

                // The reference's link relation may have defined a generic, reusable argument type
                final URI relRequestSchemaUri = rel.getRequestSchemaUri();
                if (relRequestSchemaUri != null && !documentSchemaUriConstant.equals(relRequestSchemaUri)) {
                    requestSchemaUris.add(relRequestSchemaUri);
                }

                if (!_ReferenceTemplateMethodToResponseSchemaUriMap.containsKey(requestMethod)) {
                    _ReferenceTemplateMethodToResponseSchemaUriMap.put(requestMethod, new LinkedHashSet<URI>());
                }

                final Set<URI> responseSchemaUris = _ReferenceTemplateMethodToResponseSchemaUriMap.get(requestMethod);

                // The API's reference template may have defined its own API-specific response type
                final URI referenceResponseSchemaUri = reference.getResponseSchemaUri();
                if (referenceResponseSchemaUri != null) {
                    responseSchemaUris.add(referenceResponseSchemaUri);
                }

                // The reference's link relation may have defined a generic, reusable response type
                final URI relResponseSchemaUri = rel.getResponseSchemaUri();
                if (relResponseSchemaUri != null && !documentSchemaUriConstant.equals(relResponseSchemaUri)) {
                    responseSchemaUris.add(relResponseSchemaUri);
                }
            }

            final UUID referrerId = linkTemplate.getReferrerId();
            if (referrerId != null && referrerId.equals(resourceTemplateId)) {
                // We are the referrer: index it as an outbound link.
                _LinkTemplates.put(linkRelationUri, linkTemplate);
            }
        }
    }

    // Builds this resource's full path by appending its own segment to the parent's path.
    private final String getFullPath(final Resource parentResource) {

        final StringBuffer sb = new StringBuffer();

        boolean appendPathSeparator = true;
        if (parentResource != null && parentResource.getPathText() != null) {
            final String text = parentResource.getPathText();
            sb.append(text);
            // Avoid a double '/' when the parent path already ends with one.
            if (text.endsWith(ApiNavigator.PATH_SEPARATOR)) {
                appendPathSeparator = false;
            }
        }

        if (appendPathSeparator) {
            sb.append(ApiNavigator.PATH_SEPARATOR);
        }

        final String pathSegment = getPathSegment();
        if (StringUtils.isNotEmpty(pathSegment)) {
            sb.append(pathSegment);
        }

        return sb.toString();
    }

    /**
     * @return a flattened {@link List} of all child and sub-child {@link Resource}s (recursive).
     */
    public List<Resource> getAllChildResources() {

        final List<Resource> allChildResources = new LinkedList<>();

        final List<ResourceTemplate> childResourceTemplates = this._ResourceTemplate.getChildren();
        for (final ResourceTemplate childResourceTemplate : childResourceTemplates) {
            final Resource childResource = this._ApiNavigator.getResource(childResourceTemplate.getUniqueId());
            if (childResource != null) {
                allChildResources.add(childResource);
                allChildResources.addAll(childResource.getAllChildResources());
            }
        }

        return allChildResources;
    }

    /**
     * @return the {@link ApiNavigator} that owns this {@link Resource}.
     */
    public ApiNavigator getApiNavigator() {
        return _ApiNavigator;
    }

    /**
     * Evaluates this resource's {@link UriTemplate} using default values for each template parameter,
     * sourcing defaults from the default schema's key slots or, failing that, from GET response schemas.
     *
     * @return a URI built from default parameter values (parameters may remain null if no default is found).
     */
    public URI getDefaultDocumentUri() {

        final UriTemplate uriTemplate = getUriTemplate();
        final String[] parameterNames = uriTemplate.getParameterNames();
        final Map<String, Object> parameterMap = new LinkedHashMap<>();

        if (parameterNames != null && parameterNames.length > 0) {
            final Api api = getApiNavigator().getApi();
            final Context context = api.getContext();
            final SchemaLoader schemaLoader = context.getSchemaLoader();
            final URI defaultSchemaUri = getDefaultSchemaUri();
            final Prototype defaultPrototype = (defaultSchemaUri != null) ? schemaLoader.getPrototype(defaultSchemaUri) : null;

            for (int i = 0; i < parameterNames.length; i++) {
                final String parameterName = parameterNames[i];

                URI keyedSchemaUri = null;

                // Prefer the resource's own default schema if the parameter matches one of its key slots.
                if (defaultPrototype != null) {
                    final Set<String> allKeySlotNames = defaultPrototype.getAllKeySlotNames();
                    if (allKeySlotNames != null && allKeySlotNames.contains(parameterName)) {
                        keyedSchemaUri = defaultSchemaUri;
                    }
                }

                // Otherwise search GET response schemas for one that keys on this parameter.
                if (keyedSchemaUri == null) {

                    final ConcurrentHashMap<URI, LinkTemplate> referenceTemplates = getReferenceTemplates();
                    if (referenceTemplates != null && !referenceTemplates.isEmpty()) {

                        final Set<URI> referenceLinkRelationUris = getReferenceLinkRelationUris(Method.Get);
                        if (referenceLinkRelationUris != null && !referenceLinkRelationUris.isEmpty()) {
                            for (URI linkRelationUri : referenceLinkRelationUris) {
                                final LinkTemplate referenceTemplate = referenceTemplates.get(linkRelationUri);
                                final URI responseSchemaUri = referenceTemplate.getResponseSchemaUri();
                                final Prototype responseSchemaPrototype = schemaLoader.getPrototype(responseSchemaUri);
                                if (responseSchemaPrototype != null) {
                                    final Set<String> allKeySlotNames = responseSchemaPrototype.getAllKeySlotNames();
                                    if (allKeySlotNames != null && allKeySlotNames.contains(parameterName)) {
                                        keyedSchemaUri = responseSchemaUri;
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }

                Object defaultValue = null;

                if (keyedSchemaUri != null) {
                    final Prototype keyedPrototype = schemaLoader.getPrototype(keyedSchemaUri);
                    final ProtoSlot keyProtoSlot = keyedPrototype.getProtoSlot(parameterName);
                    if (keyProtoSlot instanceof PropertyProtoSlot) {
                        final PropertyProtoSlot keyPropertyProtoSlot = (PropertyProtoSlot) keyProtoSlot;

                        // TODO: Allow more fine grain control of the default parameter value

                        defaultValue = keyPropertyProtoSlot.getDefaultValue();

                        if (defaultValue == null) {
                            defaultValue = keyPropertyProtoSlot.getValueType().getDefaultValue();
                        }
                    }
                }

                parameterMap.put(parameterName, defaultValue);
            }
        }

        return uriTemplate.evaluate(parameterMap, true);
    }

    /** @return the default schema URI declared by this resource's template. */
    public URI getDefaultSchemaUri() {
        return getResourceTemplate().getDefaultSchemaUri();
    }

    /**
     * Evaluates this resource's {@link UriTemplate} with parameter values pulled from the given
     * {@link Document}'s slots; returns null if any required slot is missing or null.
     */
    public URI getDocumentUri(final Document document) {

        final UriTemplate uriTemplate = getUriTemplate();
        final String[] parameterNames = uriTemplate.getParameterNames();
        if (parameterNames == null) {
            // Fixed URI (no template variables).
            return uriTemplate.evaluate(null);
        }
        else {
            final Map<String, Object> parameterMap = new LinkedHashMap<>();

            for (final String parameterName : parameterNames) {

                if (!document.containsSlotValue(parameterName)) {
                    return null;
                }

                Object parameterValue = document.getSlotValue(parameterName);
                if (parameterValue == null) {
                    return null;
                }

                parameterMap.put(parameterName, parameterValue);
            }

            return uriTemplate.evaluate(parameterMap);
        }
    }

    /**
     * A mapping of {@link LinkRelation} id ({@link URI}) to {@link LinkTemplate} model instance. This (conceptual) set
     * of {@link LinkTemplate}s represent the ways in which we may link to (reference) resources.
     *
     * @return A map of link relation id to link template, which communicates the ways that this {@link Resource} may
     * reference other {@link Resource}s.
     */
    public ConcurrentHashMap<URI, LinkTemplate> getLinkTemplates() {
        return _LinkTemplates;
    }

    /** @return child resources keyed by literal (non-variable) path segment. */
    public ConcurrentHashMap<String, Resource> getLiteralPathSubresources() {
        return _LiteralPathSubresources;
    }

    public Resource getParentResource() {
        return _ParentResource;
    }

    /**
     * @return this resource's ancestry ordered from root to self; the docroot is
     * included only when {@code includeDocroot} is true.
     */
    public List<Resource> getPath(final boolean includeDocroot) {

        final List<Resource> path = new ArrayList<>();
        Resource parent = getParentResource();
        if (!includeDocroot && parent == null) {
            // This IS the docroot and the caller doesn't want it.
            return path;
        }

        path.add(this);

        if (parent == null) {
            return path;
        }

        while (parent != null) {
            path.add(parent);
            parent = parent.getParentResource();
        }

        if (!includeDocroot) {
            // The docroot ended up last (pre-reverse); drop it.
            path.remove(path.size() - 1);
        }

        Collections.reverse(path);
        return path;
    }

    public String getPathSegment() {
        return this._ResourceTemplate.getPathSegment();
    }

    public String getPathText() {
        return _FullPath;
    }

    public String getParentPathText() {
        return _ParentPath;
    }

    /** @return link relation URIs of inbound references using the given method, or null if none. */
    public Set<URI> getReferenceLinkRelationUris(final Method requestMethod) {
        return _ReferenceTemplateMethodToLinkRelationUrisMap.get(requestMethod);
    }

    /**
     * A mapping of {@link LinkRelation} id ({@link URI}) to {@link LinkTemplate} model instance. This (conceptual) set
     * of {@link LinkTemplate}s represent the ways in which other resources may link to (reference) us.
     *
     * @return The ways that we may be referenced by other resources.
     */
    public ConcurrentHashMap<URI, LinkTemplate> getReferenceTemplates() {
        return _ReferenceTemplates;
    }

    public Set<Method> getReferenceMethods() {
        return _ReferenceMethods;
    }

    /** @return request schema URIs accepted for the given method, or null if none. */
    public Set<URI> getRequestSchemaUris(final Method requestMethod) {
        return _ReferenceTemplateMethodToRequestSchemaUrisMap.get(requestMethod);
    }

    public ResourceTemplate getResourceTemplate() {
        return _ResourceTemplate;
    }

    public UUID getResourceTemplateId() {
        if (_ResourceTemplate == null) {
            return null;
        }
        return _ResourceTemplate.getUniqueId();
    }

    /** @return response schema URIs produced for the given method, or null if none. */
    public Set<URI> getResponseSchemaUris(final Method requestMethod) {
        return _ReferenceTemplateMethodToResponseSchemaUriMap.get(requestMethod);
    }

    /**
     * Generates the "href" URI used to refer to this resource from the specified referrer {@link Model} instance using
     * the specified {@link LinkRelation} {@link URI} value.
     */
    public URI getHrefUri(final Model referrer, final URI referenceRelationUri) {

        if (referrer == null) {
            return null;
        }

        /*
         * Given the nature of the Api's ResourceTemplate metadata tree, the runtime resource can determine its own
         * UriTemplate (and it only needs to determine/compute this once).
         */
        final UriTemplate uriTemplate = getUriTemplate();
        if (uriTemplate == null) {
            return null;
        }

        /*
         * Get the end point id's templated parameter names, for example: a UriTemplate might have slots that look like
         * {teamId} or {highScoreId} or {name} appearing where legal (according to UriTemplate syntax, see:
         * http://tools.ietf.org/html/rfc6570). A fixed URI, meaning a UriTemplate with no variables, will return null
         * here.
         */
        final String[] uriTemplateParameterNames = this._UriTemplate.getParameterNames();
        Map<String, Object> parameterMap = null;

        if (uriTemplateParameterNames != null && uriTemplateParameterNames.length > 0) {

            // Get the Link slot's bindings, which may be used to provide an alternative source for one or more URI
            // template parameter values.
            final Prototype referrerPrototype = referrer.getPrototype();
            final SortedMap<URI, LinkProtoSlot> linkProtoSlots = referrerPrototype.getLinkProtoSlots();

            Map<String, ProtoValueSource> linkSlotBindings = null;
            if (linkProtoSlots != null && !linkProtoSlots.isEmpty()) {
                final LinkProtoSlot linkProtoSlot = linkProtoSlots.get(referenceRelationUri);
                if (linkProtoSlot != null) {
                    linkSlotBindings = linkProtoSlot.getLinkSlotBindings();
                }
            }

            parameterMap = new LinkedHashMap<>(uriTemplateParameterNames.length);

            for (final String paramName : uriTemplateParameterNames) {

                final Object paramValue;

                if (linkSlotBindings != null && linkSlotBindings.containsKey(paramName)) {
                    // The link slot has declared a binding to an alternate source for this URI template parameter's
                    // value.
                    final ProtoValueSource valueSource = linkSlotBindings.get(paramName);
                    paramValue = valueSource.getValue(referrer);
                }
                else {
                    // By default, if the end point's UriTemplate has parameters (blanks) to fill in, then by convention
                    // we
                    // assume that the referrer model has the corresponding slot values to match the UriTemplate's
                    // inputs/slots.
                    //
                    // Put simply, (by default) referrer model slot names "match" UriTemplate param names.
                    //
                    // This enforces that the model's own represented resource state is the only thing used to
                    // (automatically) drive the link-based graph traversal (aka HATEOAS). Its also a simple convention
                    // that
                    // is (reasonably) easy to comprehend, hopefully even intuitive.
                    paramValue = referrer.getSlotValue(paramName);
                }

                parameterMap.put(paramName, paramValue);
            }
        }

        final URI uri = this._UriTemplate.evaluate(parameterMap);
        return uri;
    }

    public UriTemplate getUriTemplate() {
        return _UriTemplate;
    }

    /**
     * @return a sorted map of variable path child resources (i.e. {keySlotName})
     */
    public ConcurrentHashMap<String, Resource> getVariablePathSubresources() {
        return _VariablePathSubresources;
    }

    /** @return true when this resource is the root of the URI path tree (has no parent). */
    public boolean isDocroot() {
        return (getParentResource() == null);
    }

    /**
     * Adds a resource to this resource's list of subresources, differentiating based on its inclusion of the {
     * character whether it's a literal or variable subresource
     *
     * @param subresource the child resource to index by its path segment.
     */
    void addSubresource(final Resource subresource) {
        final String pathSegment = subresource.getPathSegment();
        if (StringUtils.containsAny(pathSegment, '{')) {
            addVariablePathSubresource(pathSegment, subresource);
        }
        else {
            addLiteralPathSubresource(pathSegment, subresource);
        }
    }

    private void addLiteralPathSubresource(final String pathSegment, final Resource subresource) {
        _LiteralPathSubresources.put(pathSegment, subresource);
    }

    private void addVariablePathSubresource(final String pathSegment, final Resource subresource) {
        _VariablePathSubresources.put(pathSegment, subresource);
    }

    @Override
    public String toString() {
        return String.format(TO_STRING_FORMAT, getResourceTemplateId(), _UriTemplate, _FullPath);
    }

    @Override
    public int compareTo(final Resource otherResource) {
        // Resources order lexicographically by their full path.
        return ComparisonChain.start().compare(this._FullPath, otherResource._FullPath).result();
    }

    /**
     * Extracts the URI-template parameters ("surrogate key" components) from the given URI, but only
     * when this resource's GET response schemas are compatible with the supplied prototype; otherwise null.
     */
    public SortedSet<Parameter> getSurrogateKeyComponents(final URI uri, final Prototype prototype) {

        final Set<URI> responseSchemaUris = getResponseSchemaUris(Method.Get);
        if (responseSchemaUris == null) {
            return null;
        }

        boolean isCompatibleResource = false;
        for (final URI responseSchemaUri : responseSchemaUris) {
            if (prototype.isAssignableFrom(responseSchemaUri)) {
                isCompatibleResource = true;
                break;
            }
        }

        if (!isCompatibleResource) {
            return null;
        }

        final UriTemplate uriTemplate = getUriTemplate();
        return uriTemplate.getParameters(uri);
    }
}
/* * Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package java.awt.dnd; import java.awt.Component; import java.awt.Cursor; import java.awt.GraphicsEnvironment; import java.awt.HeadlessException; import java.awt.Image; import java.awt.Point; import java.awt.Toolkit; import java.awt.datatransfer.FlavorMap; import java.awt.datatransfer.SystemFlavorMap; import java.awt.datatransfer.Transferable; import java.awt.dnd.peer.DragSourceContextPeer; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.security.AccessController; import java.util.EventListener; import sun.awt.dnd.SunDragSourceContextPeer; import sun.security.action.GetIntegerAction; /** * The <code>DragSource</code> is the entity responsible * for the initiation of the Drag * and Drop operation, and may be used in a number of scenarios: * <UL> * <LI>1 default instance per JVM for the lifetime of that JVM. * <LI>1 instance per class of potential Drag Initiator object (e.g * TextField). [implementation dependent] * <LI>1 per instance of a particular * <code>Component</code>, or application specific * object associated with a <code>Component</code> * instance in the GUI. [implementation dependent] * <LI>Some other arbitrary association. [implementation dependent] *</UL> * * Once the <code>DragSource</code> is * obtained, a <code>DragGestureRecognizer</code> should * also be obtained to associate the <code>DragSource</code> * with a particular * <code>Component</code>. * <P> * The initial interpretation of the user's gesture, * and the subsequent starting of the drag operation * are the responsibility of the implementing * <code>Component</code>, which is usually * implemented by a <code>DragGestureRecognizer</code>. *<P> * When a drag gesture occurs, the * <code>DragSource</code>'s * startDrag() method shall be * invoked in order to cause processing * of the user's navigational * gestures and delivery of Drag and Drop * protocol notifications. 
A
 * <code>DragSource</code> shall only
 * permit a single Drag and Drop operation to be
 * current at any one time, and shall
 * reject any further startDrag() requests
 * by throwing an <code>IllegalDnDOperationException</code>
 * until such time as the extant operation is complete.
 * <P>
 * The startDrag() method invokes the
 * createDragSourceContext() method to
 * instantiate an appropriate
 * <code>DragSourceContext</code>
 * and associate the <code>DragSourceContextPeer</code>
 * with that.
 * <P>
 * If the Drag and Drop System is
 * unable to initiate a drag operation for
 * some reason, the startDrag() method throws
 * a <code>java.awt.dnd.InvalidDnDOperationException</code>
 * to signal such a condition. Typically this
 * exception is thrown when the underlying platform
 * system is either not in a state to
 * initiate a drag, or the parameters specified are invalid.
 * <P>
 * Note that during the drag, the
 * set of operations exposed by the source
 * at the start of the drag operation may not change
 * until the operation is complete.
 * The operation(s) are constant for the
 * duration of the operation with respect to the
 * <code>DragSource</code>.
 *
 * @since 1.2
 */
public class DragSource implements Serializable {

    private static final long serialVersionUID = 6236096958971414066L;

    /*
     * Loads a system default cursor from the named desktop property.
     * Returns null in a headless environment.
     */
    private static Cursor load(String name) {
        if (GraphicsEnvironment.isHeadless()) {
            return null;
        }

        try {
            return (Cursor)Toolkit.getDefaultToolkit().getDesktopProperty(name);
        } catch (Exception e) {
            e.printStackTrace();
            // Chain the original exception so the real cause is preserved
            // instead of being flattened into the message string.
            throw new RuntimeException("failed to load system cursor: " +
                                       name + " : " + e.getMessage(), e);
        }
    }

    /**
     * The default <code>Cursor</code> to use with a copy operation indicating
     * that a drop is currently allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultCopyDrop =
        load("DnD.Cursor.CopyDrop");

    /**
     * The default <code>Cursor</code> to use with a move operation indicating
     * that a drop is currently allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultMoveDrop =
        load("DnD.Cursor.MoveDrop");

    /**
     * The default <code>Cursor</code> to use with a link operation indicating
     * that a drop is currently allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultLinkDrop =
        load("DnD.Cursor.LinkDrop");

    /**
     * The default <code>Cursor</code> to use with a copy operation indicating
     * that a drop is currently not allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultCopyNoDrop =
        load("DnD.Cursor.CopyNoDrop");

    /**
     * The default <code>Cursor</code> to use with a move operation indicating
     * that a drop is currently not allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultMoveNoDrop =
        load("DnD.Cursor.MoveNoDrop");

    /**
     * The default <code>Cursor</code> to use with a link operation indicating
     * that a drop is currently not allowed. <code>null</code> if
     * <code>GraphicsEnvironment.isHeadless()</code> returns <code>true</code>.
     *
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static final Cursor DefaultLinkNoDrop =
        load("DnD.Cursor.LinkNoDrop");

    // The per-JVM default DragSource; null when headless (the accessor throws).
    private static final DragSource dflt =
        (GraphicsEnvironment.isHeadless()) ? null : new DragSource();

    /**
     * Internal constants for serialization.
     */
    static final String dragSourceListenerK = "dragSourceL";
    static final String dragSourceMotionListenerK = "dragSourceMotionL";

    /**
     * Gets the <code>DragSource</code> object associated with
     * the underlying platform.
     *
     * @return the platform DragSource
     * @exception HeadlessException if GraphicsEnvironment.isHeadless()
     *            returns true
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public static DragSource getDefaultDragSource() {
        if (GraphicsEnvironment.isHeadless()) {
            throw new HeadlessException();
        } else {
            return dflt;
        }
    }

    /**
     * Reports
     * whether or not drag
     * <code>Image</code> support
     * is available on the underlying platform.
     * <P>
     * @return if the Drag Image support is available on this platform
     */
    public static boolean isDragImageSupported() {
        try {
            // Desktop property is a Boolean; any failure (missing property,
            // wrong type, null) means the feature is unavailable.
            Boolean supported =
                (Boolean)Toolkit.getDefaultToolkit().
                    getDesktopProperty("DnD.isDragImageSupported");

            return supported.booleanValue();
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Creates a new <code>DragSource</code>.
     *
     * @exception HeadlessException if GraphicsEnvironment.isHeadless()
     *            returns true
     * @see java.awt.GraphicsEnvironment#isHeadless
     */
    public DragSource() throws HeadlessException {
        if (GraphicsEnvironment.isHeadless()) {
            throw new HeadlessException();
        }
    }

    /**
     * Start a drag, given the <code>DragGestureEvent</code>
     * that initiated the drag, the initial
     * <code>Cursor</code> to use,
     * the <code>Image</code> to drag,
     * the offset of the <code>Image</code> origin
     * from the hotspot of the <code>Cursor</code> at
     * the instant of the trigger,
     * the <code>Transferable</code> subject data
     * of the drag, the <code>DragSourceListener</code>,
     * and the <code>FlavorMap</code>.
     * <P>
     * @param trigger        the <code>DragGestureEvent</code> that initiated the drag
     * @param dragCursor     the initial {@code Cursor} for this drag operation
     *                       or {@code null} for the default cursor handling;
     *                       see <a href="DragSourceContext.html#defaultCursor">DragSourceContext</a>
     *                       for more details on the cursor handling mechanism during drag and drop
     * @param dragImage      the image to drag or {@code null}
     * @param imageOffset    the offset of the <code>Image</code> origin from the hotspot
     *                       of the <code>Cursor</code> at the instant of the trigger
     * @param transferable   the subject data of the drag
     * @param dsl            the <code>DragSourceListener</code>
     * @param flavorMap      the <code>FlavorMap</code> to use, or <code>null</code>
     * <P>
     * @throws java.awt.dnd.InvalidDnDOperationException
     *         if the Drag and Drop
     *         system is unable to initiate a drag operation, or if the user
     *         attempts to start a drag while an existing drag operation
     *         is still executing
     */
    public void startDrag(DragGestureEvent   trigger,
                          Cursor             dragCursor,
                          Image              dragImage,
                          Point              imageOffset,
                          Transferable       transferable,
                          DragSourceListener dsl,
                          FlavorMap          flavorMap) throws InvalidDnDOperationException {

        // Mark the drag in progress up front; the peer rejects concurrent drags.
        SunDragSourceContextPeer.setDragDropInProgress(true);

        try {
            if (flavorMap != null) this.flavorMap = flavorMap;

            DragSourceContextPeer dscp = Toolkit.getDefaultToolkit().createDragSourceContextPeer(trigger);

            DragSourceContext     dsc = createDragSourceContext(dscp,
                                                                trigger,
                                                                dragCursor,
                                                                dragImage,
                                                                imageOffset,
                                                                transferable,
                                                                dsl
                                                                );

            if (dsc == null) {
                throw new InvalidDnDOperationException();
            }

            dscp.startDrag(dsc, dsc.getCursor(), dragImage, imageOffset); // may throw
        } catch (RuntimeException e) {
            // Drag failed to start: clear the in-progress flag before rethrowing.
            SunDragSourceContextPeer.setDragDropInProgress(false);
            throw e;
        }
    }

    /**
     * Start a drag, given the <code>DragGestureEvent</code>
     * that initiated the drag, the initial
     * <code>Cursor</code> to use,
     * the <code>Transferable</code> subject data
     * of the drag, the <code>DragSourceListener</code>,
     * and the <code>FlavorMap</code>.
     * <P>
     * @param trigger        the <code>DragGestureEvent</code> that
     *                       initiated the drag
     * @param dragCursor     the initial {@code Cursor} for this drag operation
     *                       or {@code null} for the default cursor handling;
     *                       see <a href="DragSourceContext.html#defaultCursor">DragSourceContext</a>
     *                       for more details on the cursor handling mechanism during drag and drop
     * @param transferable   the subject data of the drag
     * @param dsl            the <code>DragSourceListener</code>
     * @param flavorMap      the <code>FlavorMap</code> to use or <code>null</code>
     * <P>
     * @throws java.awt.dnd.InvalidDnDOperationException
     *         if the Drag and Drop
     *         system is unable to initiate a drag operation, or if the user
     *         attempts to start a drag while an existing drag operation
     *         is still executing
     */
    public void startDrag(DragGestureEvent   trigger,
                          Cursor             dragCursor,
                          Transferable       transferable,
                          DragSourceListener dsl,
                          FlavorMap          flavorMap) throws InvalidDnDOperationException {
        startDrag(trigger, dragCursor, null, null, transferable, dsl, flavorMap);
    }

    /**
     * Start a drag, given the <code>DragGestureEvent</code>
     * that initiated the drag, the initial <code>Cursor</code>
     * to use,
     * the <code>Image</code> to drag,
     * the offset of the <code>Image</code> origin
     * from the hotspot of the <code>Cursor</code>
     * at the instant of the trigger,
     * the subject data of the drag, and
     * the <code>DragSourceListener</code>.
     * <P>
     * @param trigger        the <code>DragGestureEvent</code> that initiated the drag
     * @param dragCursor     the initial {@code Cursor} for this drag operation
     *                       or {@code null} for the default cursor handling;
     *                       see <a href="DragSourceContext.html#defaultCursor">DragSourceContext</a>
     *                       for more details on the cursor handling mechanism during drag and drop
     * @param dragImage      the <code>Image</code> to drag or <code>null</code>
     * @param dragOffset     the offset of the <code>Image</code> origin from the hotspot
     *                       of the <code>Cursor</code> at the instant of the trigger
     * @param transferable   the subject data of the drag
     * @param dsl            the <code>DragSourceListener</code>
     * <P>
     * @throws java.awt.dnd.InvalidDnDOperationException
     *         if the Drag and Drop
     *         system is unable to initiate a drag operation, or if the user
     *         attempts to start a drag while an existing drag operation
     *         is still executing
     */
    public void startDrag(DragGestureEvent   trigger,
                          Cursor             dragCursor,
                          Image              dragImage,
                          Point              dragOffset,
                          Transferable       transferable,
                          DragSourceListener dsl) throws InvalidDnDOperationException {
        startDrag(trigger, dragCursor, dragImage, dragOffset, transferable, dsl, null);
    }

    /**
     * Start a drag, given the <code>DragGestureEvent</code>
     * that initiated the drag, the initial
     * <code>Cursor</code> to
     * use,
     * the <code>Transferable</code> subject data
     * of the drag, and the <code>DragSourceListener</code>.
     * <P>
     * @param trigger        the <code>DragGestureEvent</code> that initiated the drag
     * @param dragCursor     the initial {@code Cursor} for this drag operation
     *                       or {@code null} for the default cursor handling;
     *                       see <a href="DragSourceContext.html#defaultCursor">DragSourceContext</a> class
     *                       for more details on the cursor handling mechanism during drag and drop
     * @param transferable   the subject data of the drag
     * @param dsl            the <code>DragSourceListener</code>
     * <P>
     * @throws java.awt.dnd.InvalidDnDOperationException
     *         if the Drag and Drop
     *         system is unable to initiate a drag operation, or if the user
     *         attempts to start a drag while an existing drag operation
     *         is still executing
     */
    public void startDrag(DragGestureEvent   trigger,
                          Cursor             dragCursor,
                          Transferable       transferable,
                          DragSourceListener dsl) throws InvalidDnDOperationException {
        startDrag(trigger, dragCursor, null, null, transferable, dsl, null);
    }

    /**
     * Creates the {@code DragSourceContext} to handle the current drag
     * operation.
     * <p>
     * To incorporate a new <code>DragSourceContext</code>
     * subclass, subclass <code>DragSource</code> and
     * override this method.
     * <p>
     * If <code>dragImage</code> is <code>null</code>, no image is used
     * to represent the drag over feedback for this drag operation, but
     * <code>NullPointerException</code> is not thrown.
     * <p>
     * If <code>dsl</code> is <code>null</code>, no drag source listener
     * is registered with the created <code>DragSourceContext</code>,
     * but <code>NullPointerException</code> is not thrown.
     *
     * @param dscp          The <code>DragSourceContextPeer</code> for this drag
     * @param dgl           The <code>DragGestureEvent</code> that triggered the
     *                      drag
     * @param dragCursor    The initial {@code Cursor} for this drag operation
     *                      or {@code null} for the default cursor handling;
     *                      see <a href="DragSourceContext.html#defaultCursor">DragSourceContext</a> class
     *                      for more details on the cursor handling mechanism during drag and drop
     * @param dragImage     The <code>Image</code> to drag or <code>null</code>
     * @param imageOffset   The offset of the <code>Image</code> origin from the
     *                      hotspot of the cursor at the instant of the trigger
     * @param t             The subject data of the drag
     * @param dsl           The <code>DragSourceListener</code>
     *
     * @return the <code>DragSourceContext</code>
     *
     * @throws NullPointerException if <code>dscp</code> is <code>null</code>
     * @throws NullPointerException if <code>dgl</code> is <code>null</code>
     * @throws NullPointerException if <code>dragImage</code> is not
     *         <code>null</code> and <code>imageOffset</code> is <code>null</code>
     * @throws NullPointerException if <code>t</code> is <code>null</code>
     * @throws IllegalArgumentException if the <code>Component</code>
     *         associated with the trigger event is <code>null</code>.
     * @throws IllegalArgumentException if the <code>DragSource</code> for the
     *         trigger event is <code>null</code>.
     * @throws IllegalArgumentException if the drag action for the
     *         trigger event is <code>DnDConstants.ACTION_NONE</code>.
     * @throws IllegalArgumentException if the source actions for the
     *         <code>DragGestureRecognizer</code> associated with the trigger
     *         event are equal to <code>DnDConstants.ACTION_NONE</code>.
     */
    protected DragSourceContext createDragSourceContext(DragSourceContextPeer dscp,
                                                        DragGestureEvent dgl,
                                                        Cursor dragCursor,
                                                        Image dragImage,
                                                        Point imageOffset,
                                                        Transferable t,
                                                        DragSourceListener dsl) {
        return new DragSourceContext(dscp, dgl, dragCursor, dragImage, imageOffset, t, dsl);
    }

    /**
     * This method returns the
     * <code>FlavorMap</code> for this <code>DragSource</code>.
     * <P>
     * @return the <code>FlavorMap</code> for this <code>DragSource</code>
     */
    public FlavorMap getFlavorMap() { return flavorMap; }

    /**
     * Creates a new <code>DragGestureRecognizer</code>
     * that implements the specified
     * abstract subclass of
     * <code>DragGestureRecognizer</code>, and
     * sets the specified <code>Component</code>
     * and <code>DragGestureListener</code> on
     * the newly created object.
     * <P>
     * @param recognizerAbstractClass the requested abstract type
     * @param actions                 the permitted source drag actions
     * @param c                       the <code>Component</code> target
     * @param dgl        the <code>DragGestureListener</code> to notify
     * <P>
     * @return the new <code>DragGestureRecognizer</code> or <code>null</code>
     *    if the <code>Toolkit.createDragGestureRecognizer</code> method
     *    has no implementation available for
     *    the requested <code>DragGestureRecognizer</code>
     *    subclass and returns <code>null</code>
     */
    public <T extends DragGestureRecognizer> T
        createDragGestureRecognizer(Class<T> recognizerAbstractClass,
                                    Component c,
                                    int actions,
                                    DragGestureListener dgl)
    {
        return Toolkit.getDefaultToolkit().createDragGestureRecognizer(recognizerAbstractClass, this, c, actions, dgl);
    }

    /**
     * Creates a new <code>DragGestureRecognizer</code>
     * that implements the default
     * abstract subclass of <code>DragGestureRecognizer</code>
     * for this <code>DragSource</code>,
     * and sets the specified <code>Component</code>
     * and <code>DragGestureListener</code> on the
     * newly created object.
     *
     * For this <code>DragSource</code>
     * the default is <code>MouseDragGestureRecognizer</code>.
     * <P>
     * @param c       the <code>Component</code> target for the recognizer
     * @param actions the permitted source actions
     * @param dgl     the <code>DragGestureListener</code> to notify
     * <P>
     * @return the new <code>DragGestureRecognizer</code> or <code>null</code>
     *    if the <code>Toolkit.createDragGestureRecognizer</code> method
     *    has no implementation available for
     *    the requested <code>DragGestureRecognizer</code>
     *    subclass and returns <code>null</code>
     */
    public DragGestureRecognizer createDefaultDragGestureRecognizer(Component c,
                                                                    int actions,
                                                                    DragGestureListener dgl) {
        return Toolkit.getDefaultToolkit().createDragGestureRecognizer(MouseDragGestureRecognizer.class, this, c, actions, dgl);
    }

    /**
     * Adds the specified <code>DragSourceListener</code> to this
     * <code>DragSource</code> to receive drag source events during drag
     * operations initiated with this <code>DragSource</code>.
     * If a <code>null</code> listener is specified, no action is taken and no
     * exception is thrown.
     *
     * @param dsl the <code>DragSourceListener</code> to add
     *
     * @see      #removeDragSourceListener
     * @see      #getDragSourceListeners
     * @since 1.4
     */
    public void addDragSourceListener(DragSourceListener dsl) {
        if (dsl != null) {
            synchronized (this) {
                listener = DnDEventMulticaster.add(listener, dsl);
            }
        }
    }

    /**
     * Removes the specified <code>DragSourceListener</code> from this
     * <code>DragSource</code>.
     * If a <code>null</code> listener is specified, no action is taken and no
     * exception is thrown.
     * If the listener specified by the argument was not previously added to
     * this <code>DragSource</code>, no action is taken and no exception
     * is thrown.
     *
     * @param dsl the <code>DragSourceListener</code> to remove
     *
     * @see      #addDragSourceListener
     * @see      #getDragSourceListeners
     * @since 1.4
     */
    public void removeDragSourceListener(DragSourceListener dsl) {
        if (dsl != null) {
            synchronized (this) {
                listener = DnDEventMulticaster.remove(listener, dsl);
            }
        }
    }

    /**
     * Gets all the <code>DragSourceListener</code>s
     * registered with this <code>DragSource</code>.
     *
     * @return all of this <code>DragSource</code>'s
     *         <code>DragSourceListener</code>s or an empty array if no
     *         such listeners are currently registered
     *
     * @see      #addDragSourceListener
     * @see      #removeDragSourceListener
     * @since 1.4
     */
    public DragSourceListener[] getDragSourceListeners() {
        // getListeners is generic; no cast needed.
        return getListeners(DragSourceListener.class);
    }

    /**
     * Adds the specified <code>DragSourceMotionListener</code> to this
     * <code>DragSource</code> to receive drag motion events during drag
     * operations initiated with this <code>DragSource</code>.
     * If a <code>null</code> listener is specified, no action is taken and no
     * exception is thrown.
     *
     * @param dsml the <code>DragSourceMotionListener</code> to add
     *
     * @see      #removeDragSourceMotionListener
     * @see      #getDragSourceMotionListeners
     * @since 1.4
     */
    public void addDragSourceMotionListener(DragSourceMotionListener dsml) {
        if (dsml != null) {
            synchronized (this) {
                motionListener = DnDEventMulticaster.add(motionListener, dsml);
            }
        }
    }

    /**
     * Removes the specified <code>DragSourceMotionListener</code> from this
     * <code>DragSource</code>.
     * If a <code>null</code> listener is specified, no action is taken and no
     * exception is thrown.
     * If the listener specified by the argument was not previously added to
     * this <code>DragSource</code>, no action is taken and no exception
     * is thrown.
     *
     * @param dsml the <code>DragSourceMotionListener</code> to remove
     *
     * @see      #addDragSourceMotionListener
     * @see      #getDragSourceMotionListeners
     * @since 1.4
     */
    public void removeDragSourceMotionListener(DragSourceMotionListener dsml) {
        if (dsml != null) {
            synchronized (this) {
                motionListener = DnDEventMulticaster.remove(motionListener, dsml);
            }
        }
    }

    /**
     * Gets all of the <code>DragSourceMotionListener</code>s
     * registered with this <code>DragSource</code>.
     *
     * @return all of this <code>DragSource</code>'s
     *         <code>DragSourceMotionListener</code>s or an empty array if no
     *         such listeners are currently registered
     *
     * @see      #addDragSourceMotionListener
     * @see      #removeDragSourceMotionListener
     * @since 1.4
     */
    public DragSourceMotionListener[] getDragSourceMotionListeners() {
        // getListeners is generic; no cast needed.
        return getListeners(DragSourceMotionListener.class);
    }

    /**
     * Gets all the objects currently registered as
     * <code><em>Foo</em>Listener</code>s upon this <code>DragSource</code>.
     * <code><em>Foo</em>Listener</code>s are registered using the
     * <code>add<em>Foo</em>Listener</code> method.
     *
     * @param listenerType the type of listeners requested; this parameter
     *          should specify an interface that descends from
     *          <code>java.util.EventListener</code>
     * @return an array of all objects registered as
     *          <code><em>Foo</em>Listener</code>s on this
     *          <code>DragSource</code>, or an empty array if no such listeners
     *          have been added
     * @exception ClassCastException if <code>listenerType</code>
     *          doesn't specify a class or interface that implements
     *          <code>java.util.EventListener</code>
     *
     * @see #getDragSourceListeners
     * @see #getDragSourceMotionListeners
     * @since 1.4
     */
    public <T extends EventListener> T[] getListeners(Class<T> listenerType) {
        EventListener l = null;
        if (listenerType == DragSourceListener.class) {
            l = listener;
        } else if (listenerType == DragSourceMotionListener.class) {
            l = motionListener;
        }
        return DnDEventMulticaster.getListeners(l, listenerType);
    }

    /**
     * This method calls <code>dragEnter</code> on the
     * <code>DragSourceListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceDragEvent</code>.
     *
     * @param dsde the <code>DragSourceDragEvent</code>
     */
    void processDragEnter(DragSourceDragEvent dsde) {
        DragSourceListener dsl = listener;
        if (dsl != null) {
            dsl.dragEnter(dsde);
        }
    }

    /**
     * This method calls <code>dragOver</code> on the
     * <code>DragSourceListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceDragEvent</code>.
     *
     * @param dsde the <code>DragSourceDragEvent</code>
     */
    void processDragOver(DragSourceDragEvent dsde) {
        DragSourceListener dsl = listener;
        if (dsl != null) {
            dsl.dragOver(dsde);
        }
    }

    /**
     * This method calls <code>dropActionChanged</code> on the
     * <code>DragSourceListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceDragEvent</code>.
     *
     * @param dsde the <code>DragSourceDragEvent</code>
     */
    void processDropActionChanged(DragSourceDragEvent dsde) {
        DragSourceListener dsl = listener;
        if (dsl != null) {
            dsl.dropActionChanged(dsde);
        }
    }

    /**
     * This method calls <code>dragExit</code> on the
     * <code>DragSourceListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceEvent</code>.
     *
     * @param dse the <code>DragSourceEvent</code>
     */
    void processDragExit(DragSourceEvent dse) {
        DragSourceListener dsl = listener;
        if (dsl != null) {
            dsl.dragExit(dse);
        }
    }

    /**
     * This method calls <code>dragDropEnd</code> on the
     * <code>DragSourceListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceDropEvent</code>.
     *
     * @param dsde the <code>DragSourceEvent</code>
     */
    void processDragDropEnd(DragSourceDropEvent dsde) {
        DragSourceListener dsl = listener;
        if (dsl != null) {
            dsl.dragDropEnd(dsde);
        }
    }

    /**
     * This method calls <code>dragMouseMoved</code> on the
     * <code>DragSourceMotionListener</code>s registered with this
     * <code>DragSource</code>, and passes them the specified
     * <code>DragSourceDragEvent</code>.
     *
     * @param dsde the <code>DragSourceEvent</code>
     */
    void processDragMouseMoved(DragSourceDragEvent dsde) {
        DragSourceMotionListener dsml = motionListener;
        if (dsml != null) {
            dsml.dragMouseMoved(dsde);
        }
    }

    /**
     * Serializes this <code>DragSource</code>. This method first performs
     * default serialization. Next, it writes out this object's
     * <code>FlavorMap</code> if and only if it can be serialized. If not,
     * <code>null</code> is written instead. Next, it writes out
     * <code>Serializable</code> listeners registered with this
     * object. Listeners are written in a <code>null</code>-terminated sequence
     * of 0 or more pairs. The pair consists of a <code>String</code> and an
     * <code>Object</code>; the <code>String</code> indicates the type of the
     * <code>Object</code> and is one of the following:
     * <ul>
     * <li><code>dragSourceListenerK</code> indicating a
     *     <code>DragSourceListener</code> object;
     * <li><code>dragSourceMotionListenerK</code> indicating a
     *     <code>DragSourceMotionListener</code> object.
     * </ul>
     *
     * @serialData Either a <code>FlavorMap</code> instance, or
     *      <code>null</code>, followed by a <code>null</code>-terminated
     *      sequence of 0 or more pairs; the pair consists of a
     *      <code>String</code> and an <code>Object</code>; the
     *      <code>String</code> indicates the type of the <code>Object</code>
     *      and is one of the following:
     *      <ul>
     *      <li><code>dragSourceListenerK</code> indicating a
     *          <code>DragSourceListener</code> object;
     *      <li><code>dragSourceMotionListenerK</code> indicating a
     *          <code>DragSourceMotionListener</code> object.
     *      </ul>.
     * @since 1.4
     */
    private void writeObject(ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();

        // Only write the flavor map when it can actually be serialized.
        s.writeObject(SerializationTester.test(flavorMap) ? flavorMap : null);

        DnDEventMulticaster.save(s, dragSourceListenerK, listener);
        DnDEventMulticaster.save(s, dragSourceMotionListenerK, motionListener);
        s.writeObject(null);
    }

    /**
     * Deserializes this <code>DragSource</code>. This method first performs
     * default deserialization. Next, this object's <code>FlavorMap</code> is
     * deserialized by using the next object in the stream.
     * If the resulting <code>FlavorMap</code> is <code>null</code>, this
     * object's <code>FlavorMap</code> is set to the default FlavorMap for
     * this thread's <code>ClassLoader</code>.
     * Next, this object's listeners are deserialized by reading a
     * <code>null</code>-terminated sequence of 0 or more key/value pairs
     * from the stream:
     * <ul>
     * <li>If a key object is a <code>String</code> equal to
     * <code>dragSourceListenerK</code>, a <code>DragSourceListener</code> is
     * deserialized using the corresponding value object and added to this
     * <code>DragSource</code>.
     * <li>If a key object is a <code>String</code> equal to
     * <code>dragSourceMotionListenerK</code>, a
     * <code>DragSourceMotionListener</code> is deserialized using the
     * corresponding value object and added to this <code>DragSource</code>.
     * <li>Otherwise, the key/value pair is skipped.
     * </ul>
     *
     * @see java.awt.datatransfer.SystemFlavorMap#getDefaultFlavorMap
     * @since 1.4
     */
    private void readObject(ObjectInputStream s)
      throws ClassNotFoundException, IOException {
        s.defaultReadObject();

        // 'flavorMap' was written explicitly
        flavorMap = (FlavorMap)s.readObject();

        // Implementation assumes 'flavorMap' is never null.
        if (flavorMap == null) {
            flavorMap = SystemFlavorMap.getDefaultFlavorMap();
        }

        Object keyOrNull;
        while (null != (keyOrNull = s.readObject())) {
            String key = ((String)keyOrNull).intern();

            if (dragSourceListenerK == key) {
                addDragSourceListener((DragSourceListener)(s.readObject()));
            } else if (dragSourceMotionListenerK == key) {
                addDragSourceMotionListener(
                    (DragSourceMotionListener)(s.readObject()));
            } else {
                // skip value for unrecognized key
                s.readObject();
            }
        }
    }

    /**
     * Returns the drag gesture motion threshold. The drag gesture motion threshold
     * defines the recommended behavior for {@link MouseDragGestureRecognizer}s.
     * <p>
     * If the system property <code>awt.dnd.drag.threshold</code> is set to
     * a positive integer, this method returns the value of the system property;
     * otherwise if a pertinent desktop property is available and supported by
     * the implementation of the Java platform, this method returns the value of
     * that property; otherwise this method returns some default value.
     * The pertinent desktop property can be queried using
     * <code>java.awt.Toolkit.getDesktopProperty("DnD.gestureMotionThreshold")</code>.
     *
     * @return the drag gesture motion threshold
     * @see MouseDragGestureRecognizer
     * @since 1.5
     */
    public static int getDragThreshold() {
        int ts = ((Integer)AccessController.doPrivileged(
                new GetIntegerAction("awt.dnd.drag.threshold", 0))).intValue();
        if (ts > 0) {
            return ts;
        } else {
            Integer td = (Integer)Toolkit.getDefaultToolkit().
                    getDesktopProperty("DnD.gestureMotionThreshold");
            if (td != null) {
                return td.intValue();
            }
        }
        // Fallback default when neither the system nor desktop property applies.
        return 5;
    }

    /*
     * fields
     */

    private transient FlavorMap flavorMap = SystemFlavorMap.getDefaultFlavorMap();

    private transient DragSourceListener listener;

    private transient DragSourceMotionListener motionListener;
}
package com.hubspot.baragon.data; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.io.BaseEncoding; import com.hubspot.baragon.config.ZooKeeperConfiguration; import com.hubspot.baragon.utils.JavaUtils; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.PathAndBytesable; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; // because curator is a piece of shit public abstract class AbstractDataStore { private static final Logger LOG = LoggerFactory.getLogger(AbstractDataStore.class); public enum OperationType { READ, WRITE } protected final CuratorFramework curatorFramework; protected final ObjectMapper objectMapper; protected final ZooKeeperConfiguration zooKeeperConfiguration; public static final Comparator<String> SEQUENCE_NODE_COMPARATOR_LOW_TO_HIGH = new Comparator<String>() { @Override public int compare(String o1, String o2) { return o1.substring(o1.length() - 10).compareTo(o2.substring(o2.length() - 10)); } }; public static final Comparator<String> SEQUENCE_NODE_COMPARATOR_HIGH_TO_LOW = new Comparator<String>() { @Override public int compare(String o1, String o2) { return o2.substring(o2.length() - 10).compareTo(o1.substring(o1.length() - 10)); } }; public AbstractDataStore( CuratorFramework curatorFramework, ObjectMapper objectMapper, ZooKeeperConfiguration zooKeeperConfiguration ) { this.curatorFramework = curatorFramework; this.objectMapper 
= objectMapper; this.zooKeeperConfiguration = zooKeeperConfiguration; } protected void log( OperationType type, Optional<Integer> numItems, Optional<Integer> bytes, long start, String path ) { final String message = String.format( "%s (items: %s) (bytes: %s) in %s (%s)", type, numItems.or(1), bytes.or(0), JavaUtils.duration(start), path ); final long duration = System.currentTimeMillis() - start; if ( ( bytes.isPresent() && bytes.get() > zooKeeperConfiguration.getDebugCuratorCallOverBytes() ) || (duration > zooKeeperConfiguration.getDebugCuratorCallOverMillis()) ) { LOG.debug(message); } else { LOG.trace(message); } } protected String encodeUrl(String url) { return BaseEncoding.base64Url().encode(url.getBytes(Charsets.UTF_8)); } protected String decodeUrl(String encodedUrl) { return new String(BaseEncoding.base64Url().decode(encodedUrl), Charsets.UTF_8); } protected String sanitizeNodeName(String name) { return name.contains("/") ? encodeUrl(name) : name; } protected boolean nodeExists(String path) { final long start = System.currentTimeMillis(); try { Stat stat = curatorFramework.checkExists().forPath(path); log( OperationType.READ, Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); return stat != null; } catch (KeeperException.NoNodeException e) { return false; } catch (Exception e) { throw Throwables.propagate(e); } } protected <T> void writeToZk(String path, T data) { final long start = System.currentTimeMillis(); try { final byte[] serializedInfo = serialize(data); final PathAndBytesable<?> builder; if (curatorFramework.checkExists().forPath(path) != null) { builder = curatorFramework.setData(); } else { builder = curatorFramework.create().creatingParentsIfNeeded(); } builder.forPath(path, serializedInfo); log( OperationType.WRITE, Optional.<Integer>absent(), Optional.of(serializedInfo.length), start, path ); } catch (Exception e) { throw Throwables.propagate(e); } } protected <T> byte[] serialize(T data) { try { return 
objectMapper.writeValueAsBytes(data); } catch (JsonProcessingException e) { throw Throwables.propagate(e); } } protected <T> Optional<T> readFromZk(final String path, final Class<T> klass) { final long start = System.currentTimeMillis(); Optional<byte[]> data = readFromZk(path); if (data.isPresent()) { log( OperationType.READ, Optional.<Integer>absent(), Optional.of(data.get().length), start, path ); if (data.get().length > 0) { return Optional.of(deserialize(data.get(), klass, path)); } } return Optional.absent(); } protected Optional<byte[]> readFromZk(String path) { try { byte[] data = curatorFramework.getData().forPath(path); if (data.length > 0) { return Optional.of(curatorFramework.getData().forPath(path)); } else { return Optional.absent(); } } catch (KeeperException.NoNodeException nne) { return Optional.absent(); } catch (Exception e) { throw Throwables.propagate(e); } } protected <T> T deserialize(byte[] data, Class<T> klass, String path) { try { return objectMapper.readValue(data, klass); } catch (JsonParseException jpe) { try { LOG.error( "Invalid Json at path {}: {}", path, new String(data, StandardCharsets.UTF_8), jpe ); } catch (Exception e) { LOG.error("Could not get raw json string at path {}", path, e); } throw Throwables.propagate(jpe); } catch (IOException e) { throw Throwables.propagate(e); } } protected String createNode(String path) { final long start = System.currentTimeMillis(); try { final String result = curatorFramework .create() .creatingParentsIfNeeded() .forPath(path); log( OperationType.WRITE, Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); return result; } catch (Exception e) { throw Throwables.propagate(e); } } protected String createPersistentSequentialNode(String path) { final long start = System.currentTimeMillis(); try { final String result = curatorFramework .create() .creatingParentsIfNeeded() .withMode(CreateMode.PERSISTENT_SEQUENTIAL) .forPath(path); log( OperationType.WRITE, 
Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); return result; } catch (Exception e) { throw Throwables.propagate(e); } } protected boolean deleteNode(String path) { return deleteNode(path, false); } protected boolean deleteNode(String path, boolean recursive) { final long start = System.currentTimeMillis(); try { if (recursive) { curatorFramework.delete().deletingChildrenIfNeeded().forPath(path); log( OperationType.WRITE, Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); } else { curatorFramework.delete().forPath(path); log( OperationType.WRITE, Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); } return true; } catch (KeeperException.NoNodeException e) { return false; } catch (Exception e) { throw Throwables.propagate(e); } } protected List<String> getChildren(String path) { final long start = System.currentTimeMillis(); try { List<String> children = curatorFramework.getChildren().forPath(path); log( OperationType.READ, Optional.of(children.size()), Optional.<Integer>absent(), start, path ); return children; } catch (KeeperException.NoNodeException e) { return Collections.emptyList(); } catch (Exception e) { throw Throwables.propagate(e); } } protected Optional<Long> getUpdatedAt(String path) { final long start = System.currentTimeMillis(); try { Stat stat = curatorFramework.checkExists().forPath(path); log( OperationType.READ, Optional.<Integer>absent(), Optional.<Integer>absent(), start, path ); if (stat != null) { return Optional.of(stat.getMtime()); } else { return Optional.absent(); } } catch (KeeperException.NoNodeException e) { return Optional.absent(); } catch (Exception e) { throw Throwables.propagate(e); } } }
/*******************************************************************************
 * Copyright 2014 University of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This code was developed by the Information Integration Group as part
 * of the Karma project at the Information Sciences Institute of the
 * University of Southern California.  For more information, publications,
 * and related projects, please see: http://www.isi.edu/integration
 ******************************************************************************/
package edu.isi.karma.kr2rml.template;

import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import edu.isi.karma.controller.command.selection.SuperSelection;
import edu.isi.karma.kr2rml.affinity.ColumnAffinity;
import edu.isi.karma.kr2rml.affinity.CommonParentRowColumnAffinity;
import edu.isi.karma.kr2rml.affinity.NoColumnAffinity;
import edu.isi.karma.kr2rml.affinity.ParentRowColumnAffinity;
import edu.isi.karma.kr2rml.affinity.RowColumnAffinity;
import edu.isi.karma.rep.HNodePath;
import edu.isi.karma.rep.Node;
import edu.isi.karma.rep.Row;

/**
 * Builds an execution plan for populating a set of column template terms.
 *
 * <p>Each column term gets a {@link TemplateTermSetPopulatorWorker}. Workers are
 * chained into a dependency graph: a term depends on whichever other term it has
 * the closest {@link ColumnAffinity} to (row-level, parent-row, or common-parent
 * affinity, checked in that order of registration). Workers with no dependency
 * are chained off {@code firstWorker}, and {@link #execute(Row)} kicks off the
 * chain from there.
 */
public class TemplateTermSetPopulatorPlan {

	// Workers whose term depends on no other term; chained together in findFirstWorker().
	protected Map<ColumnTemplateTerm, TemplateTermSetPopulatorWorker> independentWorkers = new HashMap<ColumnTemplateTerm, TemplateTermSetPopulatorWorker>();
	// All workers generated so far, keyed by their term.
	protected Map<ColumnTemplateTerm, TemplateTermSetPopulatorWorker> workers = new HashMap<ColumnTemplateTerm, TemplateTermSetPopulatorWorker>();
	// Workers waiting on a term whose own worker has not been created yet;
	// drained when that worker is finally generated (see generateWorker()).
	protected Map<ColumnTemplateTerm, List<TemplateTermSetPopulatorWorker>> workerDependencyPlaceholder = new HashMap<ColumnTemplateTerm, List<TemplateTermSetPopulatorWorker>>();
	protected Map<ColumnTemplateTerm, HNodePath> termToPath;
	protected TemplateTermSetPopulatorWorker firstWorker;
	protected LinkedList<ColumnTemplateTerm> columnTerms;
	protected List<ColumnTemplateTerm> comparisonTerms;
	protected SuperSelection selection;

	// Affinity strategies tried for every term pair, in registration order;
	// the closest valid affinity wins (see findAffinity()).
	private static final List<ColumnAffinity> affinities;
	static {
		affinities = new LinkedList<ColumnAffinity>();
		affinities.add(RowColumnAffinity.INSTANCE);
		affinities.add(ParentRowColumnAffinity.INSTANCE);
		affinities.add(CommonParentRowColumnAffinity.INSTANCE);
	}

	protected TemplateTermSetPopulatorPlan(SuperSelection sel) {
		this.selection = sel;
	}

	/** Plans for the given terms; dependencies are searched among the terms themselves. */
	public TemplateTermSetPopulatorPlan(Map<ColumnTemplateTerm, HNodePath> termToPath,
			Collection<ColumnTemplateTerm> columnTerms, SuperSelection sel) {
		this.termToPath = termToPath;
		this.columnTerms = new LinkedList<ColumnTemplateTerm>();
		// comparisonTerms aliases columnTerms on purpose: terms are compared
		// against each other when looking for dependencies.
		this.comparisonTerms = this.columnTerms;
		this.columnTerms.addAll(columnTerms);
		this.firstWorker = null;
		this.selection = sel;
		generate();
	}

	/**
	 * Same as above, but terms with no dependency of their own are attached to
	 * the supplied {@code firstWorker} instead of becoming independent roots.
	 */
	public TemplateTermSetPopulatorPlan(Map<ColumnTemplateTerm, HNodePath> termToPath,
			Collection<ColumnTemplateTerm> columnTerms,
			TemplateTermSetPopulatorWorker firstWorker, SuperSelection sel) {
		this.termToPath = termToPath;
		this.columnTerms = new LinkedList<ColumnTemplateTerm>();
		this.comparisonTerms = this.columnTerms;
		this.columnTerms.addAll(columnTerms);
		this.firstWorker = firstWorker;
		this.selection = sel;
		generate();
	}

	// NOTE(review): this constructor never assigns 'selection', leaving it null;
	// workers created through this path get a null selection — confirm callers
	// expect that.
	public TemplateTermSetPopulatorPlan(
			Map<ColumnTemplateTerm, HNodePath> termToPath,
			LinkedList<ColumnTemplateTerm> columnTerms,
			List<ColumnTemplateTerm> comparisonTerms) {
		this.termToPath = termToPath;
		this.columnTerms = columnTerms;
		this.comparisonTerms = comparisonTerms;
		this.firstWorker = null;
		generate();
	}

	// Top-level plan construction: sort terms deepest-first, map each term to the
	// term it depends on, build the workers, then wire up the entry point.
	private void generate() {
		LinkedList<ColumnTemplateTerm> columnTermsLocal = new LinkedList<ColumnTemplateTerm>();
		columnTermsLocal.addAll(columnTerms);
		sortColumnTermsByHNodePathDepth(columnTermsLocal);
		Map<ColumnTemplateTerm, ColumnTemplateTerm> termsToTermDependentOn =
				generateTermsToTermDependentOn(columnTermsLocal, comparisonTerms);
		generateWorkers(termsToTermDependentOn);
		findFirstWorker();
	}

	// Sorts deepest column paths first (descending path length), so deeper
	// terms are processed before shallower ones when assigning dependencies.
	protected void sortColumnTermsByHNodePathDepth(LinkedList<ColumnTemplateTerm> columnTerms) {
		Collections.sort(columnTerms, new Comparator<ColumnTemplateTerm>(){
			@Override
			public int compare(ColumnTemplateTerm o1, ColumnTemplateTerm o2) {
				// negated: longer paths sort first
				return -(o1.calculateColumnPathLength() - o2.calculateColumnPathLength());
			}
		});
	}

	// Creates one worker per (term -> dependency) entry.
	protected void generateWorkers(
			Map<ColumnTemplateTerm, ColumnTemplateTerm> termsToTermDependentOn) {
		for(Entry<ColumnTemplateTerm, ColumnTemplateTerm> termToTermDependentOn : termsToTermDependentOn.entrySet()) {
			ColumnTemplateTerm currentTerm = termToTermDependentOn.getKey();
			ColumnTemplateTerm dependentTerm = termToTermDependentOn.getValue();
			generateWorker(currentTerm, dependentTerm);
		}
	}

	/**
	 * For each term, finds the comparison term with the closest affinity that
	 * would not create a dependency cycle. A null value means the term is
	 * independent.
	 */
	protected Map<ColumnTemplateTerm, ColumnTemplateTerm> generateTermsToTermDependentOn(
			LinkedList<ColumnTemplateTerm> columnTermsLocal,
			List<ColumnTemplateTerm> comparisonTermsLocal) {
		Map<ColumnTemplateTerm, ColumnTemplateTerm> termsToTermDependentOn =
				new HashMap<ColumnTemplateTerm, ColumnTemplateTerm>();
		while(!columnTermsLocal.isEmpty()) {
			ColumnTemplateTerm currentTerm = columnTermsLocal.pop();
			ColumnAffinity closestAffinity = NoColumnAffinity.INSTANCE;
			ColumnTemplateTerm dependentTerm = null;
			for(ColumnTemplateTerm comparisonTerm : comparisonTermsLocal) {
				if(comparisonTerm == currentTerm) {
					continue;
				}
				ColumnAffinity affinity = findAffinity(currentTerm, comparisonTerm, termToPath);
				// Closer affinity wins, but never pick a term that (transitively)
				// already depends on the current term — that would be a cycle.
				if(affinity.isCloserThan(closestAffinity)
						&& !isTransitivelyDependentOn(termsToTermDependentOn, currentTerm, comparisonTerm)) {
					closestAffinity = affinity;
					dependentTerm = comparisonTerm;
				}
			}
			termsToTermDependentOn.put(currentTerm, dependentTerm);
		}
		return termsToTermDependentOn;
	}

	// Walks the dependency chain from comparisonTerm; true if it eventually
	// reaches currentTerm (i.e. picking comparisonTerm would close a cycle).
	private boolean isTransitivelyDependentOn(
			Map<ColumnTemplateTerm, ColumnTemplateTerm> termsToTermDependentOn,
			ColumnTemplateTerm currentTerm, ColumnTemplateTerm comparisonTerm) {
		if(!termsToTermDependentOn.containsKey(comparisonTerm)) {
			return false;
		} else if(termsToTermDependentOn.get(comparisonTerm) == currentTerm) {
			return true;
		} else {
			return isTransitivelyDependentOn(termsToTermDependentOn, currentTerm,
					termsToTermDependentOn.get(comparisonTerm));
		}
	}

	// Creates the worker for currentTerm, attaches any workers that were parked
	// waiting for it, registers it, and wires its dependency edges.
	private TemplateTermSetPopulatorWorker generateWorker(
			ColumnTemplateTerm currentTerm, ColumnTemplateTerm dependentTerm) {
		TemplateTermSetPopulatorStrategy strategy = generateStrategy(currentTerm, dependentTerm);
		TemplateTermSetPopulatorWorker worker =
				new TemplateTermSetPopulatorWorker(currentTerm, termToPath.get(currentTerm), strategy, selection);
		if(workerDependencyPlaceholder.containsKey(currentTerm)) {
			// Other workers already declared a dependency on this term before
			// its worker existed; attach them now.
			for(TemplateTermSetPopulatorWorker dependentWorker : workerDependencyPlaceholder.get(currentTerm)){
				worker.addDependentWorker(dependentWorker);
			}
		}
		workers.put(currentTerm, worker);
		manageDependencies(currentTerm, dependentTerm, worker);
		return worker;
	}

	// Memoized strategy for true roots; dynamic strategy when the term's values
	// are located relative to another term's path.
	private TemplateTermSetPopulatorStrategy generateStrategy(
			ColumnTemplateTerm currentTerm, ColumnTemplateTerm dependentTerm) {
		TemplateTermSetPopulatorStrategy strategy = null;
		if(dependentTerm == null && firstWorker == null) {
			strategy = new MemoizedTemplateTermSetPopulatorStrategy(termToPath.get(currentTerm));
		} else {
			// With an externally supplied firstWorker, dependency-less terms
			// are anchored to that worker's term instead.
			if(firstWorker != null && dependentTerm == null)
				dependentTerm = firstWorker.term;
			strategy = new DynamicTemplateTermSetPopulatorStrategy(
					termToPath.get(currentTerm), termToPath.get(dependentTerm));
		}
		return strategy;
	}

	// Wires the worker into the graph: under its dependency, under the external
	// firstWorker, or as an independent root.
	private void manageDependencies(ColumnTemplateTerm currentTerm,
			ColumnTemplateTerm dependentTerm, TemplateTermSetPopulatorWorker worker) {
		if(dependentTerm != null) {
			generateDependency(dependentTerm, worker);
		} else {
			if(firstWorker != null) {
				firstWorker.addDependentWorker(worker);
			} else {
				independentWorkers.put(currentTerm, worker);
			}
		}
	}

	// Attaches worker under dependentTerm's worker, or parks it in the
	// placeholder map if that worker has not been generated yet.
	private void generateDependency(ColumnTemplateTerm dependentTerm,
			TemplateTermSetPopulatorWorker worker) {
		TemplateTermSetPopulatorWorker dependentOnWorker = null;
		dependentOnWorker = workers.get(dependentTerm);
		if(dependentOnWorker != null) {
			dependentOnWorker.addDependentWorker(worker);
		} else {
			addDependencyPlaceholder(dependentTerm, worker);
		}
	}

	private void addDependencyPlaceholder(ColumnTemplateTerm dependentTerm,
			TemplateTermSetPopulatorWorker worker) {
		if(!workerDependencyPlaceholder.containsKey(dependentTerm)) {
			workerDependencyPlaceholder.put(dependentTerm, new LinkedList<TemplateTermSetPopulatorWorker>());
		}
		List<TemplateTermSetPopulatorWorker> dependencyPlaceholder =
				workerDependencyPlaceholder.get(dependentTerm);
		dependencyPlaceholder.add(worker);
	}

	// Picks the entry point: the first independent worker becomes firstWorker
	// and the remaining independent workers are chained behind it, so a single
	// work() call reaches every worker. No-op if firstWorker was supplied.
	protected void findFirstWorker() {
		if(firstWorker != null) {
			return;
		}
		TemplateTermSetPopulatorWorker previousWorker = null;
		for(TemplateTermSetPopulatorWorker worker : independentWorkers.values()) {
			if(firstWorker == null) {
				firstWorker = worker;
			}
			if(previousWorker != null) {
				previousWorker.addDependentWorker(worker);
			}
			previousWorker = worker;
		}
	}

	// Returns the closest valid affinity between the two terms' paths, trying
	// every registered affinity type; NoColumnAffinity if none applies.
	private ColumnAffinity findAffinity(ColumnTemplateTerm currentTerm,
			ColumnTemplateTerm comparisonTerm, Map<ColumnTemplateTerm, HNodePath> termToPath) {
		ColumnAffinity closestAffinity = NoColumnAffinity.INSTANCE;
		for(ColumnAffinity affinity : affinities) {
			HNodePath currentPath = termToPath.get(currentTerm);
			HNodePath comparisonPath = termToPath.get(comparisonTerm);
			if(affinity.isValidFor(currentPath, comparisonPath)) {
				ColumnAffinity generatedAffinity = affinity.generateAffinity(currentPath, comparisonPath);
				if(generatedAffinity.isCloserThan(closestAffinity)) {
					closestAffinity = generatedAffinity;
				}
			}
		}
		return closestAffinity;
	}

	/**
	 * Runs the plan from {@code topRow}. With no column terms, returns a single
	 * empty term set; with no entry worker, returns an empty list.
	 */
	public List<PartiallyPopulatedTermSet> execute(Row topRow) {
		if(columnTerms == null || columnTerms.isEmpty()) {
			List<PartiallyPopulatedTermSet> predicates = new LinkedList<PartiallyPopulatedTermSet>();
			predicates.add(new PartiallyPopulatedTermSet());
			return predicates;
		}
		if(firstWorker != null) {
			return firstWorker.work(topRow);
		}
		return new LinkedList<PartiallyPopulatedTermSet>();
	}

	// Variant that seeds the first worker with a pre-resolved node value.
	// NOTE(review): unlike execute(Row), this assumes firstWorker is non-null.
	public List<PartiallyPopulatedTermSet> executeComplicated(Row topRow, Node value) {
		return firstWorker.work(topRow, value);
	}
}
/*
 * Licensed to GraphHopper and Peter Karich under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * GraphHopper licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.graphhopper.coll;

import com.graphhopper.util.Helper;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An in-memory simple B-Tree. Later we'll use DataAccess to allow on-disc storage for very large
 * data sets. Delete not supported.
 * <p/>
 * @author Peter Karich
 */
public class GHLongIntBTree implements LongIntMap {
    private Logger logger = LoggerFactory.getLogger(getClass());
    // Sentinel returned by get()/put() when a key is absent; -1 is therefore
    // not usable as a key (put() rejects it).
    private final int noNumberValue = -1;
    // Number of key/value pairs stored.
    private long size;
    // Maximum entries per node before a split.
    private int maxLeafEntries;
    // Initial array capacity for freshly created nodes.
    private int initLeafSize;
    // Index of the median entry promoted on a split.
    private int splitIndex;
    // Growth factor for node array resizing (see BTreeEntry.ensureSize).
    private float factor;
    // Current tree height (1 = just the root).
    private int height;
    private BTreeEntry root;

    public GHLongIntBTree( int maxLeafEntries )
    {
        // NOTE(review): the field is assigned BEFORE the even->odd adjustment
        // below, so this.maxLeafEntries keeps the caller's even value while
        // splitIndex is derived from the incremented local. Confirm this is
        // intentional.
        this.maxLeafEntries = maxLeafEntries;
        if (maxLeafEntries < 1)
        {
            throw new IllegalArgumentException("illegal maxLeafEntries:" + maxLeafEntries);
        }
        // force an odd entry count so the split has a single median entry
        if (maxLeafEntries % 2 == 0)
        {
            maxLeafEntries++;
        }

        splitIndex = maxLeafEntries / 2;
        // Tune initial node size / growth factor to the configured node width.
        if (maxLeafEntries < 10)
        {
            factor = 2;
            initLeafSize = 1;
        } else if (maxLeafEntries < 20)
        {
            factor = 2;
            initLeafSize = 4;
        } else
        {
            factor = 1.7f;
            initLeafSize = maxLeafEntries / 10;
        }
        clear();
    }

    /**
     * Inserts or updates the mapping for {@code key}.
     *
     * @return the previous value, or noNumberValue (-1) if the key was new
     * @throws IllegalArgumentException if key equals the -1 sentinel
     */
    @Override
    public int put( long key, int value )
    {
        if (key == noNumberValue)
        {
            throw new IllegalArgumentException("Illegal key " + key);
        }

        ReturnValue rv = root.put(key, value);
        if (rv.tree != null)
        {
            // the root was split: the returned mini-tree becomes the new root
            height++;
            root = rv.tree;
        }
        if (rv.oldValue == noNumberValue)
        {
            // successfully inserted
            size++;
            // periodically compact node arrays to reclaim slack capacity
            if (size % 1000000 == 0)
                optimize();
        }
        return rv.oldValue;
    }

    /** @return the value for {@code key}, or noNumberValue (-1) if absent. */
    @Override
    public int get( long key )
    {
        return root.get(key);
    }

    int height()
    {
        return height;
    }

    @Override
    public long getSize()
    {
        return size;
    }

    /**
     * @return memory usage in MB
     */
    @Override
    public int getMemoryUsage()
    {
        return Math.round(root.getCapacity() / Helper.MB);
    }

    // Resets to an empty tree with a single leaf root.
    void clear()
    {
        size = 0;
        height = 1;
        root = new BTreeEntry(initLeafSize, true);
    }

    int getNoNumberValue()
    {
        return noNumberValue;
    }

    void flush()
    {
        throw new IllegalStateException("not supported yet");
    }

    // Number of nodes (not key/value pairs) in the tree.
    private int getEntries()
    {
        return root.getEntries();
    }

    // Result of a node-level put: the previous value plus, after a split,
    // the replacement subtree to splice in at the parent.
    static class ReturnValue
    {
        int oldValue;
        BTreeEntry tree;

        public ReturnValue()
        {
        }

        public ReturnValue( int oldValue )
        {
            this.oldValue = oldValue;
        }
    }

    // One B-tree node: sorted keys with parallel values, plus child pointers
    // for interior nodes.
    class BTreeEntry
    {
        int entrySize;
        long keys[];
        int values[];
        BTreeEntry children[];
        boolean isLeaf;

        public BTreeEntry( int tmpSize, boolean leaf )
        {
            this.isLeaf = leaf;
            keys = new long[tmpSize];
            values = new int[tmpSize];

            if (!isLeaf)
            {
                // in a b-tree we need one more entry to point to all children!
                children = new BTreeEntry[tmpSize + 1];
            }
        }

        /**
         * @return the old value which was associated with the specified key or if no update it
         * returns noNumberValue
         */
        ReturnValue put( long key, int newValue )
        {
            int index = binarySearch(keys, 0, entrySize, key);
            if (index >= 0)
            {
                // update
                int oldValue = values[index];
                values[index] = newValue;
                return new ReturnValue(oldValue);
            }

            // key absent: ~index is the insertion point
            index = ~index;
            ReturnValue downTreeRV;
            if (isLeaf || children[index] == null)
            {
                // insert
                downTreeRV = new ReturnValue(noNumberValue);
                downTreeRV.tree = checkSplitEntry();
                if (downTreeRV.tree == null)
                {
                    insertKeyValue(index, key, newValue);
                } else
                {
                    // node was split first; route the insert to the proper half
                    if (index <= splitIndex)
                    {
                        downTreeRV.tree.children[0].insertKeyValue(index, key, newValue);
                    } else
                    {
                        downTreeRV.tree.children[1].insertKeyValue(index - splitIndex - 1, key, newValue);
                    }
                }
                return downTreeRV;
            }

            // interior node: recurse into the child covering this key range
            downTreeRV = children[index].put(key, newValue);
            if (downTreeRV.oldValue != noNumberValue)
            {
                // only update
                return downTreeRV;
            }

            if (downTreeRV.tree != null)
            {
                // split this treeEntry if it is too big
                BTreeEntry returnTree, downTree = returnTree = checkSplitEntry();
                if (downTree == null)
                {
                    insertTree(index, downTreeRV.tree);
                } else
                {
                    if (index <= splitIndex)
                    {
                        downTree.children[0].insertTree(index, downTreeRV.tree);
                    } else
                    {
                        downTree.children[1].insertTree(index - splitIndex - 1, downTreeRV.tree);
                    }
                }

                downTreeRV.tree = returnTree;
            }
            return downTreeRV;
        }

        /**
         * @return null if nothing to do or a new sub tree if this tree capacity is no longer
         * sufficient.
         */
        BTreeEntry checkSplitEntry()
        {
            if (entrySize < maxLeafEntries)
            {
                return null;
            }

            // right child: copy from this
            int count = entrySize - splitIndex - 1;
            BTreeEntry newRightChild = new BTreeEntry(Math.max(initLeafSize, count), isLeaf);
            copy(this, newRightChild, splitIndex + 1, count);

            // left child: copy from this
            // avoid: http://stackoverflow.com/q/15897869/194609
            BTreeEntry newLeftChild = new BTreeEntry(Math.max(initLeafSize, splitIndex), isLeaf);
            copy(this, newLeftChild, 0, splitIndex);

            // new tree pointing to left + right tree only; the median entry at
            // splitIndex is promoted into this 1-entry parent
            BTreeEntry newTree = new BTreeEntry(1, false);
            newTree.entrySize = 1;
            newTree.keys[0] = this.keys[splitIndex];
            newTree.values[0] = this.values[splitIndex];
            newTree.children[0] = newLeftChild;
            newTree.children[1] = newRightChild;
            return newTree;
        }

        // Copies count entries (and, for interior nodes, count+1 children)
        // from fromChild starting at 'from' into toChild.
        void copy( BTreeEntry fromChild, BTreeEntry toChild, int from, int count )
        {
            System.arraycopy(fromChild.keys, from, toChild.keys, 0, count);
            System.arraycopy(fromChild.values, from, toChild.values, 0, count);
            if (!fromChild.isLeaf)
            {
                System.arraycopy(fromChild.children, from, toChild.children, 0, count + 1);
            }

            toChild.entrySize = count;
        }

        // Inserts (key, newValue) at 'index', shifting later entries right.
        void insertKeyValue( int index, long key, int newValue )
        {
            ensureSize(entrySize + 1);
            int count = entrySize - index;
            if (count > 0)
            {
                System.arraycopy(keys, index, keys, index + 1, count);
                System.arraycopy(values, index, values, index + 1, count);
                if (!isLeaf)
                {
                    System.arraycopy(children, index + 1, children, index + 2, count);
                }
            }

            keys[index] = key;
            values[index] = newValue;
            entrySize++;
        }

        // Splices a split-result subtree (1 promoted entry + 2 children) into
        // this node at 'index'.
        void insertTree( int index, BTreeEntry tree )
        {
            insertKeyValue(index, tree.keys[0], tree.values[0]);
            if (!isLeaf)
            {
                // overwrite children
                children[index] = tree.children[0];
                // set
                children[index + 1] = tree.children[1];
            }
        }

        int get( long key )
        {
            int index = binarySearch(keys, 0, entrySize, key);
            if (index >= 0)
            {
                return values[index];
            }
            index = ~index;
            if (isLeaf || children[index] == null)
            {
                return noNumberValue;
            }
            return children[index].get(key);
        }

        /**
         * @return used bytes
         */
        long getCapacity()
        {
            // rough estimate: 8+4 bytes per key/value slot plus array/object headers
            long cap = keys.length * (8 + 4) + 3 * 12 + 4 + 1;
            if (!isLeaf)
            {
                cap += children.length * 4;
                for (int i = 0; i < children.length; i++)
                {
                    if (children[i] != null)
                    {
                        cap += children[i].getCapacity();
                    }
                }
            }
            return cap;
        }

        // Counts nodes in this subtree (this node plus all non-null children).
        int getEntries()
        {
            int entries = 1;
            if (!isLeaf)
            {
                for (int i = 0; i < children.length; i++)
                {
                    if (children[i] != null)
                    {
                        entries += children[i].getEntries();
                    }
                }
            }
            return entries;
        }

        // Grows the arrays (by 'factor', capped at maxLeafEntries) so at least
        // 'size' entries fit.
        void ensureSize( int size )
        {
            if (size <= keys.length)
            {
                return;
            }
            int newSize = Math.min(maxLeafEntries, Math.max(size + 1, Math.round(size * factor)));
            keys = Arrays.copyOf(keys, newSize);
            values = Arrays.copyOf(values, newSize);
            if (!isLeaf)
            {
                children = Arrays.copyOf(children, newSize + 1);
            }
        }

        // Trims slack capacity (beyond 1 spare slot) and recurses into children.
        void compact()
        {
            int tolerance = 1;
            if (entrySize + tolerance < keys.length)
            {
                keys = Arrays.copyOf(keys, entrySize);
                values = Arrays.copyOf(values, entrySize);
                if (!isLeaf)
                {
                    children = Arrays.copyOf(children, entrySize + 1);
                }
            }

            if (!isLeaf)
            {
                for (int i = 0; i < children.length; i++)
                {
                    if (children[i] != null)
                    {
                        children[i].compact();
                    }
                }
            }
        }

        // Debug rendering: this node's keys, then each child prefixed with its depth.
        String toString( int height )
        {
            String str = height + ": ";
            for (int i = 0; i < entrySize; i++)
            {
                if (i > 0)
                {
                    str += ",";
                }
                if (keys[i] == noNumberValue)
                {
                    str += "-";
                } else
                {
                    str += keys[i];
                }
            }

            str += "\n";
            if (!isLeaf)
            {
                for (int i = 0; i < entrySize + 1; i++)
                {
                    if (children[i] != null)
                    {
                        str += children[i].toString(height + 1) + "| ";
                    }
                }
            }
            return str;
        }
    }

    // Trims internal node arrays; only worthwhile once the tree is non-trivial.
    @Override
    public void optimize()
    {
        if (getSize() > 10000)
        {
            // StopWatch sw = new StopWatch().start();
            // int old = memoryUsage();
            root.compact();
            // logger.info(size + "| osmIdMap.optimize took: " + sw.stop().getSeconds()
            //        + " => freed: " + (old - memoryUsage()) + "MB");
        }
    }

    @Override
    public String toString()
    {
        return "Height:" + height() + ", entries:" + getEntries();
    }

    void print()
    {
        logger.info(root.toString(1));
    }

    // LATER: see OSMIDMap for a version where we use DataAccess
    //
    // Binary search over keys[start, start+len). Returns the matching index,
    // or the one's complement (~insertionPoint) when the key is absent —
    // same contract as Arrays.binarySearch.
    static int binarySearch( long keys[], int start, int len, long key )
    {
        int high = start + len, low = start - 1, guess;
        while (high - low > 1)
        {
            // use >>> for average or we could get an integer overflow.
            guess = (high + low) >>> 1;
            long guessedKey = keys[guess];
            if (guessedKey < key)
            {
                low = guess;
            } else
            {
                high = guess;
            }
        }

        if (high == start + len)
        {
            return ~(start + len);
        }

        long highKey = keys[high];
        if (highKey == key)
        {
            return high;
        } else
        {
            return ~high;
        }
    }
}
/* $Id: NavigationDerbyUI.java 1535983 2013-10-26 12:46:18Z kwright $ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.cmis_tests; import java.util.Locale; import org.apache.manifoldcf.core.tests.HTMLTester; import org.junit.Test; /** Basic UI navigation tests */ public class NavigationDerbyUI extends BaseUIDerby { @Test public void createConnectionsAndJob() throws Exception { testerInstance.newTest(Locale.US); HTMLTester.Window window; HTMLTester.Link link; HTMLTester.Form form; HTMLTester.Textarea textarea; HTMLTester.Selectbox selectbox; HTMLTester.Button button; HTMLTester.Radiobutton radiobutton; HTMLTester.Loop loop; window = testerInstance.openMainWindow("http://localhost:8346/mcf-crawler-ui/index.jsp"); // Login form = window.findForm(testerInstance.createStringDescription("loginform")); textarea = form.findTextarea(testerInstance.createStringDescription("userID")); textarea.setValue(testerInstance.createStringDescription("admin")); textarea = form.findTextarea(testerInstance.createStringDescription("password")); textarea.setValue(testerInstance.createStringDescription("admin")); button = window.findButton(testerInstance.createStringDescription("Login")); button.click(); window = 
testerInstance.findWindow(null); // Define an output connection via the UI link = window.findLink(testerInstance.createStringDescription("List output connections")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Add an output connection")); link.click(); // Fill in a name window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); textarea = form.findTextarea(testerInstance.createStringDescription("connname")); textarea.setValue(testerInstance.createStringDescription("MyOutputConnection")); link = window.findLink(testerInstance.createStringDescription("Type tab")); link.click(); // Select a type window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); selectbox = form.findSelectbox(testerInstance.createStringDescription("classname")); selectbox.selectValue(testerInstance.createStringDescription("org.apache.manifoldcf.agents.output.nullconnector.NullConnector")); button = window.findButton(testerInstance.createStringDescription("Continue to next page")); button.click(); // Visit the Throttling tab window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Throttling tab")); link.click(); // Go back to the Name tab window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Name tab")); link.click(); // Now save the connection. 
window = testerInstance.findWindow(null); button = window.findButton(testerInstance.createStringDescription("Save this output connection")); button.click(); // Define a repository connection via the UI window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List repository connections")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Add a connection")); link.click(); // Fill in a name window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); textarea = form.findTextarea(testerInstance.createStringDescription("connname")); textarea.setValue(testerInstance.createStringDescription("MyRepositoryConnection")); link = window.findLink(testerInstance.createStringDescription("Type tab")); link.click(); // Select a type window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); selectbox = form.findSelectbox(testerInstance.createStringDescription("classname")); selectbox.selectValue(testerInstance.createStringDescription("org.apache.manifoldcf.crawler.connectors.cmis.CmisRepositoryConnector")); button = window.findButton(testerInstance.createStringDescription("Continue to next page")); button.click(); // Visit the Throttling tab window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Throttling tab")); link.click(); // Server tab window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Server tab")); link.click(); window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); textarea = form.findTextarea(testerInstance.createStringDescription("username")); textarea.setValue(testerInstance.createStringDescription("foo")); textarea = 
form.findTextarea(testerInstance.createStringDescription("password")); textarea.setValue(testerInstance.createStringDescription("bar")); // Go back to the Name tab link = window.findLink(testerInstance.createStringDescription("Name tab")); link.click(); window = testerInstance.findWindow(null); // Now save the connection. button = window.findButton(testerInstance.createStringDescription("Save this connection")); button.click(); // Define an authority connection via the UI window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List authority groups")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Add new authority group")); link.click(); window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editgroup")); textarea = form.findTextarea(testerInstance.createStringDescription("groupname")); textarea.setValue(testerInstance.createStringDescription("MyAuthorityConnection")); button = window.findButton(testerInstance.createStringDescription("Save this authority group")); button.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List authorities")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Add a new connection")); link.click(); // Fill in a name window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); textarea = form.findTextarea(testerInstance.createStringDescription("connname")); textarea.setValue(testerInstance.createStringDescription("MyAuthorityConnection")); link = window.findLink(testerInstance.createStringDescription("Type tab")); link.click(); // Select a type window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editconnection")); 
selectbox = form.findSelectbox(testerInstance.createStringDescription("classname")); selectbox.selectValue(testerInstance.createStringDescription("org.apache.manifoldcf.crawler.connectors.cmis.CmisAuthorityConnector")); selectbox = form.findSelectbox(testerInstance.createStringDescription("authoritygroup")); selectbox.selectValue(testerInstance.createStringDescription("MyAuthorityConnection")); button = window.findButton(testerInstance.createStringDescription("Continue to next page")); button.click(); window = testerInstance.findWindow(null); //MHL // Go back to the Name tab link = window.findLink(testerInstance.createStringDescription("Name tab")); link.click(); // Now save the connection. window = testerInstance.findWindow(null); button = window.findButton(testerInstance.createStringDescription("Save this authority connection")); button.click(); // Create a job window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List jobs")); link.click(); // Add a job window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Add a job")); link.click(); // Fill in a name window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editjob")); textarea = form.findTextarea(testerInstance.createStringDescription("description")); textarea.setValue(testerInstance.createStringDescription("MyJob")); link = window.findLink(testerInstance.createStringDescription("Connection tab")); link.click(); // Select the connections window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editjob")); selectbox = form.findSelectbox(testerInstance.createStringDescription("outputname")); selectbox.selectValue(testerInstance.createStringDescription("MyOutputConnection")); selectbox = form.findSelectbox(testerInstance.createStringDescription("connectionname")); 
selectbox.selectValue(testerInstance.createStringDescription("MyRepositoryConnection")); button = window.findButton(testerInstance.createStringDescription("Continue to next screen")); button.click(); // Visit all the tabs. Scheduling tab first window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Scheduling tab")); link.click(); window = testerInstance.findWindow(null); form = window.findForm(testerInstance.createStringDescription("editjob")); selectbox = form.findSelectbox(testerInstance.createStringDescription("dayofweek")); selectbox.selectValue(testerInstance.createStringDescription("0")); selectbox = form.findSelectbox(testerInstance.createStringDescription("hourofday")); selectbox.selectValue(testerInstance.createStringDescription("1")); selectbox = form.findSelectbox(testerInstance.createStringDescription("minutesofhour")); selectbox.selectValue(testerInstance.createStringDescription("30")); selectbox = form.findSelectbox(testerInstance.createStringDescription("monthofyear")); selectbox.selectValue(testerInstance.createStringDescription("11")); selectbox = form.findSelectbox(testerInstance.createStringDescription("dayofmonth")); selectbox.selectValue(testerInstance.createStringDescription("none")); textarea = form.findTextarea(testerInstance.createStringDescription("duration")); textarea.setValue(testerInstance.createStringDescription("120")); button = window.findButton(testerInstance.createStringDescription("Add new schedule record")); button.click(); window = testerInstance.findWindow(null); // MHL // Save the job button = window.findButton(testerInstance.createStringDescription("Save this job")); button.click(); // Delete the job window = testerInstance.findWindow(null); HTMLTester.StringDescription jobID = window.findMatch(testerInstance.createStringDescription("<!--jobid=(.*?)-->"),0); testerInstance.printValue(jobID); link = window.findLink(testerInstance.createStringDescription("Delete this job")); 
link.click(); // Wait for the job to go away loop = testerInstance.beginLoop(120); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Manage jobs")); link.click(); window = testerInstance.findWindow(null); HTMLTester.StringDescription isJobNotPresent = window.isNotPresent(jobID); testerInstance.printValue(isJobNotPresent); loop.breakWhenTrue(isJobNotPresent); loop.endLoop(); // Delete the authority connection window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List authorities")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Delete MyAuthorityConnection")); link.click(); // Delete the repository connection window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List repository connections")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Delete MyRepositoryConnection")); link.click(); // Delete the output connection window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("List output connections")); link.click(); window = testerInstance.findWindow(null); link = window.findLink(testerInstance.createStringDescription("Delete MyOutputConnection")); link.click(); testerInstance.executeTest(); } }
/*
 * Copyright 2000-2014 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.tests.components.grid.basicfeatures.escalator;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import org.junit.Test;
import org.openqa.selenium.By;

import com.vaadin.tests.components.grid.basicfeatures.EscalatorBasicClientFeaturesTest;

/**
 * Browser tests for adding/removing Escalator rows and columns through the
 * test UI's menu, and for verifying the resulting DOM state.
 *
 * Each test drives the client-side Escalator via
 * {@code selectMenuPath(...)} (inherited from the base class) and then
 * inspects header/body/footer cells and the debug log.
 */
public class EscalatorRowColumnTest extends EscalatorBasicClientFeaturesTest {

    /**
     * The scroll position of the Escalator when scrolled all the way down, to
     * reveal the 100:th row.
     */
    private static final int BOTTOM_SCROLL_POSITION = 1857;

    /** A fresh Escalator has no rows, no columns and an empty log. */
    @Test
    public void testInit() {
        openTestURL();
        assertNotNull(getEscalator());
        assertNull(getHeaderRow(0));
        assertNull(getBodyRow(0));
        assertNull(getFooterRow(0));
        assertLogContains("Columns: 0");
        assertLogContains("Header rows: 0");
        assertLogContains("Body rows: 0");
        assertLogContains("Footer rows: 0");
    }

    /** A column without any rows renders no cells in any section. */
    @Test
    public void testInsertAColumn() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        assertNull(getHeaderRow(0));
        assertNull(getBodyRow(0));
        assertNull(getFooterRow(0));
        assertLogContains("Columns: 1");
    }

    /** A header row without any columns renders no cells. */
    @Test
    public void testInsertAHeaderRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, HEADER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Header rows: 1");
    }

    /** A body row without any columns renders no cells. */
    @Test
    public void testInsertABodyRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Body rows: 1");
    }

    /** A footer row without any columns renders no cells. */
    @Test
    public void testInsertAFooterRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, FOOTER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Footer rows: 1");
    }

    // The next six tests check that adding a column and a row produces a
    // cell in exactly the targeted section, regardless of insertion order
    // (column-first vs. row-first).

    @Test
    public void testInsertAColumnAndAHeaderRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, HEADER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNotNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Header rows: 1");
    }

    @Test
    public void testInsertAColumnAndABodyRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNotNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Body rows: 1");
    }

    @Test
    public void testInsertAColumnAndAFooterRow() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, FOOTER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNotNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Footer rows: 1");
    }

    @Test
    public void testInsertAHeaderRowAndAColumn() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, HEADER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        assertNotNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Header rows: 1");
    }

    @Test
    public void testInsertABodyRowAndAColumn() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNotNull(getBodyCell(0, 0));
        assertNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Body rows: 1");
    }

    @Test
    public void testInsertAFooterRowAndAColumn() {
        openTestURL();
        selectMenuPath(COLUMNS_AND_ROWS, FOOTER_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, ADD_ONE_COLUMN_TO_BEGINNING);
        assertNull(getHeaderCell(0, 0));
        assertNull(getBodyCell(0, 0));
        assertNotNull(getFooterCell(0, 0));
        assertLogContains("Columns: 1");
        assertLogContains("Footer rows: 1");
    }

    /** Populate columns first, then rows; the last cell must be reachable. */
    @Test
    public void testFillColRow() {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);
        scrollVerticallyTo(2000); // more like 1857, but this should be enough.

        // if not found, an exception is thrown here
        assertTrue("Wanted cell was not visible",
                isElementPresent(By.xpath("//td[text()='Cell: 9,99']")));
    }

    /** Populate rows first, then columns; the last cell must be reachable. */
    @Test
    public void testFillRowCol() {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_ROW_COLUMN);
        scrollVerticallyTo(2000); // more like 1857, but this should be enough.

        // if not found, an exception is thrown here
        assertTrue("Wanted cell was not visible",
                isElementPresent(By.xpath("//td[text()='Cell: 9,99']")));
    }

    /** Clearing columns-then-rows leaves an empty body. */
    @Test
    public void testClearColRow() {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);
        selectMenuPath(GENERAL, CLEAR_COLUMN_ROW);

        assertNull(getBodyCell(0, 0));
    }

    /** Clearing rows-then-columns leaves an empty body. */
    // NOTE(review): this test populates with POPULATE_COLUMN_ROW (not
    // POPULATE_ROW_COLUMN, as the method name suggests) — presumably the
    // populate order is irrelevant to clearing, but worth confirming.
    @Test
    public void testClearRowCol() {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);
        selectMenuPath(GENERAL, CLEAR_ROW_COLUMN);

        assertNull(getBodyCell(0, 0));
    }

    /**
     * Resizing the first column to a fixed 100px and then to "max width"
     * must actually change the rendered cell width.
     */
    @Test
    public void testResizeColToFit() {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);

        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS, RESIZE_FIRST_COLUMN_TO_100PX);
        int originalWidth = getBodyCell(0, 0).getSize().getWidth();
        assertEquals(100, originalWidth);

        selectMenuPath(COLUMNS_AND_ROWS, COLUMNS,
                RESIZE_FIRST_COLUMN_TO_MAX_WIDTH);
        int newWidth = getBodyCell(0, 0).getSize().getWidth();
        assertNotEquals("Column width should've changed", originalWidth,
                newWidth);
    }

    /**
     * Remove the bottom 50 rows (more than a viewport-full) while scrolled
     * all the way down; the view must recover cleanly at both ends.
     */
    @Test
    public void testRemoveMoreThanPagefulAtBottomWhileScrolledToBottom()
            throws Exception {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);

        scrollVerticallyTo(BOTTOM_SCROLL_POSITION);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_50_ROWS_FROM_BOTTOM);
        assertEquals("Row 49: 0,49", getBodyCell(-1, 0).getText());

        scrollVerticallyTo(0);

        // let the DOM organize itself
        Thread.sleep(500);

        // if something goes wrong, it'll explode before this.
        assertEquals("Row 0: 0,0", getBodyCell(0, 0).getText());
    }

    /** Same as above, but scrolled 15 rows short of the bottom. */
    @Test
    public void testRemoveMoreThanPagefulAtBottomWhileScrolledAlmostToBottom()
            throws Exception {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);

        // bottom minus 15 rows.
        scrollVerticallyTo(BOTTOM_SCROLL_POSITION - 15 * 20);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_50_ROWS_FROM_BOTTOM);
        assertEquals("Row 49: 0,49", getBodyCell(-1, 0).getText());

        scrollVerticallyTo(0);

        // let the DOM organize itself
        Thread.sleep(500);

        // if something goes wrong, it'll explode before this.
        assertEquals("Row 0: 0,0", getBodyCell(0, 0).getText());
    }

    /**
     * Remove 50 rows from just above the bottom while scrolled all the way
     * down; the last remaining row keeps its original content ("0,99").
     */
    @Test
    public void testRemoveMoreThanPagefulNearBottomWhileScrolledToBottom()
            throws Exception {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);

        scrollVerticallyTo(BOTTOM_SCROLL_POSITION);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_50_ROWS_FROM_ALMOST_BOTTOM);
        assertEquals("Row 49: 0,99", getBodyCell(-1, 0).getText());

        scrollVerticallyTo(0);

        // let the DOM organize itself
        Thread.sleep(500);

        // if something goes wrong, it'll explode before this.
        assertEquals("Row 0: 0,0", getBodyCell(0, 0).getText());
    }

    /** Same as above, but scrolled 15 rows short of the bottom. */
    @Test
    public void testRemoveMoreThanPagefulNearBottomWhileScrolledAlmostToBottom()
            throws Exception {
        openTestURL();
        selectMenuPath(GENERAL, POPULATE_COLUMN_ROW);

        // bottom minus 15 rows.
        scrollVerticallyTo(BOTTOM_SCROLL_POSITION - 15 * 20);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_50_ROWS_FROM_ALMOST_BOTTOM);

        // let the DOM organize itself
        Thread.sleep(500);
        assertEquals("Row 49: 0,99", getBodyCell(-1, 0).getText());

        scrollVerticallyTo(0);

        // let the DOM organize itself
        Thread.sleep(500);

        // if something goes wrong, it'll explode before this.
        assertEquals("Row 0: 0,0", getBodyCell(0, 0).getText());
    }
}
package com.sap.cloud.lm.sl.cf.process.steps;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.cloudfoundry.client.lib.domain.CloudApplication;
import org.cloudfoundry.client.lib.domain.CloudEntity.Meta;
import org.cloudfoundry.client.lib.domain.CloudService;
import org.cloudfoundry.client.lib.domain.CloudServiceBinding;
import org.cloudfoundry.client.lib.domain.CloudServiceInstance;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;

import com.sap.cloud.lm.sl.cf.client.lib.domain.CloudApplicationExtended;
import com.sap.cloud.lm.sl.cf.client.lib.domain.CloudServiceExtended;
import com.sap.cloud.lm.sl.cf.core.helpers.MapToEnvironmentConverter;
import com.sap.cloud.lm.sl.cf.core.model.DeployedMta;
import com.sap.cloud.lm.sl.cf.core.model.DeployedMtaModule;
import com.sap.cloud.lm.sl.cf.core.util.NameUtil;
import com.sap.cloud.lm.sl.cf.process.message.Messages;
import com.sap.cloud.lm.sl.common.SLException;
import com.sap.cloud.lm.sl.common.util.JsonUtil;
import com.sap.cloud.lm.sl.common.util.MapUtil;
import com.sap.cloud.lm.sl.common.util.TestUtil;

/**
 * Parameterized test for {@code CheckForCreationConflictsStep}: verifies how
 * the step reacts when the services/applications about to be deployed collide
 * with already-existing ones (owned by the same MTA, by another MTA, or
 * stand-alone). Each parameter row supplies a JSON fixture describing the
 * desired and existing state, plus the expected outcome (OK, warning, or
 * exception).
 */
@RunWith(Parameterized.class)
public class CheckForCreationConflictsStepTest extends SyncFlowableStepTest<CheckForCreationConflictsStep> {

    // Fixture deserialized from the JSON resource named by the parameter.
    private final StepInput stepInput;
    // Expected SLException message, or null when no exception is expected.
    private final String expectedExceptionMessage;
    // Existing service -> its instance (with bindings), used to stub the client.
    private Map<CloudServiceExtended, CloudServiceInstance> existingServiceInstances;
    // Whether the step is expected to log at least one warning.
    private boolean shouldWarn;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Parameters
    public static Iterable<Object[]> getParameters() {
        return Arrays.asList(new Object[][] {
// @formatter:off
            // (0) Services to deploy don't exist; applications to deploy don't exist -> should be OK:
            {
                "check-for-creation-conflicts-step-input-1.json", null, false,
            },
            // (1) Services to deploy exist, not part of the deployed MTA, don't have bound applications -> should warn:
            {
                "check-for-creation-conflicts-step-input-2.json", null, true,
            },
            // (2) Services to deploy exist, not part of the deployed MTA, have bound applications -> expect exception:
            {
                "check-for-creation-conflicts-step-input-3.json", null, true,
            },
            // (3) Services to deploy exist, part of the deployed MTA, don't have bound applications -> should be OK:
            {
                "check-for-creation-conflicts-step-input-4.json", null, false,
            },
            // (4) Services to deploy exist, part of the deployed MTA, have bound applications -> should be OK:
            {
                "check-for-creation-conflicts-step-input-5.json", null, false,
            },
            // (5) Applications to deploy exist, not part of the deployed MTA, but stand-alone -> should warn:
            {
                "check-for-creation-conflicts-step-input-6.json", null, true,
            },
            // (6) Applications to deploy exist, part of the deployed MTA -> should be OK:
            {
                "check-for-creation-conflicts-step-input-7.json", null, false
            },
            // (7) Services to deploy exist, not part of the deployed MTA, have bound applications from another MTA -> expect exception:
            {
                "check-for-creation-conflicts-step-input-8.json", MessageFormat.format(Messages.SERVICE_ASSOCIATED_WITH_OTHER_MTAS, "service-1", "com.sap.example.mta-1, com.sap.example.mta-2"), false,
            },
            // (8) Services to deploy exist, not part of the deployed MTA, have bound applications from another MTA, but they do not claim to 'own' the service -> should be OK:
            {
                "check-for-creation-conflicts-step-input-9.json", null, false,
            },
// @formatter:on
        });
    }

    public CheckForCreationConflictsStepTest(String stepInput, String expectedExceptionMessage, boolean shouldWarn) throws Exception {
        // The JSON fixture is loaded from the test resources next to this class.
        this.stepInput = JsonUtil.fromJson(TestUtil.getResourceAsString(stepInput, CheckForCreationConflictsStepTest.class),
            StepInput.class);
        this.expectedExceptionMessage = expectedExceptionMessage;
        this.shouldWarn = shouldWarn;
    }

    @Before
    public void setUp() throws Exception {
        // Order matters: the ExpectedException rule must be armed before the
        // process context and mocked client are prepared.
        prepareException();
        prepareDeployedMta();
        prepareContext();
        prepareClient();
    }

    @Test
    public void testExecute() throws Exception {
        step.execute(context);
        assertStepFinishedSuccessfully();
        if (shouldWarn) {
            Mockito.verify(stepLogger, Mockito.atLeastOnce())
                .warn(Mockito.anyString());
        }
    }

    // Arms the ExpectedException rule when the fixture expects a failure.
    private void prepareException() {
        if (expectedExceptionMessage != null) {
            expectedException.expectMessage(expectedExceptionMessage);
            expectedException.expect(SLException.class);
        }
    }

    // Builds the already-deployed MTA (its modules and services) and stores
    // it in the process context.
    private void prepareDeployedMta() {
        DeployedMta deployedMta = new DeployedMta();
        prepareServices(deployedMta);
        prepareModules(deployedMta);
        StepsUtil.setDeployedMta(context, deployedMta);
    }

    private void prepareModules(DeployedMta deployedMta) {
        List<DeployedMtaModule> deployedModules = simpleAppListToModuleList(stepInput.appsFromDeployedMta);
        deployedMta.setModules(deployedModules);
    }

    private void prepareServices(DeployedMta deployedMta) {
        Set<String> servicesNames = new HashSet<>();
        stepInput.servicesFromDeployedMta.forEach(service -> servicesNames.add(service.getName()));
        deployedMta.setServices(servicesNames);
    }

    // Seeds the process context with the services/apps to deploy and the
    // apps that already exist in the space.
    private void prepareContext() {
        StepsUtil.setServicesToCreate(context, stepInput.servicesToDeploy);
        List<String> appsToDeploy = new ArrayList<>();
        stepInput.appsToDeploy.forEach(app -> appsToDeploy.add(app.name));
        StepsUtil.setAppsToDeploy(context, appsToDeploy);
        List<CloudApplication> existingApps = new ArrayList<>();
        stepInput.existingApps.forEach(app -> existingApps.add(app.toCloudApplication()));
        StepsUtil.setDeployedApps(context, existingApps);
    }

    // Maps fixture applications to DeployedMtaModule entries (module name ==
    // app name; remaining module attributes are irrelevant for this step).
    private List<DeployedMtaModule> simpleAppListToModuleList(List<SimpleApplication> simpleApps) {
        List<DeployedMtaModule> modulesList = new ArrayList<>();
        simpleApps.forEach(app -> modulesList.add(new DeployedMtaModule(app.name, app.name, null, null, null, null, null)));
        return modulesList;
    }

    // Stubs the controller client with the existing services and their
    // instances (including application bindings).
    private void prepareClient() throws Exception {
        existingServiceInstances = createServiceInstances(stepInput);
        prepareExistingServices();
    }

    private Map<CloudServiceExtended, CloudServiceInstance> createServiceInstances(StepInput stepInput) throws Exception {
        Map<CloudServiceExtended, CloudServiceInstance> result = new HashMap<>();
        for (CloudServiceExtended service : stepInput.existingServices) {
            List<SimpleApplication> boundApplications = findBoundApplications(service.getName(), stepInput.existingApps);
            result.put(service, createServiceInstance(service, boundApplications));
        }
        return result;
    }

    // An app is "bound" to a service when the fixture lists the service name
    // in the app's boundServices.
    private List<SimpleApplication> findBoundApplications(String serviceName, List<SimpleApplication> applications) {
        return applications.stream()
            .filter((application) -> application.boundServices.contains(serviceName))
            .collect(Collectors.toList());
    }

    private CloudServiceInstance createServiceInstance(CloudServiceExtended service, List<SimpleApplication> boundApplications) {
        CloudServiceInstance instance = new CloudServiceInstance();
        instance.setBindings(createServiceBindings(boundApplications));
        instance.setCredentials(service.getCredentials());
        return instance;
    }

    private List<CloudServiceBinding> createServiceBindings(List<SimpleApplication> boundApplications) {
        return boundApplications.stream()
            .map(boundApplication -> createServiceBinding(boundApplication))
            .collect(Collectors.toList());
    }

    // The binding references the app by the deterministic UUID derived from
    // its name (matches the Meta GUID set in toCloudApplication()).
    private CloudServiceBinding createServiceBinding(SimpleApplication boundApplication) {
        CloudServiceBinding binding = new CloudServiceBinding();
        binding.setApplicationGuid(NameUtil.getUUID(boundApplication.name));
        return binding;
    }

    private void prepareExistingServices() {
        List<CloudService> existingServices = new ArrayList<>();
        stepInput.existingServices.forEach(service -> existingServices.add(service));
        Mockito.when(client.getServices())
            .thenReturn(existingServices);
        prepareServiceInstances();
    }

    private void prepareServiceInstances() {
        existingServiceInstances.forEach((service, instance) -> prepareServiceInstance(service, instance));
    }

    private void prepareServiceInstance(CloudServiceExtended service, CloudServiceInstance instance) {
        Mockito.when(client.getServiceInstance(service.getName()))
            .thenReturn(instance);
    }

    @Override
    protected CheckForCreationConflictsStep createStep() {
        return new CheckForCreationConflictsStep();
    }

    // Shape of the JSON fixture; every list defaults to empty so fixtures
    // only need to mention the parts relevant to their scenario.
    private static class StepInput {
        List<CloudServiceExtended> servicesToDeploy = Collections.emptyList();
        List<CloudServiceExtended> existingServices = Collections.emptyList();
        List<CloudServiceExtended> servicesFromDeployedMta = Collections.emptyList();
        List<SimpleApplication> appsToDeploy = Collections.emptyList();
        List<SimpleApplication> existingApps = Collections.emptyList();
        List<SimpleApplication> appsFromDeployedMta = Collections.emptyList();
    }

    private static final MapToEnvironmentConverter ENV_CONVERTER = new MapToEnvironmentConverter(false);

    // Minimal fixture representation of an application; convertible to a
    // CloudApplicationExtended with a name-derived GUID and serialized env.
    private static class SimpleApplication {
        String name;
        List<String> boundServices = Collections.emptyList();
        Map<String, Object> env = Collections.emptyMap();

        CloudApplicationExtended toCloudApplication() {
            CloudApplicationExtended application = new CloudApplicationExtended(new Meta(NameUtil.getUUID(name), null, null), name);
            application.setEnv(MapUtil.upcast(ENV_CONVERTER.asEnv(env)));
            return application;
        }
    }

}
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.source; import android.support.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.source.SampleMetadataQueue.SampleExtrasHolder; import com.google.android.exoplayer2.upstream.Allocation; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; /** * A queue of media samples. */ public final class SampleQueue implements TrackOutput { /** * A listener for changes to the upstream format. */ public interface UpstreamFormatChangedListener { /** * Called on the loading thread when an upstream format change occurs. * * @param format The new upstream format. 
*/ void onUpstreamFormatChanged(Format format); } private static final int INITIAL_SCRATCH_SIZE = 32; private final Allocator allocator; private final int allocationLength; private final SampleMetadataQueue metadataQueue; private final SampleExtrasHolder extrasHolder; private final ParsableByteArray scratch; // References into the linked list of allocations. private AllocationNode firstAllocationNode; private AllocationNode readAllocationNode; private AllocationNode writeAllocationNode; // Accessed only by the consuming thread. private Format downstreamFormat; // Accessed only by the loading thread (or the consuming thread when there is no loading thread). private boolean pendingFormatAdjustment; private Format lastUnadjustedFormat; private long sampleOffsetUs; private long totalBytesWritten; private boolean pendingSplice; private UpstreamFormatChangedListener upstreamFormatChangeListener; /** * @param allocator An {@link Allocator} from which allocations for sample data can be obtained. */ public SampleQueue(Allocator allocator) { this.allocator = allocator; allocationLength = allocator.getIndividualAllocationLength(); metadataQueue = new SampleMetadataQueue(); extrasHolder = new SampleExtrasHolder(); scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE); firstAllocationNode = new AllocationNode(0, allocationLength); readAllocationNode = firstAllocationNode; writeAllocationNode = firstAllocationNode; } // Called by the consuming thread, but only when there is no loading thread. /** * Resets the output without clearing the upstream format. Equivalent to {@code reset(false)}. */ public void reset() { reset(false); } /** * Resets the output. * * @param resetUpstreamFormat Whether the upstream format should be cleared. If set to false, * samples queued after the reset (and before a subsequent call to {@link #format(Format)}) * are assumed to have the current upstream format. 
If set to true, {@link #format(Format)}
   * must be called after the reset before any more samples can be queued.
   */
  public void reset(boolean resetUpstreamFormat) {
    metadataQueue.reset(resetUpstreamFormat);
    // Release every allocation and restart with a single uninitialized node at position 0.
    clearAllocationNodes(firstAllocationNode);
    firstAllocationNode = new AllocationNode(0, allocationLength);
    readAllocationNode = firstAllocationNode;
    writeAllocationNode = firstAllocationNode;
    totalBytesWritten = 0;
    allocator.trim();
  }

  /**
   * Sets a source identifier for subsequent samples.
   *
   * @param sourceId The source identifier.
   */
  public void sourceId(int sourceId) {
    metadataQueue.sourceId(sourceId);
  }

  /**
   * Indicates samples that are subsequently queued should be spliced into those already queued.
   */
  public void splice() {
    pendingSplice = true;
  }

  /**
   * Returns the current absolute write index.
   */
  public int getWriteIndex() {
    return metadataQueue.getWriteIndex();
  }

  /**
   * Discards samples from the write side of the queue.
   *
   * @param discardFromIndex The absolute index of the first sample to be discarded. Must be in the
   *     range [{@link #getReadIndex()}, {@link #getWriteIndex()}].
   */
  public void discardUpstreamSamples(int discardFromIndex) {
    totalBytesWritten = metadataQueue.discardUpstreamSamples(discardFromIndex);
    if (totalBytesWritten == 0 || totalBytesWritten == firstAllocationNode.startPosition) {
      // Everything (or everything after the first node's start) was discarded: rebuild the list
      // as a single uninitialized node starting at the new write position.
      clearAllocationNodes(firstAllocationNode);
      firstAllocationNode = new AllocationNode(totalBytesWritten, allocationLength);
      readAllocationNode = firstAllocationNode;
      writeAllocationNode = firstAllocationNode;
    } else {
      // Find the last node containing at least 1 byte of data that we need to keep.
      AllocationNode lastNodeToKeep = firstAllocationNode;
      while (totalBytesWritten > lastNodeToKeep.endPosition) {
        lastNodeToKeep = lastNodeToKeep.next;
      }
      // Discard all subsequent nodes.
      AllocationNode firstNodeToDiscard = lastNodeToKeep.next;
      clearAllocationNodes(firstNodeToDiscard);
      // Reset the successor of the last node to be an uninitialized node.
      lastNodeToKeep.next = new AllocationNode(lastNodeToKeep.endPosition, allocationLength);
      // Update writeAllocationNode and readAllocationNode as necessary.
      writeAllocationNode = totalBytesWritten == lastNodeToKeep.endPosition ? lastNodeToKeep.next
          : lastNodeToKeep;
      if (readAllocationNode == firstNodeToDiscard) {
        readAllocationNode = lastNodeToKeep.next;
      }
    }
  }

  // Called by the consuming thread.

  /**
   * Returns whether a sample is available to be read.
   */
  public boolean hasNextSample() {
    return metadataQueue.hasNextSample();
  }

  /**
   * Returns the current absolute read index.
   */
  public int getReadIndex() {
    return metadataQueue.getReadIndex();
  }

  /**
   * Peeks the source id of the next sample to be read, or the current upstream source id if the
   * queue is empty or if the read position is at the end of the queue.
   *
   * @return The source id.
   */
  public int peekSourceId() {
    return metadataQueue.peekSourceId();
  }

  /**
   * Returns the upstream {@link Format} in which samples are being queued.
   */
  public Format getUpstreamFormat() {
    return metadataQueue.getUpstreamFormat();
  }

  /**
   * Returns the largest sample timestamp that has been queued since the last {@link #reset}.
   * <p>
   * Samples that were discarded by calling {@link #discardUpstreamSamples(int)} are not
   * considered as having been queued. Samples that were dequeued from the front of the queue are
   * considered as having been queued.
   *
   * @return The largest sample timestamp that has been queued, or {@link Long#MIN_VALUE} if no
   *     samples have been queued.
   */
  public long getLargestQueuedTimestampUs() {
    return metadataQueue.getLargestQueuedTimestampUs();
  }

  /**
   * Rewinds the read position to the first sample in the queue.
   */
  public void rewind() {
    metadataQueue.rewind();
    // The first node is, by definition, where the first retained sample's data lives.
    readAllocationNode = firstAllocationNode;
  }

  /**
   * Discards up to but not including the sample immediately before or at the specified time.
   *
   * @param timeUs The time to discard to.
   * @param toKeyframe If true then discards samples up to the keyframe before or at the specified
   *     time, rather than any sample before or at that time.
   * @param stopAtReadPosition If true then samples are only discarded if they're before the
   *     read position. If false then samples at and beyond the read position may be discarded, in
   *     which case the read position is advanced to the first remaining sample.
   */
  public void discardTo(long timeUs, boolean toKeyframe, boolean stopAtReadPosition) {
    discardDownstreamTo(metadataQueue.discardTo(timeUs, toKeyframe, stopAtReadPosition));
  }

  /**
   * Discards up to but not including the read position.
   */
  public void discardToRead() {
    discardDownstreamTo(metadataQueue.discardToRead());
  }

  /**
   * Discards to the end of the queue. The read position is also advanced.
   */
  public void discardToEnd() {
    discardDownstreamTo(metadataQueue.discardToEnd());
  }

  /**
   * Advances the read position to the end of the queue.
   */
  public void advanceToEnd() {
    metadataQueue.advanceToEnd();
  }

  /**
   * Attempts to advance the read position to the sample before or at the specified time.
   *
   * @param timeUs The time to advance to.
   * @param toKeyframe If true then attempts to advance to the keyframe before or at the specified
   *     time, rather than to any sample before or at that time.
   * @param allowTimeBeyondBuffer Whether the operation can succeed if {@code timeUs} is beyond the
   *     end of the queue, by advancing the read position to the last sample (or keyframe).
   * @return Whether the operation was a success. A successful advance is one in which the read
   *     position was unchanged or advanced, and is now at a sample meeting the specified criteria.
   */
  public boolean advanceTo(long timeUs, boolean toKeyframe, boolean allowTimeBeyondBuffer) {
    return metadataQueue.advanceTo(timeUs, toKeyframe, allowTimeBeyondBuffer);
  }

  /**
   * Attempts to read from the queue.
   *
   * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
   * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
   *     end of the stream. If the end of the stream has been reached, the
   *     {@link C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
   * @param formatRequired Whether the caller requires that the format of the stream be read even if
   *     it's not changing. A sample will never be read if set to true, however it is still possible
   *     for the end of stream or nothing to be read.
   * @param loadingFinished True if an empty queue should be considered the end of the stream.
   * @param decodeOnlyUntilUs If a buffer is read, the {@link C#BUFFER_FLAG_DECODE_ONLY} flag will
   *     be set if the buffer's timestamp is less than this value.
   * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
   *     {@link C#RESULT_BUFFER_READ}.
   */
  public int read(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired,
      boolean loadingFinished, long decodeOnlyUntilUs) {
    int result = metadataQueue.read(formatHolder, buffer, formatRequired, loadingFinished,
        downstreamFormat, extrasHolder);
    switch (result) {
      case C.RESULT_FORMAT_READ:
        // Remember the format so the metadata queue can detect no-op format reads next time.
        downstreamFormat = formatHolder.format;
        return C.RESULT_FORMAT_READ;
      case C.RESULT_BUFFER_READ:
        if (!buffer.isEndOfStream()) {
          if (buffer.timeUs < decodeOnlyUntilUs) {
            buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
          }
          // Read encryption data if the sample is encrypted.
          if (buffer.isEncrypted()) {
            // Note: this also adjusts extrasHolder.offset/size past the encryption preamble.
            readEncryptionData(buffer, extrasHolder);
          }
          // Write the sample data into the holder.
          buffer.ensureSpaceForWrite(extrasHolder.size);
          readData(extrasHolder.offset, buffer.data, extrasHolder.size);
        }
        return C.RESULT_BUFFER_READ;
      case C.RESULT_NOTHING_READ:
        return C.RESULT_NOTHING_READ;
      default:
        throw new IllegalStateException();
    }
  }

  /**
   * Reads encryption data for the current sample.
   * <p>
   * The encryption data is written into {@link DecoderInputBuffer#cryptoInfo}, and
   * {@link SampleExtrasHolder#size} is adjusted to subtract the number of bytes that were read. The
   * same value is added to {@link SampleExtrasHolder#offset}.
   *
   * @param buffer The buffer into which the encryption data should be written.
   * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted.
   */
  private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder extrasHolder) {
    long offset = extrasHolder.offset;

    // Read the signal byte. Top bit flags subsample encryption; low 7 bits carry the IV size.
    scratch.reset(1);
    readData(offset, scratch.data, 1);
    offset++;
    byte signalByte = scratch.data[0];
    boolean subsampleEncryption = (signalByte & 0x80) != 0;
    int ivSize = signalByte & 0x7F;

    // Read the initialization vector.
    if (buffer.cryptoInfo.iv == null) {
      // 16 bytes is the maximum IV length; only the first ivSize bytes are overwritten below.
      buffer.cryptoInfo.iv = new byte[16];
    }
    readData(offset, buffer.cryptoInfo.iv, ivSize);
    offset += ivSize;

    // Read the subsample count, if present.
    int subsampleCount;
    if (subsampleEncryption) {
      scratch.reset(2);
      readData(offset, scratch.data, 2);
      offset += 2;
      subsampleCount = scratch.readUnsignedShort();
    } else {
      subsampleCount = 1;
    }

    // Write the clear and encrypted subsample sizes.
    int[] clearDataSizes = buffer.cryptoInfo.numBytesOfClearData;
    if (clearDataSizes == null || clearDataSizes.length < subsampleCount) {
      clearDataSizes = new int[subsampleCount];
    }
    int[] encryptedDataSizes = buffer.cryptoInfo.numBytesOfEncryptedData;
    if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) {
      encryptedDataSizes = new int[subsampleCount];
    }
    if (subsampleEncryption) {
      // Each subsample entry is 6 bytes: unsigned short clear size + unsigned int encrypted size.
      int subsampleDataLength = 6 * subsampleCount;
      scratch.reset(subsampleDataLength);
      readData(offset, scratch.data, subsampleDataLength);
      offset += subsampleDataLength;
      scratch.setPosition(0);
      for (int i = 0; i < subsampleCount; i++) {
        clearDataSizes[i] = scratch.readUnsignedShort();
        encryptedDataSizes[i] = scratch.readUnsignedIntToInt();
      }
    } else {
      // Whole-sample encryption: everything after the preamble is one encrypted region.
      clearDataSizes[0] = 0;
      encryptedDataSizes[0] = extrasHolder.size - (int) (offset - extrasHolder.offset);
    }

    // Populate the cryptoInfo.
    CryptoData cryptoData = extrasHolder.cryptoData;
    buffer.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes,
        cryptoData.encryptionKey, buffer.cryptoInfo.iv, cryptoData.cryptoMode,
        cryptoData.encryptedBlocks, cryptoData.clearBlocks);

    // Adjust the offset and size to take into account the bytes read.
    int bytesRead = (int) (offset - extrasHolder.offset);
    extrasHolder.offset += bytesRead;
    extrasHolder.size -= bytesRead;
  }

  /**
   * Reads data from the front of the rolling buffer.
   *
   * @param absolutePosition The absolute position from which data should be read.
   * @param target The buffer into which data should be written.
   * @param length The number of bytes to read.
   */
  private void readData(long absolutePosition, ByteBuffer target, int length) {
    advanceReadTo(absolutePosition);
    int remaining = length;
    while (remaining > 0) {
      // Copy at most up to the end of the current allocation node, then step to the next node.
      int toCopy = Math.min(remaining, (int) (readAllocationNode.endPosition - absolutePosition));
      Allocation allocation = readAllocationNode.allocation;
      target.put(allocation.data, readAllocationNode.translateOffset(absolutePosition), toCopy);
      remaining -= toCopy;
      absolutePosition += toCopy;
      if (absolutePosition == readAllocationNode.endPosition) {
        readAllocationNode = readAllocationNode.next;
      }
    }
  }

  /**
   * Reads data from the front of the rolling buffer.
   *
   * @param absolutePosition The absolute position from which data should be read.
   * @param target The array into which data should be written.
   * @param length The number of bytes to read.
   */
  private void readData(long absolutePosition, byte[] target, int length) {
    advanceReadTo(absolutePosition);
    int remaining = length;
    while (remaining > 0) {
      int toCopy = Math.min(remaining, (int) (readAllocationNode.endPosition - absolutePosition));
      Allocation allocation = readAllocationNode.allocation;
      // (length - remaining) is the number of bytes already written into target.
      System.arraycopy(allocation.data, readAllocationNode.translateOffset(absolutePosition),
          target, length - remaining, toCopy);
      remaining -= toCopy;
      absolutePosition += toCopy;
      if (absolutePosition == readAllocationNode.endPosition) {
        readAllocationNode = readAllocationNode.next;
      }
    }
  }

  /**
   * Advances {@link #readAllocationNode} to the specified absolute position.
   *
   * @param absolutePosition The position to which {@link #readAllocationNode} should be advanced.
   */
  private void advanceReadTo(long absolutePosition) {
    while (absolutePosition >= readAllocationNode.endPosition) {
      readAllocationNode = readAllocationNode.next;
    }
  }

  /**
   * Advances {@link #firstAllocationNode} to the specified absolute position.
   * {@link #readAllocationNode} is also advanced if necessary to avoid it falling behind
   * {@link #firstAllocationNode}. Nodes that have been advanced past are cleared, and their
   * underlying allocations are returned to the allocator.
   *
   * @param absolutePosition The position to which {@link #firstAllocationNode} should be advanced.
   *     May be {@link C#POSITION_UNSET}, in which case calling this method is a no-op.
   */
  private void discardDownstreamTo(long absolutePosition) {
    if (absolutePosition == C.POSITION_UNSET) {
      return;
    }
    while (absolutePosition >= firstAllocationNode.endPosition) {
      allocator.release(firstAllocationNode.allocation);
      firstAllocationNode = firstAllocationNode.clear();
    }
    // If we discarded the node referenced by readAllocationNode then we need to advance it to the
    // first remaining node.
    if (readAllocationNode.startPosition < firstAllocationNode.startPosition) {
      readAllocationNode = firstAllocationNode;
    }
  }

  // Called by the loading thread.

  /**
   * Sets a listener to be notified of changes to the upstream format.
   *
   * @param listener The listener.
   */
  public void setUpstreamFormatChangeListener(UpstreamFormatChangedListener listener) {
    upstreamFormatChangeListener = listener;
  }

  /**
   * Sets an offset that will be added to the timestamps (and sub-sample timestamps) of samples
   * that are subsequently queued.
   *
   * @param sampleOffsetUs The timestamp offset in microseconds.
   */
  public void setSampleOffsetUs(long sampleOffsetUs) {
    if (this.sampleOffsetUs != sampleOffsetUs) {
      this.sampleOffsetUs = sampleOffsetUs;
      // The last format needs re-adjusting with the new offset before the next sample is queued.
      pendingFormatAdjustment = true;
    }
  }

  @Override
  public void format(Format format) {
    Format adjustedFormat = getAdjustedSampleFormat(format, sampleOffsetUs);
    boolean formatChanged = metadataQueue.format(adjustedFormat);
    lastUnadjustedFormat = format;
    pendingFormatAdjustment = false;
    if (upstreamFormatChangeListener != null && formatChanged) {
      upstreamFormatChangeListener.onUpstreamFormatChanged(adjustedFormat);
    }
  }

  @Override
  public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput)
      throws IOException, InterruptedException {
    // preAppend may reduce length to what fits in the current allocation node.
    length = preAppend(length);
    int bytesAppended = input.read(writeAllocationNode.allocation.data,
        writeAllocationNode.translateOffset(totalBytesWritten), length);
    if (bytesAppended == C.RESULT_END_OF_INPUT) {
      if (allowEndOfInput) {
        return C.RESULT_END_OF_INPUT;
      }
      throw new EOFException();
    }
    postAppend(bytesAppended);
    return bytesAppended;
  }

  @Override
  public void sampleData(ParsableByteArray buffer, int length) {
    // Unlike the ExtractorInput overload, this loops until all requested bytes are written.
    while (length > 0) {
      int bytesAppended = preAppend(length);
      buffer.readBytes(writeAllocationNode.allocation.data,
          writeAllocationNode.translateOffset(totalBytesWritten), bytesAppended);
      length -= bytesAppended;
      postAppend(bytesAppended);
    }
  }

  @Override
  public void sampleMetadata(long timeUs, @C.BufferFlags int flags, int size, int offset,
      CryptoData cryptoData) {
    if (pendingFormatAdjustment) {
      // Re-commit the last format with the current sample offset applied.
      format(lastUnadjustedFormat);
    }
    if (pendingSplice) {
      // A splice can only start on a keyframe that the metadata queue accepts; until then,
      // queued samples are dropped.
      if ((flags & C.BUFFER_FLAG_KEY_FRAME) == 0 || !metadataQueue.attemptSplice(timeUs)) {
        return;
      }
      pendingSplice = false;
    }
    timeUs += sampleOffsetUs;
    long absoluteOffset = totalBytesWritten - size - offset;
    metadataQueue.commitSample(timeUs, flags, absoluteOffset, size, cryptoData);
  }

  // Private methods.

  /**
   * Clears allocation nodes starting from {@code fromNode}.
   *
   * @param fromNode The node from which to clear.
   */
  private void clearAllocationNodes(AllocationNode fromNode) {
    if (!fromNode.wasInitialized) {
      return;
    }
    // Bulk release allocations for performance (it's significantly faster when using
    // DefaultAllocator because the allocator's lock only needs to be acquired and released once)
    // [Internal: See b/29542039].
    int allocationCount = (writeAllocationNode.wasInitialized ? 1 : 0)
        + ((int) (writeAllocationNode.startPosition - fromNode.startPosition) / allocationLength);
    Allocation[] allocationsToRelease = new Allocation[allocationCount];
    AllocationNode currentNode = fromNode;
    for (int i = 0; i < allocationsToRelease.length; i++) {
      allocationsToRelease[i] = currentNode.allocation;
      currentNode = currentNode.clear();
    }
    allocator.release(allocationsToRelease);
  }

  /**
   * Called before writing sample data to {@link #writeAllocationNode}. May cause
   * {@link #writeAllocationNode} to be initialized.
   *
   * @param length The number of bytes that the caller wishes to write.
   * @return The number of bytes that the caller is permitted to write, which may be less than
   *     {@code length}.
   */
  private int preAppend(int length) {
    if (!writeAllocationNode.wasInitialized) {
      // Lazily allocate backing memory and link in the (uninitialized) successor node.
      writeAllocationNode.initialize(allocator.allocate(),
          new AllocationNode(writeAllocationNode.endPosition, allocationLength));
    }
    return Math.min(length, (int) (writeAllocationNode.endPosition - totalBytesWritten));
  }

  /**
   * Called after writing sample data. May cause {@link #writeAllocationNode} to be advanced.
   *
   * @param length The number of bytes that were written.
   */
  private void postAppend(int length) {
    totalBytesWritten += length;
    if (totalBytesWritten == writeAllocationNode.endPosition) {
      writeAllocationNode = writeAllocationNode.next;
    }
  }

  /**
   * Adjusts a {@link Format} to incorporate a sample offset into {@link Format#subsampleOffsetUs}.
   *
   * @param format The {@link Format} to adjust.
   * @param sampleOffsetUs The offset to apply.
   * @return The adjusted {@link Format}.
   */
  private static Format getAdjustedSampleFormat(Format format, long sampleOffsetUs) {
    if (format == null) {
      return null;
    }
    if (sampleOffsetUs != 0 && format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) {
      format = format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs);
    }
    return format;
  }

  /**
   * A node in a linked list of {@link Allocation}s held by the output.
   */
  private static final class AllocationNode {

    /**
     * The absolute position of the start of the data (inclusive).
     */
    public final long startPosition;
    /**
     * The absolute position of the end of the data (exclusive).
     */
    public final long endPosition;
    /**
     * Whether the node has been initialized. Remains true after {@link #clear()}.
     */
    public boolean wasInitialized;
    /**
     * The {@link Allocation}, or {@code null} if the node is not initialized.
     */
    @Nullable public Allocation allocation;
    /**
     * The next {@link AllocationNode} in the list, or {@code null} if the node has not been
     * initialized. Remains set after {@link #clear()}.
     */
    @Nullable public AllocationNode next;

    /**
     * @param startPosition See {@link #startPosition}.
     * @param allocationLength The length of the {@link Allocation} with which this node will be
     *     initialized.
     */
    public AllocationNode(long startPosition, int allocationLength) {
      this.startPosition = startPosition;
      this.endPosition = startPosition + allocationLength;
    }

    /**
     * Initializes the node.
     *
     * @param allocation The node's {@link Allocation}.
     * @param next The next {@link AllocationNode}.
     */
    public void initialize(Allocation allocation, AllocationNode next) {
      this.allocation = allocation;
      this.next = next;
      wasInitialized = true;
    }

    /**
     * Gets the offset into the {@link #allocation}'s {@link Allocation#data} that corresponds to
     * the specified absolute position.
     *
     * @param absolutePosition The absolute position.
     * @return The corresponding offset into the allocation's data.
     */
    public int translateOffset(long absolutePosition) {
      return (int) (absolutePosition - startPosition) + allocation.offset;
    }

    /**
     * Clears {@link #allocation} and {@link #next}.
     *
     * @return The cleared next {@link AllocationNode}.
     */
    public AllocationNode clear() {
      allocation = null;
      AllocationNode temp = next;
      next = null;
      return temp;
    }

  }

}
/*******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.alfresco.orm;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.namespace.QName;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.BeanFactory;

import com.alfresco.orm.annotation.AlfrescoAspect;
import com.alfresco.orm.annotation.AlfrescoAssociation;
import com.alfresco.orm.annotation.AlfrescoQName;
import com.alfresco.orm.annotation.SetProperty;
import com.alfresco.orm.annotation.SpringBeanID;
import com.alfresco.orm.exception.ORMException;
import com.alfresco.orm.mapping.AlfrescoContent;
import com.alfresco.orm.reflection.ReflectionUtil;

/**
 * Reflection-based helpers that map annotated {@link AlfrescoORM} POJOs onto Alfresco
 * repository nodes: building property maps, persisting properties, invoking custom
 * property setters, and synchronising peer associations.
 */
public abstract class ORMUtil
{
	/**
	 * Builds the Alfresco property map for the given ORM object by reflecting over its fields.
	 * <p>
	 * Fields annotated with {@link SetProperty} are skipped (they are handled by
	 * {@link #executeCustomeMethodForProperty}). Fields annotated with {@link AlfrescoQName}
	 * (and not {@link AlfrescoAssociation}) contribute their getter value under the declared
	 * QName. Fields annotated with {@link AlfrescoAspect} are recursed into, and the aspect's
	 * own properties are merged into the result.
	 *
	 * @param alfrescoORM the annotated POJO to read property values from
	 * @return a map of property QNames to serializable values (never {@code null})
	 */
	public static Map<QName, Serializable> getAlfrescoProperty(final AlfrescoORM alfrescoORM) throws IllegalArgumentException,
			IllegalAccessException, SecurityException, NoSuchMethodException, InvocationTargetException, InstantiationException
	{
		List<Field> fields = new ArrayList<Field>();
		ReflectionUtil.getFields(alfrescoORM.getClass(), fields);
		Map<QName, Serializable> retVal = new HashMap<QName, Serializable>(fields.size());
		for (Field field : fields)
		{
			// Fields with SetProperty are persisted via a custom method, not the property map.
			if (!field.isAnnotationPresent(SetProperty.class))
			{
				if (field.isAnnotationPresent(AlfrescoQName.class) && !field.isAnnotationPresent(AlfrescoAssociation.class))
				{
					AlfrescoQName alfrescoQName = field.getAnnotation(AlfrescoQName.class);
					QName qName = QName.createQName(alfrescoQName.namespaceURI(), alfrescoQName.localName());
					Method getterMethod = ReflectionUtil.getMethod(alfrescoORM.getClass(), field.getName());
					retVal.put(qName, (Serializable) getterMethod.invoke(alfrescoORM));
				}
				else if (field.isAnnotationPresent(AlfrescoAspect.class))
				{
					// Aspects are nested ORM objects; merge their properties recursively.
					Method getterMethod = ReflectionUtil.getMethod(alfrescoORM.getClass(), field.getName());
					AlfrescoORM aspect = (AlfrescoORM) getterMethod.invoke(alfrescoORM);
					if (null != aspect)
					{
						retVal.putAll(getAlfrescoProperty(aspect));
					}
				}
			}
		}
		return retVal;
	}

	/**
	 * Persists the given properties on the node backing {@code alfrescoContent}, excluding any
	 * property whose local name appears in {@code restrictedPropertiesForUpdate}.
	 *
	 * @param alfrescoContent the content object identifying the target node
	 * @param properties the candidate properties to persist
	 * @param nodeService the Alfresco node service used to write the properties
	 * @param restrictedPropertiesForUpdate local names of properties that must not be updated
	 * @throws NoSuchMethodException
	 * @throws IllegalAccessException
	 * @throws InvocationTargetException
	 * @throws ORMException
	 */
	public static void saveProperties(final AlfrescoContent alfrescoContent, final Map<QName, Serializable> properties,
			final NodeService nodeService, final List<String> restrictedPropertiesForUpdate) throws NoSuchMethodException,
			IllegalAccessException, InvocationTargetException, ORMException
	{
		NodeRef nodeRef = getNodeRef(alfrescoContent);
		Map<QName, Serializable> propertiesFinal = new HashMap<QName, Serializable>(properties.size());
		for (Entry<QName, Serializable> prop : properties.entrySet())
		{
			if (!restrictedPropertiesForUpdate.contains(prop.getKey().getLocalName()))
			{
				propertiesFinal.put(prop.getKey(), prop.getValue());
			}
		}
		// BUG FIX: previously the unfiltered 'properties' map was written here, which
		// silently ignored restrictedPropertiesForUpdate. Persist the filtered map instead.
		nodeService.setProperties(nodeRef, propertiesFinal);
	}

	/**
	 * Invokes the custom setter method declared by each {@link SetProperty}-annotated field of
	 * {@code alfrescoContent}. The target bean is resolved from {@code beanFactory} via the
	 * annotation's {@link SpringBeanID}, and the method is called with the node ref and the
	 * content object.
	 *
	 * @param alfrescoContent the content object whose custom property setters should run
	 * @param beanFactory the Spring bean factory used to resolve target service beans
	 * @throws ORMException if an annotated field declares no custom method name
	 */
	public static void executeCustomeMethodForProperty(final AlfrescoContent alfrescoContent, final BeanFactory beanFactory)
			throws SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException,
			InvocationTargetException, ORMException
	{
		NodeRef nodeRef = getNodeRef(alfrescoContent);
		List<Field> fields = new ArrayList<Field>();
		ReflectionUtil.getFields(alfrescoContent.getClass(), fields);
		for (Field field : fields)
		{
			if (field.isAnnotationPresent(SetProperty.class))
			{
				SetProperty setProperty = field.getAnnotation(SetProperty.class);
				if (StringUtils.isNotEmpty(setProperty.setPropertMethodName()))
				{
					Object target = getTargetServiceBean(setProperty.springBeanID(), beanFactory);
					// The custom method must have the signature (NodeRef, AlfrescoORM).
					Method customeMethod = target.getClass().getMethod(setProperty.setPropertMethodName(), NodeRef.class,
							AlfrescoORM.class);
					customeMethod.invoke(target, nodeRef, alfrescoContent);
				}
				else
				{
					throw new ORMException("Please set cutome method name to set property");
				}
			}
		}
	}

	/**
	 * Synchronises the peer associations declared by {@link AlfrescoAssociation}-annotated fields
	 * of {@code alfrescoContent}. For each association QName all existing target associations are
	 * first removed, then re-created for the current target objects; targets without a node UUID
	 * are saved first via {@link CreateHelper}.
	 *
	 * @param alfrescoContent the content object whose associations should be synchronised
	 * @param beanFactory the Spring bean factory (reserved for helper resolution)
	 * @param serviceRegistry registry providing the node service
	 * @throws ORMException if an association field is missing its {@link AlfrescoQName} annotation
	 */
	public static void executeAssociation(final AlfrescoContent alfrescoContent, final BeanFactory beanFactory,
			final ServiceRegistry serviceRegistry) throws ORMException, IllegalArgumentException, IllegalAccessException,
			InvocationTargetException
	{
		NodeRef nodeRef = getNodeRef(alfrescoContent);
		List<Field> fields = new ArrayList<Field>();
		ReflectionUtil.getFields(alfrescoContent.getClass(), fields);
		for (Field field : fields)
		{
			if (field.isAnnotationPresent(AlfrescoAssociation.class))
			{
				AlfrescoQName alfrescoQName = field.getAnnotation(AlfrescoQName.class);
				if (null == alfrescoQName)
				{
					throw new ORMException("please add alfresco quname aspect to the association field: " + field);
				}
				List<AlfrescoContent> associationAlfrescoORMs = getAsscoiationObject(alfrescoContent, field);
				QName qName = QName.createQName(alfrescoQName.namespaceURI(), alfrescoQName.localName());
				// Start from a clean slate: drop every existing association for this QName.
				removeAllAssociation(serviceRegistry, nodeRef, qName);
				for (AlfrescoContent associationAlfrescoORM : associationAlfrescoORMs)
				{
					if (StringUtils.isNotBlank(associationAlfrescoORM.getNodeUUID()))
					{
						// TODO: understand requirement and check that do we need to update pojo or just need to update association
						// UpdateHelper.getUpdateHelper().update(associationAlfrescoORM);
						NodeRef associationNodeRef = getNodeRef(associationAlfrescoORM);
						// Guard against creating a duplicate association to the same target.
						List<AssociationRef> associationRefs = serviceRegistry.getNodeService().getTargetAssocs(nodeRef, qName);
						if (!associationRefs.isEmpty())
						{
							boolean doAdd = true;
							for (AssociationRef associationRef : associationRefs)
							{
								if (associationRef.getTargetRef().equals(associationNodeRef))
								{
									doAdd = false;
								}
							}
							if (doAdd)
							{
								serviceRegistry.getNodeService().createAssociation(nodeRef, associationNodeRef, qName);
							}
						}
						else
						{
							serviceRegistry.getNodeService().createAssociation(nodeRef, associationNodeRef, qName);
						}
					}
					else
					{
						// Target has never been persisted: save it, then associate.
						CreateHelper.getCreateHelper().save(associationAlfrescoORM);
						NodeRef associationNodeRef = getNodeRef(associationAlfrescoORM);
						serviceRegistry.getNodeService().createAssociation(nodeRef, associationNodeRef, qName);
					}
				}
			}
		}
	}

	/**
	 * Method to remove all association for QName from the noderef
	 *
	 * @param serviceRegistry registry providing the node service
	 * @param nodeRef the source node whose associations are removed
	 * @param qName the association type to remove
	 */
	private static void removeAllAssociation(ServiceRegistry serviceRegistry, NodeRef nodeRef, QName qName)
	{
		List<AssociationRef> associationRefs = serviceRegistry.getNodeService().getTargetAssocs(nodeRef, qName);
		for (AssociationRef associationRef : associationRefs)
		{
			serviceRegistry.getNodeService().removeAssociation(nodeRef, associationRef.getTargetRef(), qName);
		}
	}

	/**
	 * Reads the association target(s) for the given field via its getter. Fields declared
	 * {@code many} are expected to return a collection; otherwise a single object is wrapped
	 * into a list. {@code null} getter results yield an empty list.
	 *
	 * @param alfrescoContent the content object to read from
	 * @param field the {@link AlfrescoAssociation}-annotated field
	 * @return the association targets, never {@code null}
	 */
	private static List<AlfrescoContent> getAsscoiationObject(final AlfrescoContent alfrescoContent, final Field field)
			throws IllegalAccessException, InvocationTargetException
	{
		List<AlfrescoContent> retVal = new ArrayList<AlfrescoContent>();
		AlfrescoAssociation alfrescoAssociation = field.getAnnotation(AlfrescoAssociation.class);
		Method method = ReflectionUtil.getMethod(alfrescoContent.getClass(), field.getName());
		if (alfrescoAssociation.many())
		{
			Collection<? extends AlfrescoContent> temp = (Collection<? extends AlfrescoContent>) method.invoke(alfrescoContent);
			if (null != temp)
			{
				retVal.addAll(temp);
			}
		}
		else
		{
			AlfrescoContent temp = (AlfrescoContent) method.invoke(alfrescoContent);
			if (null != temp)
			{
				retVal.add(temp);
			}
		}
		return retVal;
	}

	/**
	 * Builds a workspace SpacesStore {@link NodeRef} from the content object's node UUID.
	 * NOTE(review): assumes {@code getNodeUUID()} is non-blank; callers persist the object
	 * first when the UUID is missing.
	 *
	 * @param alfrescoContent the content object carrying the node UUID
	 * @return the node reference in the workspace SpacesStore
	 */
	public static NodeRef getNodeRef(final AlfrescoContent alfrescoContent)
	{
		NodeRef nodeRef = new NodeRef(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, alfrescoContent.getNodeUUID());
		return nodeRef;
	}

	/**
	 * Resolves the Spring bean named by the given {@link SpringBeanID} annotation.
	 *
	 * @param springBeanID annotation carrying the bean id
	 * @param beanFactory the Spring bean factory to resolve against
	 * @return the resolved bean
	 * @throws ORMException if the annotation carries an empty bean id
	 */
	public static Object getTargetServiceBean(final SpringBeanID springBeanID, final BeanFactory beanFactory) throws ORMException
	{
		String springBeanId = springBeanID.value();
		if (StringUtils.isEmpty(springBeanId))
		{
			throw new ORMException("Please provide springBeanId");
		}
		Object target = beanFactory.getBean(springBeanId);
		return target;
	}
}
package org.apache.lucene.store;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoDeletionPolicy;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.ThrottledIndexOutput;
import org.apache.lucene.util._TestUtil;

/**
 * This is a Directory Wrapper that adds methods
 * intended to be used only by unit tests.
 * It also adds a number of features useful for testing:
 * <ul>
 *   <li> Instances created by {@link LuceneTestCase#newDirectory()} are tracked
 *        to ensure they are closed by the test.
 *   <li> When a MockDirectoryWrapper is closed, it will throw an exception if
 *        it has any open files against it (with a stacktrace indicating where
 *        they were opened from).
 *   <li> When a MockDirectoryWrapper is closed, it runs CheckIndex to test if
 *        the index was corrupted.
 *   <li> MockDirectoryWrapper simulates some "features" of Windows, such as
 *        refusing to write/delete to open files.
 * </ul>
 */
public class MockDirectoryWrapper extends BaseDirectoryWrapper {
  // Simulated disk-full limit; 0 presumably means unlimited — TODO confirm against callers.
  long maxSize;

  // Max actual bytes used. This is set by MockRAMOutputStream:
  long maxUsedSize;
  // Probability of injecting a simulated IOException on I/O.
  double randomIOExceptionRate;
  Random randomState;
  boolean noDeleteOpenFile = true;
  boolean preventDoubleWrite = true;
  boolean trackDiskUsage = false;
  boolean wrapLockFactory = true;
  // Files written since the last sync; these are the candidates for corruption in crash().
  private Set<String> unSyncedFiles;
  private Set<String> createdFiles;
  private Set<String> openFilesForWrite = new HashSet<String>();
  Set<String> openLocks = Collections.synchronizedSet(new HashSet<String>());
  volatile boolean crashed;
  private ThrottledIndexOutput throttledOutput;
  private Throttling throttling = Throttling.SOMETIMES;

  final AtomicInteger inputCloneCount = new AtomicInteger();

  // use this for tracking files for crash.
  // additionally: provides debugging information in case you leave one open
  private Map<Closeable,Exception> openFileHandles = Collections.synchronizedMap(new IdentityHashMap<Closeable,Exception>());

  // NOTE: we cannot initialize the Map here due to the
  // order in which our constructor actually does this
  // member initialization vs when it calls super.  It seems
  // like super is called, then our members are initialized:
  private Map<String,Integer> openFiles;

  // Only tracked if noDeleteOpenFile is true: if an attempt
  // is made to delete an open file, we enroll it here.
  private Set<String> openFilesDeleted;

  // Lazy initializer for the maps/sets that cannot be field-initialized (see NOTE above).
  private synchronized void init() {
    if (openFiles == null) {
      openFiles = new HashMap<String,Integer>();
      openFilesDeleted = new HashSet<String>();
    }

    if (createdFiles == null)
      createdFiles = new HashSet<String>();
    if (unSyncedFiles == null)
      unSyncedFiles = new HashSet<String>();
  }

  public MockDirectoryWrapper(Random random, Directory delegate) {
    super(delegate);
    // must make a private random since our methods are
    // called from different threads; else test failures may
    // not be reproducible from the original seed
    this.randomState = new Random(random.nextInt());
    this.throttledOutput = new ThrottledIndexOutput(ThrottledIndexOutput
        .mBitsToBytes(40 + randomState.nextInt(10)), 5 + randomState.nextInt(5), null);
    // force wrapping of lockfactory
    this.lockFactory = new MockLockFactoryWrapper(this, delegate.getLockFactory());
    init();
  }

  public Directory getDelegate() {
    return this.delegate;
  }

  public int getInputCloneCount() {
    return inputCloneCount.get();
  }

  public void setTrackDiskUsage(boolean v) {
    trackDiskUsage = v;
  }

  /** If set to true, we throw an IOException if the same
   *  file is opened by createOutput, ever. */
  public void setPreventDoubleWrite(boolean value) {
    preventDoubleWrite = value;
  }

  /**
   * Enum for controlling hard disk throttling.
   * Set via {@link MockDirectoryWrapper #setThrottling(Throttling)}
   * <p>
   * WARNING: can make tests very slow.
   */
  public static enum Throttling {
    /** always emulate a slow hard disk. could be very slow! */
    ALWAYS,
    /** sometimes (2% of the time) emulate a slow hard disk. */
    SOMETIMES,
    /** never throttle output */
    NEVER
  }

  public void setThrottling(Throttling throttling) {
    this.throttling = throttling;
  }

  @Override
  public synchronized void sync(Collection<String> names) throws IOException {
    maybeYield();
    maybeThrowDeterministicException();
    if (crashed) {
      throw new IOException("cannot sync after crash");
    }
    unSyncedFiles.removeAll(names);
    // TODO: need to improve hack to be OK w/
    // RateLimitingDirWrapper in between...
    // NOTE: the leading 'true ||' deliberately disables the rarely()/NRTCaching check for now
    // (see TODO above); the delegate is always synced.
    if (true || LuceneTestCase.rarely(randomState) || delegate instanceof NRTCachingDirectory) {
      // don't wear out our hardware so much in tests.
      delegate.sync(names);
    }
  }

  @Override
  public String toString() {
    // NOTE: do not maybeYield here, since it consumes
    // randomness and can thus (unexpectedly during
    // debugging) change the behavior of a seed
    // maybeYield();
    return "MockDirWrapper(" + delegate + ")";
  }

  public synchronized final long sizeInBytes() throws IOException {
    if (delegate instanceof RAMDirectory)
      return ((RAMDirectory) delegate).sizeInBytes();
    else {
      // hack: sum file lengths for directories that don't track total size directly.
      long size = 0;
      for (String file : delegate.listAll())
        size += delegate.fileLength(file);
      return size;
    }
  }

  /** Simulates a crash of OS or machine by overwriting
   *  unsynced files. */
  public synchronized void crash() throws IOException {
    crashed = true;
    openFiles = new HashMap<String,Integer>();
    openFilesForWrite = new HashSet<String>();
    openFilesDeleted = new HashSet<String>();
    Iterator<String> it = unSyncedFiles.iterator();
    unSyncedFiles = new HashSet<String>();
    // first force-close all files, so we can corrupt on windows etc.
    // clone the file map, as these guys want to remove themselves on close.
Map<Closeable,Exception> m = new IdentityHashMap<Closeable,Exception>(openFileHandles); for (Closeable f : m.keySet()) { try { f.close(); } catch (Exception ignored) {} } while(it.hasNext()) { String name = it.next(); int damage = randomState.nextInt(5); String action = null; if (damage == 0) { action = "deleted"; deleteFile(name, true); } else if (damage == 1) { action = "zeroed"; // Zero out file entirely long length = fileLength(name); byte[] zeroes = new byte[256]; long upto = 0; IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)); while(upto < length) { final int limit = (int) Math.min(length-upto, zeroes.length); out.writeBytes(zeroes, 0, limit); upto += limit; } out.close(); } else if (damage == 2) { action = "partially truncated"; // Partially Truncate the file: // First, make temp file and copy only half this // file over: String tempFileName; while (true) { tempFileName = ""+randomState.nextInt(); if (!delegate.fileExists(tempFileName)) { break; } } final IndexOutput tempOut = delegate.createOutput(tempFileName, LuceneTestCase.newIOContext(randomState)); IndexInput ii = delegate.openInput(name, LuceneTestCase.newIOContext(randomState)); tempOut.copyBytes(ii, ii.length()/2); tempOut.close(); ii.close(); // Delete original and copy bytes back: deleteFile(name, true); final IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)); ii = delegate.openInput(tempFileName, LuceneTestCase.newIOContext(randomState)); out.copyBytes(ii, ii.length()); out.close(); ii.close(); deleteFile(tempFileName, true); } else if (damage == 3) { // The file survived intact: action = "didn't change"; } else { action = "fully truncated"; // Totally truncate the file to zero bytes deleteFile(name, true); IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)); out.setLength(0); out.close(); } if (LuceneTestCase.VERBOSE) { System.out.println("MockDirectoryWrapper: " + action + " unsynced 
file: " + name); } } } public synchronized void clearCrash() { crashed = false; openLocks.clear(); } public void setMaxSizeInBytes(long maxSize) { this.maxSize = maxSize; } public long getMaxSizeInBytes() { return this.maxSize; } /** * Returns the peek actual storage used (bytes) in this * directory. */ public long getMaxUsedSizeInBytes() { return this.maxUsedSize; } public void resetMaxUsedSizeInBytes() throws IOException { this.maxUsedSize = getRecomputedActualSizeInBytes(); } /** * Emulate windows whereby deleting an open file is not * allowed (raise IOException). */ public void setNoDeleteOpenFile(boolean value) { this.noDeleteOpenFile = value; } public boolean getNoDeleteOpenFile() { return noDeleteOpenFile; } /** * If 0.0, no exceptions will be thrown. Else this should * be a double 0.0 - 1.0. We will randomly throw an * IOException on the first write to an OutputStream based * on this probability. */ public void setRandomIOExceptionRate(double rate) { randomIOExceptionRate = rate; } public double getRandomIOExceptionRate() { return randomIOExceptionRate; } void maybeThrowIOException() throws IOException { maybeThrowIOException(null); } void maybeThrowIOException(String message) throws IOException { if (randomIOExceptionRate > 0.0) { int number = Math.abs(randomState.nextInt() % 1000); if (number < randomIOExceptionRate*1000) { if (LuceneTestCase.VERBOSE) { System.out.println(Thread.currentThread().getName() + ": MockDirectoryWrapper: now throw random exception" + (message == null ? "" : " (" + message + ")")); new Throwable().printStackTrace(System.out); } throw new IOException("a random IOException" + (message == null ? 
"" : "(" + message + ")")); } } } @Override public synchronized void deleteFile(String name) throws IOException { maybeYield(); deleteFile(name, false); } // sets the cause of the incoming ioe to be the stack // trace when the offending file name was opened private synchronized IOException fillOpenTrace(IOException ioe, String name, boolean input) { for(Map.Entry<Closeable,Exception> ent : openFileHandles.entrySet()) { if (input && ent.getKey() instanceof MockIndexInputWrapper && ((MockIndexInputWrapper) ent.getKey()).name.equals(name)) { ioe.initCause(ent.getValue()); break; } else if (!input && ent.getKey() instanceof MockIndexOutputWrapper && ((MockIndexOutputWrapper) ent.getKey()).name.equals(name)) { ioe.initCause(ent.getValue()); break; } } return ioe; } private void maybeYield() { if (randomState.nextBoolean()) { Thread.yield(); } } private synchronized void deleteFile(String name, boolean forced) throws IOException { maybeYield(); maybeThrowDeterministicException(); if (crashed && !forced) throw new IOException("cannot delete after crash"); if (unSyncedFiles.contains(name)) unSyncedFiles.remove(name); if (!forced && noDeleteOpenFile) { if (openFiles.containsKey(name)) { openFilesDeleted.add(name); throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot delete"), name, true); } else { openFilesDeleted.remove(name); } } delegate.deleteFile(name); } public synchronized Set<String> getOpenDeletedFiles() { return new HashSet<String>(openFilesDeleted); } private boolean failOnCreateOutput = true; public void setFailOnCreateOutput(boolean v) { failOnCreateOutput = v; } @Override public synchronized IndexOutput createOutput(String name, IOContext context) throws IOException { maybeYield(); if (failOnCreateOutput) { maybeThrowDeterministicException(); } if (crashed) throw new IOException("cannot createOutput after crash"); init(); synchronized(this) { if (preventDoubleWrite && createdFiles.contains(name) && 
!name.equals("segments.gen")) throw new IOException("file \"" + name + "\" was already written to"); } if (noDeleteOpenFile && openFiles.containsKey(name)) throw new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot overwrite"); if (crashed) throw new IOException("cannot createOutput after crash"); unSyncedFiles.add(name); createdFiles.add(name); if (delegate instanceof RAMDirectory) { RAMDirectory ramdir = (RAMDirectory) delegate; RAMFile file = new RAMFile(ramdir); RAMFile existing = ramdir.fileMap.get(name); // Enforce write once: if (existing!=null && !name.equals("segments.gen") && preventDoubleWrite) throw new IOException("file " + name + " already exists"); else { if (existing!=null) { ramdir.sizeInBytes.getAndAdd(-existing.sizeInBytes); existing.directory = null; } ramdir.fileMap.put(name, file); } } //System.out.println(Thread.currentThread().getName() + ": MDW: create " + name); IndexOutput delegateOutput = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState, context)); if (randomState.nextInt(10) == 0){ // once in a while wrap the IO in a Buffered IO with random buffer sizes delegateOutput = new BufferedIndexOutputWrapper(1+randomState.nextInt(BufferedIndexOutput.DEFAULT_BUFFER_SIZE), delegateOutput); } final IndexOutput io = new MockIndexOutputWrapper(this, delegateOutput, name); addFileHandle(io, name, Handle.Output); openFilesForWrite.add(name); // throttling REALLY slows down tests, so don't do it very often for SOMETIMES. 
if (throttling == Throttling.ALWAYS || (throttling == Throttling.SOMETIMES && randomState.nextInt(50) == 0) && !(delegate instanceof RateLimitedDirectoryWrapper)) { if (LuceneTestCase.VERBOSE) { System.out.println("MockDirectoryWrapper: throttling indexOutput"); } return throttledOutput.newFromDelegate(io); } else { return io; } } private static enum Handle { Input, Output, Slice } synchronized void addFileHandle(Closeable c, String name, Handle handle) { Integer v = openFiles.get(name); if (v != null) { v = Integer.valueOf(v.intValue()+1); openFiles.put(name, v); } else { openFiles.put(name, Integer.valueOf(1)); } openFileHandles.put(c, new RuntimeException("unclosed Index" + handle.name() + ": " + name)); } private boolean failOnOpenInput = true; public void setFailOnOpenInput(boolean v) { failOnOpenInput = v; } @Override public synchronized IndexInput openInput(String name, IOContext context) throws IOException { maybeYield(); if (failOnOpenInput) { maybeThrowDeterministicException(); } if (!delegate.fileExists(name)) { throw new FileNotFoundException(name + " in dir=" + delegate); } // cannot open a file for input if it's still open for // output, except for segments.gen and segments_N if (openFilesForWrite.contains(name) && !name.startsWith("segments")) { throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false); } IndexInput delegateInput = delegate.openInput(name, LuceneTestCase.newIOContext(randomState, context)); final IndexInput ii; int randomInt = randomState.nextInt(500); if (randomInt == 0) { if (LuceneTestCase.VERBOSE) { System.out.println("MockDirectoryWrapper: using SlowClosingMockIndexInputWrapper for file " + name); } ii = new SlowClosingMockIndexInputWrapper(this, name, delegateInput); } else if (randomInt == 1) { if (LuceneTestCase.VERBOSE) { System.out.println("MockDirectoryWrapper: using SlowOpeningMockIndexInputWrapper for file " + name); } ii = new 
SlowOpeningMockIndexInputWrapper(this, name, delegateInput); } else { ii = new MockIndexInputWrapper(this, name, delegateInput); } addFileHandle(ii, name, Handle.Input); return ii; } /** Provided for testing purposes. Use sizeInBytes() instead. */ public synchronized final long getRecomputedSizeInBytes() throws IOException { if (!(delegate instanceof RAMDirectory)) return sizeInBytes(); long size = 0; for(final RAMFile file: ((RAMDirectory)delegate).fileMap.values()) { size += file.getSizeInBytes(); } return size; } /** Like getRecomputedSizeInBytes(), but, uses actual file * lengths rather than buffer allocations (which are * quantized up to nearest * RAMOutputStream.BUFFER_SIZE (now 1024) bytes. */ public final synchronized long getRecomputedActualSizeInBytes() throws IOException { if (!(delegate instanceof RAMDirectory)) return sizeInBytes(); long size = 0; for (final RAMFile file : ((RAMDirectory)delegate).fileMap.values()) size += file.length; return size; } private boolean assertNoUnreferencedFilesOnClose = true; public void setAssertNoUnrefencedFilesOnClose(boolean v) { assertNoUnreferencedFilesOnClose = v; } /** * Set to false if you want to return the pure lockfactory * and not wrap it with MockLockFactoryWrapper. * <p> * Be careful if you turn this off: MockDirectoryWrapper might * no longer be able to detect if you forget to close an IndexWriter, * and spit out horribly scary confusing exceptions instead of * simply telling you that. */ public void setWrapLockFactory(boolean v) { this.wrapLockFactory = v; } @Override public synchronized void close() throws IOException { // files that we tried to delete, but couldn't because readers were open. // all that matters is that we tried! 
(they will eventually go away) Set<String> pendingDeletions = new HashSet<String>(openFilesDeleted); maybeYield(); if (openFiles == null) { openFiles = new HashMap<String,Integer>(); openFilesDeleted = new HashSet<String>(); } if (noDeleteOpenFile && openFiles.size() > 0) { // print the first one as its very verbose otherwise Exception cause = null; Iterator<Exception> stacktraces = openFileHandles.values().iterator(); if (stacktraces.hasNext()) cause = stacktraces.next(); // RuntimeException instead of IOException because // super() does not throw IOException currently: throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause); } if (noDeleteOpenFile && openLocks.size() > 0) { throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks); } isOpen = false; if (getCheckIndexOnClose()) { if (indexPossiblyExists()) { if (LuceneTestCase.VERBOSE) { System.out.println("\nNOTE: MockDirectoryWrapper: now crash"); } crash(); // corrumpt any unsynced-files if (LuceneTestCase.VERBOSE) { System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex"); } _TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose()); // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles if (assertNoUnreferencedFilesOnClose) { // now look for unreferenced files: discount ones that we tried to delete but could not Set<String> allFiles = new HashSet<String>(Arrays.asList(listAll())); allFiles.removeAll(pendingDeletions); String[] startFiles = allFiles.toArray(new String[0]); IndexWriterConfig iwc = new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null); iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); new IndexWriter(delegate, iwc).rollback(); String[] endFiles = delegate.listAll(); Set<String> startSet = new TreeSet<String>(Arrays.asList(startFiles)); Set<String> endSet = new TreeSet<String>(Arrays.asList(endFiles)); if 
(pendingDeletions.contains("segments.gen") && endSet.contains("segments.gen")) { // this is possible if we hit an exception while writing segments.gen, we try to delete it // and it ends out in pendingDeletions (but IFD wont remove this). startSet.add("segments.gen"); if (LuceneTestCase.VERBOSE) { System.out.println("MDW: Unreferenced check: Ignoring segments.gen that we could not delete."); } } // its possible we cannot delete the segments_N on windows if someone has it open and // maybe other files too, depending on timing. normally someone on windows wouldnt have // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy... for (String file : pendingDeletions) { if (file.startsWith("segments") && !file.equals("segments.gen") && endSet.contains(file)) { startSet.add(file); if (LuceneTestCase.VERBOSE) { System.out.println("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete."); } SegmentInfos sis = new SegmentInfos(); try { sis.read(delegate, file); } catch (IOException ioe) { // OK: likely some of the .si files were deleted } try { Set<String> ghosts = new HashSet<String>(sis.files(delegate, false)); for (String s : ghosts) { if (endSet.contains(s) && !startSet.contains(s)) { assert pendingDeletions.contains(s); if (LuceneTestCase.VERBOSE) { System.out.println("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + "from " + file + " that we could not delete."); } startSet.add(s); } } } catch (Throwable t) { System.err.println("ERROR processing leftover segments file " + file + ":"); t.printStackTrace(); } } } startFiles = startSet.toArray(new String[0]); endFiles = endSet.toArray(new String[0]); if (!Arrays.equals(startFiles, endFiles)) { List<String> removed = new ArrayList<String>(); for(String fileName : startFiles) { if (!endSet.contains(fileName)) { removed.add(fileName); } } List<String> added = new ArrayList<String>(); for(String fileName : endFiles) { if (!startSet.contains(fileName)) { 
added.add(fileName); } } String extras; if (removed.size() != 0) { extras = "\n\nThese files were removed: " + removed; } else { extras = ""; } if (added.size() != 0) { extras += "\n\nThese files were added (waaaaaaaaaat!): " + added; } if (pendingDeletions.size() != 0) { extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } assert false : "unreferenced files: before delete:\n " + Arrays.toString(startFiles) + "\n after delete:\n " + Arrays.toString(endFiles) + extras; } DirectoryReader ir1 = DirectoryReader.open(this); int numDocs1 = ir1.numDocs(); ir1.close(); new IndexWriter(this, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null)).close(); DirectoryReader ir2 = DirectoryReader.open(this); int numDocs2 = ir2.numDocs(); ir2.close(); assert numDocs1 == numDocs2 : "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2; } } } delegate.close(); } synchronized void removeOpenFile(Closeable c, String name) { Integer v = openFiles.get(name); // Could be null when crash() was called if (v != null) { if (v.intValue() == 1) { openFiles.remove(name); } else { v = Integer.valueOf(v.intValue()-1); openFiles.put(name, v); } } openFileHandles.remove(c); } public synchronized void removeIndexOutput(IndexOutput out, String name) { openFilesForWrite.remove(name); removeOpenFile(out, name); } public synchronized void removeIndexInput(IndexInput in, String name) { removeOpenFile(in, name); } /** * Objects that represent fail-able conditions. Objects of a derived * class are created and registered with the mock directory. After * register, each object will be invoked once for each first write * of a file, giving the object a chance to throw an IOException. */ public static class Failure { /** * eval is called on the first write of every new file. 
*/ public void eval(MockDirectoryWrapper dir) throws IOException { } /** * reset should set the state of the failure to its default * (freshly constructed) state. Reset is convenient for tests * that want to create one failure object and then reuse it in * multiple cases. This, combined with the fact that Failure * subclasses are often anonymous classes makes reset difficult to * do otherwise. * * A typical example of use is * Failure failure = new Failure() { ... }; * ... * mock.failOn(failure.reset()) */ public Failure reset() { return this; } protected boolean doFail; public void setDoFail() { doFail = true; } public void clearDoFail() { doFail = false; } } ArrayList<Failure> failures; /** * add a Failure object to the list of objects to be evaluated * at every potential failure point */ synchronized public void failOn(Failure fail) { if (failures == null) { failures = new ArrayList<Failure>(); } failures.add(fail); } /** * Iterate through the failures list, giving each object a * chance to throw an IOE */ synchronized void maybeThrowDeterministicException() throws IOException { if (failures != null) { for(int i = 0; i < failures.size(); i++) { failures.get(i).eval(this); } } } @Override public synchronized String[] listAll() throws IOException { maybeYield(); return delegate.listAll(); } @Override public synchronized boolean fileExists(String name) throws IOException { maybeYield(); return delegate.fileExists(name); } @Override public synchronized long fileLength(String name) throws IOException { maybeYield(); return delegate.fileLength(name); } @Override public synchronized Lock makeLock(String name) { maybeYield(); return getLockFactory().makeLock(name); } @Override public synchronized void clearLock(String name) throws IOException { maybeYield(); getLockFactory().clearLock(name); } @Override public synchronized void setLockFactory(LockFactory lockFactory) throws IOException { maybeYield(); // sneaky: we must pass the original this way to the dir, because // 
some impls (e.g. FSDir) do instanceof here. delegate.setLockFactory(lockFactory); // now set our wrapped factory here this.lockFactory = new MockLockFactoryWrapper(this, lockFactory); } @Override public synchronized LockFactory getLockFactory() { maybeYield(); if (wrapLockFactory) { return lockFactory; } else { return delegate.getLockFactory(); } } @Override public synchronized String getLockID() { maybeYield(); return delegate.getLockID(); } @Override public synchronized void copy(Directory to, String src, String dest, IOContext context) throws IOException { maybeYield(); // randomize the IOContext here? delegate.copy(to, src, dest, context); } @Override public IndexInputSlicer createSlicer(final String name, IOContext context) throws IOException { maybeYield(); if (!delegate.fileExists(name)) { throw new FileNotFoundException(name); } // cannot open a file for input if it's still open for // output, except for segments.gen and segments_N if (openFilesForWrite.contains(name) && !name.startsWith("segments")) { throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false); } final IndexInputSlicer delegateHandle = delegate.createSlicer(name, context); final IndexInputSlicer handle = new IndexInputSlicer() { private boolean isClosed; @Override public void close() throws IOException { if (!isClosed) { delegateHandle.close(); MockDirectoryWrapper.this.removeOpenFile(this, name); isClosed = true; } } @Override public IndexInput openSlice(String sliceDescription, long offset, long length) throws IOException { maybeYield(); IndexInput ii = new MockIndexInputWrapper(MockDirectoryWrapper.this, name, delegateHandle.openSlice(sliceDescription, offset, length)); addFileHandle(ii, name, Handle.Input); return ii; } @Override public IndexInput openFullSlice() throws IOException { maybeYield(); IndexInput ii = new MockIndexInputWrapper(MockDirectoryWrapper.this, name, delegateHandle.openFullSlice()); addFileHandle(ii, 
name, Handle.Input); return ii; } }; addFileHandle(handle, name, Handle.Slice); return handle; } final class BufferedIndexOutputWrapper extends BufferedIndexOutput { private final IndexOutput io; public BufferedIndexOutputWrapper(int bufferSize, IndexOutput io) { super(bufferSize); this.io = io; } @Override public long length() throws IOException { return io.length(); } @Override protected void flushBuffer(byte[] b, int offset, int len) throws IOException { io.writeBytes(b, offset, len); } @Override public void seek(long pos) throws IOException { flush(); io.seek(pos); } @Override public void flush() throws IOException { try { super.flush(); } finally { io.flush(); } } @Override public void close() throws IOException { try { super.close(); } finally { io.close(); } } } }
package org.drip.sample.rates; import org.drip.analytics.date.JulianDate; import org.drip.analytics.rates.DiscountCurve; import org.drip.param.creator.*; import org.drip.param.valuation.ValuationParams; import org.drip.product.creator.*; import org.drip.product.definition.CalibratableComponent; import org.drip.quant.common.FormatUtil; import org.drip.quant.function1D.QuadraticRationalShapeControl; import org.drip.service.api.CreditAnalytics; import org.drip.spline.basis.*; import org.drip.spline.params.*; import org.drip.spline.stretch.*; import org.drip.state.estimator.*; /* * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /*! * Copyright (C) 2014 Lakshmi Krishnamurthy * Copyright (C) 2013 Lakshmi Krishnamurthy * * This file is part of CreditAnalytics, a free-software/open-source library for fixed income analysts and * developers - http://www.credit-trader.org * * CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special * focus towards the needs of the bonds and credit products community. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ /** * ShapePreservingDFZeroSmooth demonstrates the usage of different shape preserving and smoothing techniques * involved in the discount curve creation. It shows the following: * - Construct the Array of Cash/Swap Instruments and their Quotes from the given set of parameters. * - Construct the Cash/Swap Instrument Set Stretch Builder. 
* - Set up the Linear Curve Calibrator using the following parameters: * - Cubic Exponential Mixture Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Set up the Global Curve Control parameters as follows: * - Zero Rate Quantification Metric * - Cubic Polynomial Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Set up the Local Curve Control parameters as follows: * - C1 Bessel Monotone Smoothener with no spurious extrema elimination and no monotone filter * - Zero Rate Quantification Metric * - Cubic Polynomial Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Construct the Shape Preserving Discount Curve by applying the linear curve calibrator to the array of * Cash and Swap Stretches. * - Construct the Globally Smoothened Discount Curve by applying the linear curve calibrator and the Global * Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving discount * curve. * - Construct the Locally Smoothened Discount Curve by applying the linear curve calibrator and the Local * Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving discount * curve. * - Cross-Comparison of the Cash/Swap Calibration Instrument "Rate" metric across the different curve * construction methodologies. * - Cross-Comparison of the Swap Calibration Instrument "Rate" metric across the different curve * construction methodologies for a sequence of bespoke swap instruments. * * @author Lakshmi Krishnamurthy */ public class ShapePreservingDFZeroSmooth { /* * Construct the Array of Cash Instruments from the given set of parameters * * USE WITH CARE: This sample ignores errors and does not handle exceptions. 
 */

	// Builds one spot-starting MXN cash (deposit) instrument per input tenor.
	// Maturity is the effective date rolled forward by the tenor via
	// addTenorAndAdjust — presumably applying the MXN business-day calendar;
	// TODO confirm against JulianDate.addTenorAndAdjust semantics.
	private static final CalibratableComponent[] CashInstrumentsFromMaturityDays (
		final JulianDate dtEffective,
		final java.lang.String[] astrTenor)
		throws Exception
	{
		CalibratableComponent[] aCash = new CalibratableComponent[astrTenor.length];

		// One CashBuilder instrument per tenor, all denominated in MXN
		for (int i = 0; i < astrTenor.length; ++i)
			aCash[i] = CashBuilder.CreateCash (dtEffective, dtEffective.addTenorAndAdjust (astrTenor[i], "MXN"), "MXN");

		return aCash;
	}

	/*
	 * Construct the Array of Swap Instruments from the given set of parameters
	 *
	 * USE WITH CARE: This sample ignores errors and does not handle exceptions.
	 */

	// Builds one MXN fixed-float IRS per input tenor, fixed at a 0. coupon and
	// floating off "MXN-LIBOR-6M". As above, maturity comes from
	// addTenorAndAdjust on the effective date.
	private static final CalibratableComponent[] SwapInstrumentsFromMaturityTenor (
		final JulianDate dtEffective,
		final String[] astrTenor)
		throws Exception
	{
		CalibratableComponent[] aSwap = new CalibratableComponent[astrTenor.length];

		for (int i = 0; i < astrTenor.length; ++i)
			aSwap[i] = RatesStreamBuilder.CreateIRS (
				dtEffective,
				dtEffective.addTenorAndAdjust (astrTenor[i], "MXN"),
				0.,
				"MXN",
				"MXN-LIBOR-6M",
				"MXN");

		return aSwap;
	}

	/*
	 * This sample demonstrates the usage of different shape preserving and smoothing techniques involved in
	 * 	the discount curve creation. It shows the following:
	 * 	- Construct the Array of Cash/Swap Instruments and their Quotes from the given set of parameters.
	 * 	- Construct the Cash/Swap Instrument Set Stretch Builder.
* - Set up the Linear Curve Calibrator using the following parameters: * - Cubic Exponential Mixture Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Set up the Global Curve Control parameters as follows: * - Zero Rate Quantification Metric * - Cubic Polynomial Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Set up the Local Curve Control parameters as follows: * - C1 Bessel Monotone Smoothener with no spurious extrema elimination and no monotone filter * - Zero Rate Quantification Metric * - Cubic Polynomial Basis Spline Set * - Ck = 2, Segment Curvature Penalty = 2 * - Quadratic Rational Shape Controller * - Natural Boundary Setting * - Construct the Shape Preserving Discount Curve by applying the linear curve calibrator to the array * of Cash and Swap Stretches. * - Construct the Globally Smoothened Discount Curve by applying the linear curve calibrator and the * Global Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving * discount curve. * - Construct the Locally Smoothened Discount Curve by applying the linear curve calibrator and the * Local Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving * discount curve. * - Cross-Comparison of the Cash/Swap Calibration Instrument "Rate" metric across the different curve * construction methodologies. * - Cross-Comparison of the Swap Calibration Instrument "Rate" metric across the different curve * construction methodologies for a sequence of bespoke swap instruments. * * USE WITH CARE: This sample ignores errors and does not handle exceptions. 
*/

	/**
	 * Builds an MXN discount curve three ways — shape-preserving, globally smoothened, and locally
	 * smoothened — from one cash quote and a strip of swap quotes, then prints the calibration
	 * recovery of each methodology side by side against the input quotes.
	 *
	 * NOTE(review): all builders/types come from the DRIP analytics library; their exact semantics
	 * are documented there, not here.
	 *
	 * @throws Exception Propagated from the DRIP calibration/valuation machinery.
	 */

	private static final void ShapePreservingDFZeroSmoothSample()
		throws Exception
	{
		/*
		 * Initialize the Credit Analytics Library
		 */

		CreditAnalytics.Init ("");

		// Spot date: today, tenor-adjusted on the MXN calendar
		JulianDate dtToday = JulianDate.Today().addTenorAndAdjust ("0D", "MXN");

		/*
		 * Construct the Array of Cash Instruments and their Quotes from the given set of parameters
		 */

		CalibratableComponent[] aCashComp = CashInstrumentsFromMaturityDays (
			dtToday,
			new java.lang.String[] {"1M"});

		double[] adblCashQuote = new double[] {0.0403};

		/*
		 * Construct the Cash Instrument Set Stretch Builder
		 */

		StretchRepresentationSpec rrsCash = StretchRepresentationSpec.CreateStretchBuilderSet (
			"CASH",
			DiscountCurve.LATENT_STATE_DISCOUNT,
			DiscountCurve.QUANTIFICATION_METRIC_DISCOUNT_FACTOR,
			aCashComp,
			"Rate",
			adblCashQuote,
			null);

		/*
		 * Construct the Array of Swap Instruments and their Quotes from the given set of parameters
		 */

		CalibratableComponent[] aSwapComp = SwapInstrumentsFromMaturityTenor (
			dtToday,
			new java.lang.String[] {"3M", "6M", "9M", "1Y", "2Y", "3Y", "4Y", "5Y", "7Y", "10Y", "15Y", "20Y", "30Y"});

		double[] adblSwapQuote = new double[] {0.0396, 0.0387, 0.0388, 0.0389, 0.04135, 0.04455, 0.0486, 0.0526, 0.0593, 0.0649, 0.0714596, 0.0749596, 0.0776};

		/*
		 * Construct the Swap Instrument Set Stretch Builder
		 */

		StretchRepresentationSpec rrsSwap = StretchRepresentationSpec.CreateStretchBuilderSet (
			"SWAP",
			DiscountCurve.LATENT_STATE_DISCOUNT,
			DiscountCurve.QUANTIFICATION_METRIC_DISCOUNT_FACTOR,
			aSwapComp,
			"Rate",
			adblSwapQuote,
			null);

		// Cash stretch first, then swaps — together they span the full maturity range
		StretchRepresentationSpec[] aRRS = new StretchRepresentationSpec[] {rrsCash, rrsSwap};

		/*
		 * Set up the Linear Curve Calibrator using the following parameters:
		 * 	- Cubic Exponential Mixture Basis Spline Set
		 * 	- Ck = 2, Segment Curvature Penalty = 2
		 * 	- Quadratic Rational Shape Controller
		 * 	- Natural Boundary Setting
		 */

		LinearCurveCalibrator lcc = new LinearCurveCalibrator (
			new SegmentCustomBuilderControl (
				MultiSegmentSequenceBuilder.BASIS_SPLINE_EXPONENTIAL_MIXTURE,
				new ExponentialMixtureSetParams (new double[] {0.01, 0.05, 0.25}),
				SegmentDesignInelasticControl.Create (2, 2),
				new ResponseScalingShapeControl (true, new QuadraticRationalShapeControl (0.))),
			BoundarySettings.NaturalStandard(),
			MultiSegmentSequence.CALIBRATE,
			null,
			null);

		/*
		 * Set up the Global Curve Control parameters as follows:
		 * 	- Zero Rate Quantification Metric
		 * 	- Cubic Polynomial Basis Spline Set
		 * 	- Ck = 2, Segment Curvature Penalty = 2
		 * 	- Quadratic Rational Shape Controller
		 * 	- Natural Boundary Setting
		 */

		GlobalControlCurveParams gccp = new GlobalControlCurveParams (
			org.drip.analytics.rates.DiscountCurve.QUANTIFICATION_METRIC_ZERO_RATE,
			new SegmentCustomBuilderControl (
				MultiSegmentSequenceBuilder.BASIS_SPLINE_POLYNOMIAL,
				new PolynomialFunctionSetParams (4),
				SegmentDesignInelasticControl.Create (2, 2),
				new ResponseScalingShapeControl (true, new QuadraticRationalShapeControl (0.))),
			BoundarySettings.NaturalStandard(),
			MultiSegmentSequence.CALIBRATE,
			null,
			null);

		/*
		 * Set up the Local Curve Control parameters as follows:
		 * 	- C1 Bessel Monotone Smoothener with no spurious extrema elimination and no monotone filter
		 * 	- Zero Rate Quantification Metric
		 * 	- Cubic Polynomial Basis Spline Set
		 * 	- Ck = 2, Segment Curvature Penalty = 2
		 * 	- Quadratic Rational Shape Controller
		 * 	- Natural Boundary Setting
		 */

		LocalControlCurveParams lccp = new LocalControlCurveParams (
			org.drip.spline.pchip.LocalMonotoneCkGenerator.C1_BESSEL,
			org.drip.analytics.rates.DiscountCurve.QUANTIFICATION_METRIC_ZERO_RATE,
			new SegmentCustomBuilderControl (
				MultiSegmentSequenceBuilder.BASIS_SPLINE_POLYNOMIAL,
				new PolynomialFunctionSetParams (4),
				SegmentDesignInelasticControl.Create (2, 2),
				new ResponseScalingShapeControl (true, new QuadraticRationalShapeControl (0.))),
			MultiSegmentSequence.CALIBRATE,
			null,
			null,
			false,
			false);

		/*
		 * Construct the Shape Preserving Discount Curve by applying the linear curve calibrator to the array
		 *  of Cash and Swap Stretches.
		 */

		DiscountCurve dcShapePreserving = RatesScenarioCurveBuilder.ShapePreservingDFBuild (
			lcc,
			aRRS,
			new ValuationParams (dtToday, dtToday, "MXN"),
			null,
			null,
			null,
			1.);

		/*
		 * Construct the Globally Smoothened Discount Curve by applying the linear curve calibrator and the
		 * 	Global Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving
		 * 	discount curve.
		 */

		DiscountCurve dcGloballySmooth = RatesScenarioCurveBuilder.SmoothingGlobalControlBuild (
			dcShapePreserving,
			lcc,
			gccp,
			aRRS,
			new ValuationParams (dtToday, dtToday, "MXN"),
			null,
			null,
			null);

		/*
		 * Construct the Locally Smoothened Discount Curve by applying the linear curve calibrator and the
		 * 	Local Curve Control parameters to the array of Cash and Swap Stretches and the shape preserving
		 *  discount curve.
		 */

		DiscountCurve dcLocallySmooth = RatesScenarioCurveBuilder.SmoothingLocalControlBuild (
			dcShapePreserving,
			lcc,
			lccp,
			aRRS,
			new ValuationParams (dtToday, dtToday, "MXN"),
			null,
			null,
			null);

		/*
		 * Cross-Comparison of the Cash Calibration Instrument "Rate" metric across the different curve
		 * 	construction methodologies.
		 */

		System.out.println ("\n\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t CASH INSTRUMENTS CALIBRATION RECOVERY");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t SHAPE PRESERVING | SMOOTHING #1 | SMOOTHING #2 | INPUT QUOTE ");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		// Each calibrated curve should reprice the cash instrument back to its input quote
		for (int i = 0; i < aCashComp.length; ++i)
			System.out.println ("\t[" + aCashComp[i].getMaturityDate() + "] = " +
				FormatUtil.FormatDouble (
					aCashComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcShapePreserving, null, null, null, null, null, null),
						null,
						"Rate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aCashComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcGloballySmooth, null, null, null, null, null, null),
						null,
						"Rate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aCashComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcLocallySmooth, null, null, null, null, null, null),
						null,
						"Rate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (adblCashQuote[i], 1, 6, 1.)
			);

		/*
		 * Cross-Comparison of the Swap Calibration Instrument "Rate" metric across the different curve
		 * 	construction methodologies.
		 */

		System.out.println ("\n\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t SWAP INSTRUMENTS CALIBRATION RECOVERY");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t SHAPE PRESERVING | SMOOTHING #1 | SMOOTHING #2 | INPUT QUOTE ");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		// Same recovery check for the swap strip, using the "CalibSwapRate" measure
		for (int i = 0; i < aSwapComp.length; ++i)
			System.out.println ("\t[" + aSwapComp[i].getMaturityDate() + "] = " +
				FormatUtil.FormatDouble (
					aSwapComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcShapePreserving, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aSwapComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcGloballySmooth, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aSwapComp[i].calcMeasureValue (
						new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcLocallySmooth, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (adblSwapQuote[i], 1, 6, 1.)
			);

		/*
		 * Cross-Comparison of the Swap Calibration Instrument "Rate" metric across the different curve
		 * 	construction methodologies for a sequence of bespoke swap instruments.
		 */

		CalibratableComponent[] aCC = SwapInstrumentsFromMaturityTenor (
			dtToday,
			new java.lang.String[] {"3Y", "6Y", "9Y", "12Y", "15Y", "18Y", "21Y", "24Y", "27Y", "30Y"});

		System.out.println ("\n\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t BESPOKE SWAPS PAR RATE");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t SHAPE PRESERVING | SMOOTHING #1 | SMOOTHING #2");

		System.out.println ("\t----------------------------------------------------------------");

		System.out.println ("\t----------------------------------------------------------------");

		// Off-pillar (bespoke) maturities expose how the three curves differ between calibration nodes
		for (int i = 0; i < aCC.length; ++i)
			System.out.println ("\t[" + aCC[i].getMaturityDate() + "] = " +
				FormatUtil.FormatDouble (
					aCC[i].calcMeasureValue (new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcShapePreserving, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aCC[i].calcMeasureValue (new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcGloballySmooth, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.) + " | " +
				FormatUtil.FormatDouble (
					aCC[i].calcMeasureValue (new ValuationParams (dtToday, dtToday, "MXN"), null,
						ComponentMarketParamsBuilder.CreateComponentMarketParams (dcLocallySmooth, null, null, null, null, null, null),
						null,
						"CalibSwapRate"),
					1, 6, 1.)
			);
	}

	/**
	 * Entry point: runs the shape-preserving vs. smoothened discount curve comparison.
	 *
	 * @param astrArgs Command-line arguments (unused)
	 *
	 * @throws Exception Propagated from the sample
	 */

	public static final void main (
		final String[] astrArgs)
		throws Exception
	{
		ShapePreservingDFZeroSmoothSample();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.jdbc; import java.sql.SQLException; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.marshaller.Marshaller; import org.apache.ignite.marshaller.jdk.JdkMarshaller; import static java.sql.Types.BIGINT; import static java.sql.Types.BINARY; import static java.sql.Types.BOOLEAN; import static java.sql.Types.DATE; import static java.sql.Types.DOUBLE; import static java.sql.Types.FLOAT; import static java.sql.Types.INTEGER; import static java.sql.Types.OTHER; import static java.sql.Types.SMALLINT; import static java.sql.Types.TIME; import static java.sql.Types.TIMESTAMP; import static java.sql.Types.TINYINT; import static java.sql.Types.VARCHAR; /** * Utility methods for JDBC driver. */ class JdbcUtils { /** Marshaller. */ private static final Marshaller MARSHALLER = new JdkMarshaller(); /** * Marshals task argument to byte array. * * @param args Task argument. * @return Byte array. 
* @throws SQLException In marshalling failed. */ public static byte[] marshalArgument(Map<String, Object> args) throws SQLException { assert args != null; try { return MARSHALLER.marshal(args); } catch (IgniteCheckedException e) { throw new SQLException("Failed to unmarshal result.", e); } } /** * Unmarshals exception from byte array. * * @param bytes Byte array. * @return Exception. * @throws SQLException If unmarshalling failed. */ public static SQLException unmarshalError(byte[] bytes) throws SQLException { return unmarshal(bytes); } /** * Unmarshals object from byte array. * * @param bytes Byte array. * @return Object. * @throws SQLException If unmarshalling failed. */ public static <T> T unmarshal(byte[] bytes) throws SQLException { assert bytes != null; try { return MARSHALLER.unmarshal(bytes, null); } catch (IgniteCheckedException e) { throw new SQLException("Failed to unmarshal result.", e); } } /** * Creates task argument for first execution. * * @param nodeId Node ID. * @param cacheName Cache name. * @param sql SQL query. * @param timeout Query timeout. * @param args Query arguments. * @param pageSize Page size. * @param maxRows Maximum number of rows. * @return Task argument. */ public static Map<String, Object> taskArgument(UUID nodeId, String cacheName, String sql, long timeout, Object[] args, int pageSize, int maxRows) { assert sql != null; assert timeout >= 0; assert pageSize > 0; assert maxRows >= 0; Map<String, Object> map = U.newHashMap(7); map.put("confNodeId", nodeId); map.put("cache", cacheName); map.put("sql", sql); map.put("timeout", timeout); map.put("args", args != null ? Arrays.asList(args) : Collections.emptyList()); map.put("pageSize", pageSize); map.put("maxRows", maxRows); return map; } /** * Creates task argument. * * @param nodeId Node ID. * @param futId Future ID. * @param pageSize Page size. * @param maxRows Maximum number of rows. * @return Task argument. 
*/ public static Map<String, Object> taskArgument(UUID nodeId, UUID futId, int pageSize, int maxRows) { assert nodeId != null; assert futId != null; assert pageSize > 0; assert maxRows >= 0; Map<String, Object> map = U.newHashMap(4); map.put("nodeId", nodeId); map.put("futId", futId); map.put("pageSize", pageSize); map.put("maxRows", maxRows); return map; } /** * Converts Java class name to type from {@link Types}. * * @param cls Java class name. * @return Type from {@link Types}. */ @SuppressWarnings("IfMayBeConditional") public static int type(String cls) { if (Boolean.class.getName().equals(cls) || boolean.class.getName().equals(cls)) return BOOLEAN; else if (Byte.class.getName().equals(cls) || byte.class.getName().equals(cls)) return TINYINT; else if (Short.class.getName().equals(cls) || short.class.getName().equals(cls)) return SMALLINT; else if (Integer.class.getName().equals(cls) || int.class.getName().equals(cls)) return INTEGER; else if (Long.class.getName().equals(cls) || long.class.getName().equals(cls)) return BIGINT; else if (Float.class.getName().equals(cls) || float.class.getName().equals(cls)) return FLOAT; else if (Double.class.getName().equals(cls) || double.class.getName().equals(cls)) return DOUBLE; else if (String.class.getName().equals(cls)) return VARCHAR; else if (byte[].class.getName().equals(cls)) return BINARY; else if (Time.class.getName().equals(cls)) return TIME; else if (Timestamp.class.getName().equals(cls)) return TIMESTAMP; else if (Date.class.getName().equals(cls)) return DATE; else return OTHER; } /** * Converts Java class name to SQL type name. * * @param cls Java class name. * @return SQL type name. 
*/ @SuppressWarnings("IfMayBeConditional") public static String typeName(String cls) { if (Boolean.class.getName().equals(cls) || boolean.class.getName().equals(cls)) return "BOOLEAN"; else if (Byte.class.getName().equals(cls) || byte.class.getName().equals(cls)) return "TINYINT"; else if (Short.class.getName().equals(cls) || short.class.getName().equals(cls)) return "SMALLINT"; else if (Integer.class.getName().equals(cls) || int.class.getName().equals(cls)) return "INTEGER"; else if (Long.class.getName().equals(cls) || long.class.getName().equals(cls)) return "BIGINT"; else if (Float.class.getName().equals(cls) || float.class.getName().equals(cls)) return "FLOAT"; else if (Double.class.getName().equals(cls) || double.class.getName().equals(cls)) return "DOUBLE"; else if (String.class.getName().equals(cls)) return "VARCHAR"; else if (byte[].class.getName().equals(cls)) return "BINARY"; else if (Time.class.getName().equals(cls)) return "TIME"; else if (Timestamp.class.getName().equals(cls)) return "TIMESTAMP"; else if (Date.class.getName().equals(cls)) return "DATE"; else return "OTHER"; } /** * Determines whether type is nullable. * * @param name Column name. * @param cls Java class name. * @return {@code True} if nullable. */ public static boolean nullable(String name, String cls) { return !"_KEY".equalsIgnoreCase(name) && !"_VAL".equalsIgnoreCase(name) && !(boolean.class.getName().equals(cls) || byte.class.getName().equals(cls) || short.class.getName().equals(cls) || int.class.getName().equals(cls) || long.class.getName().equals(cls) || float.class.getName().equals(cls) || double.class.getName().equals(cls)); } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.stubs;

import com.intellij.diagnostic.PluginException;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageParserDefinitions;
import com.intellij.lang.ParserDefinition;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.psi.templateLanguages.TemplateLanguage;
import com.intellij.psi.tree.IFileElementType;
import com.intellij.psi.tree.IStubFileElementType;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.IndexInfrastructure;
import com.intellij.util.indexing.IndexingStamp;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import gnu.trove.TLongObjectHashMap;
import gnu.trove.TObjectLongHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Maintains a persisted mapping between file types and the per-file-type "stamp" used by the
 * stub-updating index. The mapping is stored in a small registry file whose first line is the
 * current stub index creation stamp and whose subsequent lines are
 * {@code name<RECORD_SEPARATOR>ownerInfo,version<RECORD_SEPARATOR>stamp} records.
 * When the set of indexable file types or their stub versions changes incompatibly, construction
 * fails with {@link IOException}, which callers treat as "rebuild the stub index".
 */
class StubVersionMap {
  private static final String INDEXED_FILETYPES = "indexed_filetypes";
  // Field separator inside one record line; \uFFFF cannot occur in file type names.
  private static final String RECORD_SEPARATOR = "\uFFFF";
  private static final String LINE_SEPARATOR = "\n";
  private static final Charset ourEncoding = CharsetToolkit.UTF8_CHARSET;
  private static final Logger LOG = Logger.getInstance(StubVersionMap.class);
  // File type -> the object whose class+version defines the stub format (IStubFileElementType
  // for language file types, BinaryFileStubBuilder for binary ones).
  private final Map<FileType, Object> fileTypeToVersionOwner = new THashMap<>();
  // Bidirectional file type <-> stamp mapping; stamps are unique (see registerStamp).
  private final TObjectLongHashMap<FileType> fileTypeToVersion = new TObjectLongHashMap<>();
  private final TLongObjectHashMap<FileType> versionToFileType = new TLongObjectHashMap<>();
  // Creation stamp of the stub index at the time state was last loaded; baseline for diffs.
  private long myStubIndexStamp;

  StubVersionMap() throws IOException {
    // Collect every registered file type that has a stub version owner, then load/refresh
    // the persisted state (may throw IOException to request an index rebuild).
    for (final FileType fileType : FileTypeRegistry.getInstance().getRegisteredFileTypes()) {
      Object owner = getVersionOwner(fileType);
      if (owner != null) {
        fileTypeToVersionOwner.put(fileType, owner);
      }
    }

    updateState();
  }

  /**
   * Reconciles the in-memory stamp maps with the on-disk registry: reuses stamps for unchanged
   * file types, allocates fresh stamps for added/updated ones, and rewrites the registry file
   * if anything changed. Throws {@link IOException} when a full stub index rebuild is required.
   */
  private void updateState() throws IOException {
    final long currentStubIndexStamp = IndexingStamp.getIndexCreationStamp(StubUpdatingIndex.INDEX_ID);
    File allIndexedFiles = allIndexedFilesRegistryFile();

    List<String> removedFileTypes = new ArrayList<>();
    List<FileType> updatedFileTypes = new ArrayList<>();
    List<FileType> addedFileTypes = new ArrayList<>();
    long lastUsedCounter = currentStubIndexStamp;

    boolean canUsePreviousMappings = allIndexedFiles.exists();
    FileTypeRegistry fileTypeRegistry = FileTypeRegistry.getInstance();
    Set<FileType> loadedFileTypes = new THashSet<>();
    if (canUsePreviousMappings) {
      List<String> stringList = StringUtil.split(FileUtil.loadFile(allIndexedFiles, ourEncoding), LINE_SEPARATOR);

      // Line 0 holds the index stamp the registry was written for; a mismatch invalidates it all.
      long allIndexedFilesVersion = Long.parseLong(stringList.get(0));

      if (allIndexedFilesVersion == currentStubIndexStamp) {
        for (int i = 1, size = stringList.size(); i < size; ++i) {
          List<String> strings = StringUtil.split(stringList.get(i), RECORD_SEPARATOR);
          String fileTypeName = strings.get(0);
          long usedTimeStamp = Long.parseLong(strings.get(2));
          // Track the smallest stamp seen, so freshly allocated stamps stay below all of them.
          lastUsedCounter = Math.min(lastUsedCounter, usedTimeStamp);

          FileType fileType = fileTypeRegistry.findFileTypeByName(fileTypeName);
          if (fileType == null) removedFileTypes.add(fileTypeName);
          else {
            loadedFileTypes.add(fileType);
            Object owner = getVersionOwner(fileType);
            if (owner == null) removedFileTypes.add(fileTypeName);
            else {
              // strings.get(1) is the persisted "ownerInfo,version"; any difference means the
              // stub format for this file type changed and it needs a new stamp.
              if (!Comparing.equal(strings.get(1), typeAndVersion(owner))) {
                updatedFileTypes.add(fileType);
              }
              else {
                registerStamp(fileType, usedTimeStamp);
              }
            }
          }
        }
      }
      else {
        canUsePreviousMappings = false;
      }
    }

    for(FileType fileType:fileTypeToVersionOwner.keySet()) {
      if (!loadedFileTypes.contains(fileType)) {
        addedFileTypes.add(fileType);
      }
    }

    // Adding or removing indexable file types while a previous registry exists cannot be
    // reconciled incrementally — request a complete rebuild by failing construction.
    if (canUsePreviousMappings && (!addedFileTypes.isEmpty() || !removedFileTypes.isEmpty())) {
      StubUpdatingIndex.LOG.info("requesting complete stub index rebuild due to changes: " +
                                 (addedFileTypes.isEmpty() ? "" : "added file types:" + StringUtil.join(addedFileTypes, FileType::getName, ",") + ";") +
                                 (removedFileTypes.isEmpty() ? "":"removed file types:" + StringUtil.join(removedFileTypes, ",")));
      throw new IOException(); // StubVersionMap will be recreated
    }

    long counter = lastUsedCounter - 1; // important to start with value smaller and progress downwards

    for(FileType fileType: ContainerUtil.concat(updatedFileTypes, addedFileTypes)) {
      // Skip stamps already taken by reused records to keep versionToFileType collision-free.
      while (versionToFileType.containsKey(counter)) --counter;
      registerStamp(fileType, counter);
    }

    if (!addedFileTypes.isEmpty() || !updatedFileTypes.isEmpty() || !removedFileTypes.isEmpty()) {
      if (!addedFileTypes.isEmpty()) {
        StubUpdatingIndex.LOG.info("Following new file types will be indexed:" + StringUtil.join(addedFileTypes, FileType::getName, ","));
      }

      if (!updatedFileTypes.isEmpty()) {
        StubUpdatingIndex.LOG.info("Stub version was changed for " + StringUtil.join(updatedFileTypes, FileType::getName, ","));
      }

      if (!removedFileTypes.isEmpty()) {
        StubUpdatingIndex.LOG.info("Following file types will not be indexed:" + StringUtil.join(removedFileTypes, ","));
      }

      // Rewrite the registry file with the current stamp and the full record set.
      StringBuilder allFileTypes = new StringBuilder();
      allFileTypes.append(currentStubIndexStamp).append(LINE_SEPARATOR);

      for (FileType fileType : fileTypeToVersionOwner.keySet()) {
        Object owner = fileTypeToVersionOwner.get(fileType);
        long timestamp = fileTypeToVersion.get(fileType);
        allFileTypes.append(fileType.getName()).append(RECORD_SEPARATOR).append(typeAndVersion(owner)).append(RECORD_SEPARATOR)
          .append(timestamp).append(LINE_SEPARATOR);
      }
      FileUtil.writeToFile(allIndexedFiles, allFileTypes.toString().getBytes(ourEncoding));
    }

    myStubIndexStamp = currentStubIndexStamp;
  }

  // Records the stamp for a file type in both directions; stamps must be unique, so a non-null
  // previous mapping is a programming error.
  private void registerStamp(FileType fileTypeByName, long stamp) {
    fileTypeToVersion.put(fileTypeByName, stamp);
    FileType previousType = versionToFileType.put(stamp, fileTypeByName);
    if (previousType != null) {
      assert false;
    }
  }

  /**
   * Returns the object that owns the stub version for the given file type: the
   * {@link IStubFileElementType} of its language's parser definition, or a registered
   * {@link BinaryFileStubBuilder} (the builder, when present, takes precedence).
   * Returns {@code null} when the file type produces no stubs.
   */
  private static Object getVersionOwner(FileType fileType) {
    Object owner = null;
    if (fileType instanceof LanguageFileType) {
      Language l = ((LanguageFileType)fileType).getLanguage();
      ParserDefinition parserDefinition = LanguageParserDefinitions.INSTANCE.forLanguage(l);
      if (parserDefinition != null) {
        final IFileElementType type = parserDefinition.getFileNodeType();
        if (type instanceof IStubFileElementType) {
          owner = type;
        }
      }
    }

    BinaryFileStubBuilder builder = BinaryFileStubBuilders.INSTANCE.forFileType(fileType);
    if (builder != null) {
      owner = builder;
    }
    return owner;
  }

  // Returns the stamp registered for the given file type.
  public long getStamp(FileType type) {
    return fileTypeToVersion.get(type);
  }

  // Drops all stamp mappings and reloads from disk; an IOException here is unexpected
  // (the registry was just invalidated) and is surfaced as a RuntimeException.
  void clear() {
    fileTypeToVersion.clear();
    versionToFileType.clear();
    try {
      updateState();
    }
    catch (IOException ex) {
      throw new RuntimeException(ex);
    }
  }

  @NotNull
  private static File allIndexedFilesRegistryFile() {
    return new File(new File(IndexInfrastructure.getIndexRootDir(StubUpdatingIndex.INDEX_ID), ".fileTypes"), INDEXED_FILETYPES);
  }

  // "ownerInfo,version" — the persisted identity of a stub format (see updateState comparison).
  @NotNull
  private static String typeAndVersion(Object owner) {
    return info(owner) + "," + version(owner);
  }

  private static String info(Object owner) {
    if (owner instanceof IStubFileElementType) {
      return "stub:" + owner.getClass().getName();
    }
    else {
      return "binary stub builder:" + owner.getClass().getName();
    }
  }

  // Extracts the stub version from the owner, warning plugin authors whose template-language
  // element types report a version below the template-stub baseline.
  private static int version(Object owner) {
    if (owner instanceof IStubFileElementType) {
      IStubFileElementType elementType = (IStubFileElementType)owner;
      if (elementType.getLanguage() instanceof TemplateLanguage && elementType.getStubVersion() < IStubFileElementType.getTemplateStubVersion()) {
        PluginException.logPluginError(LOG, elementType.getLanguage() + " stub version should call super.getStubVersion()", null, elementType.getClass());
      }
      return elementType.getStubVersion();
    }
    else {
      return ((BinaryFileStubBuilder)owner).getStubVersion();
    }
  }

  // Diff of the file type's stamp against the current index stamp; inverse of
  // getFileTypeByIndexingTimestampDiff. Truncated to int for compact persistence.
  public int getIndexingTimestampDiffForFileType(FileType type) {
    return (int)(myStubIndexStamp - fileTypeToVersion.get(type));
  }

  // Resolves a diff (as produced above) back to its file type, or null if unknown.
  public @Nullable FileType getFileTypeByIndexingTimestampDiff(int diff) {
    return versionToFileType.get(myStubIndexStamp - diff);
  }
}
/** * Copyright 2013 Ming Chen<uchenm@gmail.com> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package chess.view; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Cursor; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.Image; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; import javax.imageio.ImageIO; import javax.swing.JButton; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLayeredPane; import javax.swing.JList; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.UIManager; import javax.swing.border.EmptyBorder; import javax.swing.filechooser.FileNameExtensionFilter; import chess.model.ModelLocator; import chess.model.game.BoardModel; import chess.model.game.Game; import chess.model.game.GameReplayer; public class ReplayBoard extends JFrame { /** * */ private static final long serialVersionUID = 1L; // the width of the board frame private int frameWidth = 860; // the height of the board frame private int frameHeight = 800; // size of window private Dimension wndSize; // the default toolkit of AWT private Toolkit theKit; private ReplayPanel chessPanel; // private ChessMenu 
chessMenu; private StatusBar statusBar; private GameReplayer replayer; private Game game; private JList stepList; public ReplayBoard(Game game) throws IOException { this.game = game; replayer = new GameReplayer(game); replayer.resetGame(); final int width = 600, height = 660; ModelLocator.getInstance().setBoard(new BoardModel(width, height)); this.setJMenuBar(getChessMenu()); this.setLayout(new BorderLayout(5, 5)); this.setCursor(new Cursor(Cursor.HAND_CURSOR)); this.setTitle("Game Replay"); this.setIconImage(ImageIO.read(new File("res/chess.jpg"))); stepList = new JList(replayer.getListModel()); getContentPane().add(stepList, BorderLayout.EAST); initChessPanel(width, height); initStatusBar(); // the default toolkit theKit = getToolkit(); // window size wndSize = theKit.getScreenSize(); // determine the start point and the size of the board frame setBounds((wndSize.width - frameWidth) / 2, (wndSize.height - frameHeight) / 2, frameWidth, frameHeight); // fix the size of the frame addComponentListener(new ComponentAdapter() { public void componentResized(ComponentEvent e) { setBounds((wndSize.width - frameWidth) / 2, (wndSize.height - frameHeight) / 2, frameWidth, frameHeight); } }); // this.add(chessPanel,BorderLayout.CENTER); this.update(this.getGraphics()); this.pack(); // this.setResizable(true); // this.setLocationRelativeTo(null); this.setVisible(true); } private void initStatusBar() { JPanel bottomPanel = new JPanel(); bottomPanel.setLayout(new GridLayout(2, 1)); statusBar = new StatusBar(); bottomPanel.add(new ReplayButtonsPanel()); bottomPanel.add(statusBar); getContentPane().add(bottomPanel, BorderLayout.SOUTH); } private void initChessPanel(final int width, final int height) throws IOException { JPanel leftPanel = new JPanel(); leftPanel.setLayout(new BorderLayout()); // leftPanel.setBorder(new LineBorder(Color.RED)); JLayeredPane lPane = new JLayeredPane(); lPane.setPreferredSize(new Dimension(width, height)); // Chess Board as background image. 
Image backgroundImg = ImageIO .read(new File("res/images/ChessBoard.png")); ImagePanel background = new ImagePanel(backgroundImg, width, height); lPane.add(background, new Integer(0)); chessPanel = new ReplayPanel(width, height, game); lPane.add(chessPanel, new Integer(1)); leftPanel.add(lPane, BorderLayout.CENTER); getContentPane().add(leftPanel, BorderLayout.CENTER); // GameController.getInstance().setChessPanel(chessPanel); } public JMenuBar getChessMenu() { // Chinese Chess JMenuBar menuBar = new JMenuBar(); // Main menu JMenu gameMenu = new JMenu("Game"); JMenuItem openItem = new JMenuItem("Open Game");// load game openItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { ObjectInputStream ois = null; try { JFileChooser chooser = new JFileChooser(); FileNameExtensionFilter filter = new FileNameExtensionFilter( "Chinese Chess Game File", "cnchess"); chooser.setFileFilter(filter); chooser.showOpenDialog(null); String filepath = chooser.getSelectedFile() .getAbsolutePath(); ois = new ObjectInputStream(new FileInputStream(filepath)); game = (Game) ois.readObject(); replayer.setGame(game); replayer.resetGame(); replayer.getListModel().removeAllElements(); // stepList = new JList(replayer.getListModel()); stepList.updateUI(); chessPanel.setGame(game); chessPanel.updateUI(); } catch (Exception ex) { ex.printStackTrace(); JOptionPane.showMessageDialog(null, "Open Game?"); } finally { try { ois.close(); } catch (Exception ex) { } } } }); gameMenu.add(openItem); menuBar.add(gameMenu); // return menu Bar return menuBar; } @SuppressWarnings("serial") public class ReplayButtonsPanel extends JPanel { public ReplayButtonsPanel() { // this.setBounds(200, 200, 600, 200); this.setBorder(new EmptyBorder(10, 10, 10, 10)); this.setLayout(new GridLayout(1, 4, 70, 10)); // this.setSize(width, height); this.setBackground(Color.WHITE); JButton first = new JButton("First Step"); JButton next = new JButton("Next Step"); JButton previous = new 
JButton("Previous Step"); JButton last = new JButton("Last Step"); this.add(first); this.add(next); this.add(previous); this.add(last); first.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { replayer.gotoFirstStep(); chessPanel.updateUI(); } }); next.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // JButton menu = (JButton) e.getSource(); if (replayer != null) { replayer.gotoNextStep(); chessPanel.updateUI(); } else { } } }); previous.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { replayer.gotoPrevousStep(); chessPanel.updateUI(); } }); last.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { replayer.gotoLastStep(); chessPanel.updateUI(); } }); } } public static void main(String[] args) { try { // set lookand feel UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); // new gameboard // new ReplayBoard(); } catch (Exception e) { System.exit(0); } } }
/******************************************************************************************* * The MIT License (MIT) * * Copyright (c) 2013 Erik R Serating * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 *********************************************************************************************/
package com.google.code.siren4j.converter;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.common.collect.ImmutableList;
import org.hamcrest.Matchers;
import org.junit.Ignore;
import org.junit.Test;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.code.siren4j.component.Action;
import com.google.code.siren4j.component.Entity;
import com.google.code.siren4j.component.Link;
import com.google.code.siren4j.component.builder.ActionBuilder;
import com.google.code.siren4j.component.builder.LinkBuilder;
import com.google.code.siren4j.component.impl.ActionImpl.Method;
import com.google.code.siren4j.component.testpojos.Author;
import com.google.code.siren4j.component.testpojos.Comment;
import com.google.code.siren4j.component.testpojos.Comment.Status;
import com.google.code.siren4j.component.testpojos.Course;
import com.google.code.siren4j.component.testpojos.EntityClassAndNamePojo;
import com.google.code.siren4j.component.testpojos.ExtendedNormalPojo;
import com.google.code.siren4j.component.testpojos.NoNamePojo;
import com.google.code.siren4j.component.testpojos.OverriddenCollection;
import com.google.code.siren4j.component.testpojos.ResourceA;
import com.google.code.siren4j.component.testpojos.ResourceB;
import com.google.code.siren4j.component.testpojos.Video;
import com.google.code.siren4j.component.testpojos.Video.Rating;
import com.google.code.siren4j.error.Siren4JConversionException;
import com.google.code.siren4j.error.Siren4JRuntimeException;
import com.google.code.siren4j.resource.CollectionResource;
import com.google.code.siren4j.util.ComponentUtils;

import static org.junit.Assert.*;

/**
 * Unit tests for {@link ReflectingConverter}: converting annotated test POJOs
 * into Siren {@link Entity} components and back, covering link/action
 * overrides on sub-entities, dynamic link/action replacement, URI token
 * resolution, enum properties, base-URI handling and missing-property
 * error behavior on {@code toObject}.
 */
public class ReflectingConverterTest {

    /**
     * Round-trips a Course entity through Jackson JSON and back;
     * the string rendering of both entities must match.
     */
    @Test
    //@Ignore
    public void testToJacksonThereAndBackEntity() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        String there = ent.toString();
        ObjectMapper mapper = new ObjectMapper();
        // Dates must serialize as text, not numeric timestamps, for the round trip.
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        Entity back = mapper.readValue(there, Entity.class);
        assertEquals(ent.toString(), back.toString());
    }

    /**
     * Converting an OverriddenCollection must succeed.
     * No assertion here — the test passes if conversion throws nothing.
     */
    @Test
    public void testOverriddenCollectionToEntity() throws Exception {
        OverriddenCollection resource = new OverriddenCollection();
        Entity ent = ReflectingConverter.newInstance().toEntity(resource);
    }

    /**
     * A plain (non-Resource) POJO with collection and map fields converts
     * without error; the result is printed for manual inspection only.
     */
    @Test
    public void testNonResourceClass() throws Exception {
        ExtendedNormalPojo pojo = new ExtendedNormalPojo();
        pojo.setId(12);
        pojo.setName("Test pojo 1");
        pojo.setLastmodify(new Date());
        Collection<String> refs = new CollectionResource<String>();
        refs.add("Foo");
        refs.add("Bar");
        pojo.setRefs(refs);
        Map<String, String> map = new HashMap<String, String>();
        map.put("key1", "dog");
        map.put("key2", "cat");
        pojo.setTestMap(map);
        Entity ent = ReflectingConverter.newInstance().toEntity(pojo);
        System.out.println("Normal pojo:");
        System.out.println(ent.toString());
    }

    @Test
    //@Ignore
    public void testSubEntityRelOverride() throws Exception {
        // If rel specified in sub entity annotation, it should be used by the
        // sub entity.
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity subEnt = ComponentUtils.getSubEntityByRel(ent, "lastComment");
        assertNotNull("Should have found sub entity with rel equal to 'lastComment'", subEnt);
    }

    /**
     * Links declared on the 'firstComment' sub-entity override the inherited
     * 'course' link href and add a new 'foo' link.
     */
    @Test
    //@Ignore
    public void testSubEntityOverrideLinks() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity subEnt = ComponentUtils.getSubEntityByRel(ent, "firstComment");
        assertNotNull("Should have found sub entity with rel equal to 'firstComment'", subEnt);
        Link courseLink = ComponentUtils.getLinkByRel(subEnt, "course");
        assertEquals("/courses/testCourseID1/overridden", courseLink.getHref());
        Link fooLink = ComponentUtils.getLinkByRel(subEnt, "foo");
        assertNotNull("Expected to find a link named 'foo'", fooLink);
    }

    /**
     * Actions declared on the 'firstComment' sub-entity override the inherited
     * 'Delete' action href and add a new 'Reject' action.
     */
    @Test
    //@Ignore
    public void testSubEntityOverrideActions() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity subEnt = ComponentUtils.getSubEntityByRel(ent, "firstComment");
        assertNotNull("Should have found sub entity with rel equal to 'firstComment'", subEnt);
        Action deleteAction = ComponentUtils.getActionByName(subEnt, "Delete");
        assertEquals("/comments/14/overridden", deleteAction.getHref());
        Action rejectAction = ComponentUtils.getActionByName(subEnt, "Reject");
        assertNotNull("Expected action with the name 'Reject'", rejectAction);
    }

    /**
     * Links set at runtime via setEntityLinks() replace the declared link
     * with the same rel ('reviews').
     */
    @Test
    //@Ignore
    public void testDynamicLinksOverride() throws Exception {
        Course course = getTestCourse();
        String overriddenHref = "/overridden";
        List<Link> dynamicLinks = new ArrayList<Link>();
        dynamicLinks.add(LinkBuilder.newInstance().setRelationship("reviews").setHref(overriddenHref).build());
        course.setEntityLinks(dynamicLinks);
        Entity ent = ReflectingConverter.newInstance().toEntity(course);
        Link reviewsLink = ComponentUtils.getLinkByRel(ent, "reviews");
        assertEquals(overriddenHref, reviewsLink.getHref());
    }

    /**
     * Actions set at runtime via setEntityActions() replace the declared
     * action with the same name ('addReview').
     */
    @Test
    //@Ignore
    public void testDynamicActionOverride() throws Exception {
        Course course = getTestCourse();
        String overridenHref = "/overridden";
        List<Action> dynamicActions = new ArrayList<Action>();
        dynamicActions.add(ActionBuilder.newInstance().setName("addReview").setHref(overridenHref)
            .setMethod(Method.DELETE).build());
        course.setEntityActions(dynamicActions);
        Entity ent = ReflectingConverter.newInstance().toEntity(course);
        Action deleteAction = ComponentUtils.getActionByName(ent, "addReview");
        assertEquals(overridenHref, deleteAction.getHref());
    }

    @Test
    //@Ignore
    public void testNoResolveTokens() throws Exception {
        // tokens with square brackets around the key should not be resolved to
        // the value
        // they should only end up as normal tokens themselves with square
        // brackets removed.
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity authorsEnt = ComponentUtils.getSubEntityByRel(ent, "authors2");
        // Check to be sure that both normal and parent. tokens get resolved
        // correctly.
        // Resolving: /authors?courseid={parent.courseid}/{offset} where
        // parent.courseid is the course object's field and offset is the
        // collection resources
        // field.
        String expected = "/authors?courseid=testCourseID1/{offset}";
        Link selfLink = ComponentUtils.getLinkByRel(authorsEnt, "self");
        assertEquals(expected, selfLink.getHref());
    }

    @Test
    //@Ignore
    public void testResolveTokens() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity authorsEnt = ComponentUtils.getSubEntityByRel(ent, "authors");
        // Check to be sure that both normal and parent. tokens get resolved
        // correctly.
        // Resolving: /authors?courseid={parent.courseid}/{offset} where
        // parent.courseid is the course object's field and offset is the
        // collection resources
        // field.
        String expected = "/authors?courseid=testCourseID1/10";
        Link selfLink = ComponentUtils.getLinkByRel(authorsEnt, "self");
        assertEquals(expected, selfLink.getHref());
    }

    /**
     * Converts a course with an empty description (exercises the embedded-link
     * override path); output printed for manual inspection, no assertion.
     */
    @Test
    //@Ignore
    public void testOverrideEmbeddedLink() throws Exception {
        Course testCourse = getTestCourse();
        testCourse.setDescription("");
        Entity ent = ReflectingConverter.newInstance().toEntity(testCourse);
        System.out.println("Test override embedded Link:");
        System.out.println(ent.toString());
    }

    /**
     * With an empty description, conditionally-included actions are suppressed,
     * so the resulting entity must have no actions at all.
     */
    @Test
    //@Ignore
    public void testCondition() throws Exception {
        Course testCourse = getTestCourse();
        testCourse.setDescription("");
        Entity ent = ReflectingConverter.newInstance().toEntity(testCourse);
        System.out.println("Test condition:");
        System.out.println(ent.toString());
        assertNull(ent.getActions());
    }

    // Placeholder: relationship-override behavior has no coverage yet.
    @Test
    //@Ignore
    public void testOverrideRelationship() throws Exception {
    }

    /** Enum-typed fields must be carried through as entity properties. */
    @Test
    //@Ignore
    public void testEnumProperties() throws Exception {
        Video video = new Video();
        video.setId("z1977");
        video.setName("Star Wars");
        video.setDescription("An epic science fiction space opera");
        video.setRating(Rating.PG);
        video.setGenre("scifi");
        Entity ent = ReflectingConverter.newInstance().toEntity(video);
        assertEquals(Rating.PG, ent.getProperties().get("rating"));
    }

    /** A sub-entity with no explicit rel defaults its rel to the field name. */
    @Test
    //@Ignore
    public void testSubEntityUsesFieldnameForRel() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        Entity authorsEnt = ComponentUtils.getSubEntityByRel(ent, "authors");
        assertNotNull("Expected subentity with 'authors' relationship to exist.", authorsEnt);
    }

    /**
     * A property-name override on Comment exposes the property as 'user'
     * rather than the underlying field name.
     */
    @Test
    //@Ignore
    public void testPropertyNameOverride() throws Exception {
        Comment comment = getTestComment("12", "testCourseID1", "X113", "This course is great.");
        Entity ent = ReflectingConverter.newInstance().toEntity(comment);
        Map<String, Object> props = ent.getProperties();
        System.out.println(ent.toString());
        assertTrue("Expecting property named 'user' to exist.", props.containsKey("user"));
    }

    /**
     * Rough timing loop over ~10k conversions; kept @Ignore'd for normal
     * builds, prints elapsed milliseconds only.
     */
    @Test
    @Ignore
    public void testToEntityPerformance() throws Exception {
        Date start = new Date();
        for (int i = 1; i < 10000; i++) {
            ReflectingConverter.newInstance().toEntity(getTestCourse());
        }
        Date end = new Date();
        long elapsed = end.getTime() - start.getTime();
        System.out.println("Elapsed time: " + elapsed + " milliseconds");
    }

    /**
     * A CollectionResource may itself be the top-level entity; passes if
     * conversion throws nothing (output printed for inspection).
     */
    @Test
    public void testCollectionResourceTopLevelEntity() throws Exception {
        CollectionResource<NoNamePojo> coll = new CollectionResource<NoNamePojo>();
        coll.add(new NoNamePojo("id1", "foo", "bar"));
        coll.add(new NoNamePojo("id2", "hello", "world"));
        Entity ent = ReflectingConverter.newInstance().toEntity(coll);
        System.out.println("testCollectionResourceTopLevelEntity:");
        System.out.println(ent.toString());
    }

    /**
     * Entity -> resource object round trip, using a ResourceRegistry built
     * by scanning the 'com.google.code.siren4j' package.
     */
    @Test
    public void testToResource() throws Exception {
        Entity ent = ReflectingConverter.newInstance().toEntity(getTestCourse());
        ResourceRegistry reg = ResourceRegistryImpl.newInstance("com.google.code.siren4j");
        Object result = ReflectingConverter.newInstance(reg).toObject(ent);
        System.out.println("ToResource: ");
        System.out.println(ReflectingConverter.newInstance().toEntity((Course) result).toString());
    }

    /**
     * With a base URI and fully-qualified links enabled, a link that already
     * carries its own scheme must be left untouched (not re-prefixed).
     */
    @Test
    public void testBaseUri() throws Exception {
        Course course = getTestCourse();
        course.setBaseUri("http://myhost:8080/rest/");
        course.setFullyQualifiedLinks(true);
        course.setEntityLinks(ImmutableList.of(linkWithCustomUri()));
        Entity ent = ReflectingConverter.newInstance().toEntity(course);
        assertThat(ComponentUtils.getLinkByRel(ent, "custom-link").getHref(),
            Matchers.startsWith("custom-scheme://example"));
        System.out.println("testBaseUri: ");
        System.out.println(ent.toString());
    }

    // Helper: a link whose href already has its own (non-http) scheme.
    private Link linkWithCustomUri() {
        return LinkBuilder.createLinkBuilder().setHref("custom-scheme://example.org/resource")
            .setRelationship("custom-link").build();
    }

    /** Declaring both an entity class and an entity name on one POJO must fail. */
    @Test(expected = Siren4JRuntimeException.class)
    public void testUsingBothEntityClassAndName() throws Exception {
        EntityClassAndNamePojo pojo = new EntityClassAndNamePojo();
        Entity ent = ReflectingConverter.newInstance().toEntity(pojo);
    }

    /**
     * With errorOnMissingProperty disabled, converting an entity built from
     * ResourceA into a ResourceB succeeds and the shared property survives.
     */
    @Test
    public void testToObjectWithMissingProperties() throws Exception {
        ResourceA resourceA = new ResourceA();
        resourceA.setProp1("Foo");
        resourceA.setProp2("Bar");
        ReflectingConverter converter = (ReflectingConverter) ReflectingConverter.newInstance();
        converter.setErrorOnMissingProperty(false);
        Entity ent = converter.toEntity(resourceA);
        System.out.println(ent.toString());
        ResourceB resourceB = (ResourceB) converter.toObject(ent, ResourceB.class);
        assertNotNull(resourceB);
        assertEquals("Foo", resourceB.getProp1());
    }

    /**
     * With errorOnMissingProperty enabled, the same ResourceA -> ResourceB
     * conversion must raise Siren4JConversionException.
     */
    @Test(expected = Siren4JConversionException.class)
    public void testToObjectWithMissingPropertiesShouldThrowException() throws Exception {
        ResourceA resourceA = new ResourceA();
        resourceA.setProp1("Foo");
        resourceA.setProp2("Bar");
        ReflectingConverter converter = (ReflectingConverter) ReflectingConverter.newInstance();
        converter.setErrorOnMissingProperty(true);
        Entity ent = converter.toEntity(resourceA);
        System.out.println(ent.toString());
        ResourceB resourceB = (ResourceB) converter.toObject(ent, ResourceB.class);
    }

    /**
     * Builds the shared Course fixture: id 'testCourseID1', two authors in a
     * CollectionResource with offset 10 (used by both 'authors' and
     * 'authors2'), first/last/embedded comments, a basic integer collection
     * and a boolean map.
     */
    private Course getTestCourse() {
        Course course = new Course();
        course.setCourseid("testCourseID1");
        course.setCreatedate(new Date());
        course.setTitle("Test Course 1 Title");
        course.setDescription("Test Course 1 Description");
        course.setType("Online");
        CollectionResource<Author> authors = new CollectionResource<Author>();
        authors.setOffset(10);
        Author author1 = new Author();
        author1.setFirstname("Jim");
        author1.setMiddlename("T");
        author1.setLastname("Smith");
        author1.setId("X111");
        authors.add(author1);
        Author author2 = new Author();
        author2.setFirstname("Anne");
        author2.setMiddlename("C");
        author2.setLastname("Frank");
        author2.setId("X211");
        authors.add(author2);
        // Same collection is deliberately used for both author fields, so the
        // token-resolution tests can compare resolved vs. unresolved hrefs.
        course.setAuthors(authors);
        course.setAuthors2(authors);
        course.setLastComment(getTestComment("12", "testCourseID1", "X113", "This course is great."));
        course.setFirstComment(getTestComment("14", "testCourseID1", "X115", "This course is too easy."));
        Collection<Integer> basicColl = new ArrayList<Integer>();
        basicColl.add(56);
        basicColl.add(10);
        basicColl.add(15);
        course.setBasicCollection(basicColl);
        Map<String, Boolean> boolMap = new HashMap<String, Boolean>();
        boolMap.put("firstEntry", true);
        boolMap.put("secondEntry", false);
        course.setBoolMap(boolMap);
        course.setEmbedComment(getTestComment("16", "testCourseID1", "X116", "This comment is embedded."));
        return course;
    }

    /** Builds a Comment fixture in PENDING status with the given ids and text, created 'now'. */
    private Comment getTestComment(String id, String courseid, String userid, String commentText) {
        Comment comment = new Comment();
        comment.setId(id);
        comment.setCourseid(courseid);
        comment.setUserid(userid);
        comment.setCommentText(commentText);
        comment.setCreatedate(new Date());
        comment.setStatus(Status.PENDING);
        return comment;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.bootstrap; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.hasToString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; 
import static org.mockito.Mockito.when; public class BootstrapChecksTests extends ESTestCase { private static final BootstrapContext defaultContext = new BootstrapContext(Settings.EMPTY, MetaData.EMPTY_META_DATA); public void testNonProductionMode() throws NodeValidationException { // nothing should happen since we are in non-production mode final List<TransportAddress> transportAddresses = new ArrayList<>(); for (int i = 0; i < randomIntBetween(1, 8); i++) { TransportAddress localTransportAddress = new TransportAddress(InetAddress.getLoopbackAddress(), i); transportAddresses.add(localTransportAddress); } TransportAddress publishAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 0); BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class); when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0])); when(boundTransportAddress.publishAddress()).thenReturn(publishAddress); BootstrapChecks.check(defaultContext, boundTransportAddress, Collections.emptyList()); } public void testNoLogMessageInNonProductionMode() throws NodeValidationException { final Logger logger = mock(Logger.class); BootstrapChecks.check(defaultContext, false, Collections.emptyList(), logger); verifyNoMoreInteractions(logger); } public void testLogMessageInProductionMode() throws NodeValidationException { final Logger logger = mock(Logger.class); BootstrapChecks.check(defaultContext, true, Collections.emptyList(), logger); verify(logger).info("bound or publishing to a non-loopback address, enforcing bootstrap checks"); verifyNoMoreInteractions(logger); } public void testEnforceLimitsWhenBoundToNonLocalAddress() { final List<TransportAddress> transportAddresses = new ArrayList<>(); final TransportAddress nonLocalTransportAddress = buildNewFakeTransportAddress(); transportAddresses.add(nonLocalTransportAddress); for (int i = 0; i < randomIntBetween(0, 7); i++) { final TransportAddress randomTransportAddress = 
randomBoolean() ? buildNewFakeTransportAddress() : new TransportAddress(InetAddress.getLoopbackAddress(), i); transportAddresses.add(randomTransportAddress); } final TransportAddress publishAddress = randomBoolean() ? buildNewFakeTransportAddress() : new TransportAddress(InetAddress.getLoopbackAddress(), 0); final BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class); Collections.shuffle(transportAddresses, random()); when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0])); when(boundTransportAddress.publishAddress()).thenReturn(publishAddress); final String discoveryType = randomFrom("zen", "single-node"); assertEquals(BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType), !"single-node".equals(discoveryType)); } public void testEnforceLimitsWhenPublishingToNonLocalAddress() { final List<TransportAddress> transportAddresses = new ArrayList<>(); for (int i = 0; i < randomIntBetween(1, 8); i++) { final TransportAddress randomTransportAddress = buildNewFakeTransportAddress(); transportAddresses.add(randomTransportAddress); } final TransportAddress publishAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 0); final BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class); when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0])); when(boundTransportAddress.publishAddress()).thenReturn(publishAddress); final String discoveryType = randomFrom("zen", "single-node"); assertEquals(BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType), !"single-node".equals(discoveryType)); } public void testExceptionAggregation() { final List<BootstrapCheck> checks = Arrays.asList( context -> BootstrapCheck.BootstrapCheckResult.failure("first"), context -> BootstrapCheck.BootstrapCheckResult.failure("second")); final NodeValidationException e = expectThrows(NodeValidationException.class, () -> 
BootstrapChecks.check(defaultContext, true, checks, "testExceptionAggregation")); assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second")))); final Throwable[] suppressed = e.getSuppressed(); assertThat(suppressed.length, equalTo(2)); assertThat(suppressed[0], instanceOf(IllegalStateException.class)); assertThat(suppressed[0], hasToString(containsString("first"))); assertThat(suppressed[1], instanceOf(IllegalStateException.class)); assertThat(suppressed[1], hasToString(containsString("second"))); } public void testHeapSizeCheck() throws NodeValidationException { final int initial = randomIntBetween(0, Integer.MAX_VALUE - 1); final int max = randomIntBetween(initial + 1, Integer.MAX_VALUE); final AtomicLong initialHeapSize = new AtomicLong(initial); final AtomicLong maxHeapSize = new AtomicLong(max); final BootstrapChecks.HeapSizeCheck check = new BootstrapChecks.HeapSizeCheck() { @Override long getInitialHeapSize() { return initialHeapSize.get(); } @Override long getMaxHeapSize() { return maxHeapSize.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck")); assertThat( e.getMessage(), containsString("initial heap size [" + initialHeapSize.get() + "] " + "not equal to maximum heap size [" + maxHeapSize.get() + "]")); initialHeapSize.set(maxHeapSize.get()); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); // nothing should happen if the initial heap size or the max // heap size is not available if (randomBoolean()) { initialHeapSize.set(0); } else { maxHeapSize.set(0); } BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); } public void testFileDescriptorLimits() throws NodeValidationException { final boolean osX = randomBoolean(); // simulates OS X versus non-OS X 
final int limit = osX ? 10240 : 1 << 16; final AtomicLong maxFileDescriptorCount = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapChecks.FileDescriptorCheck check; if (osX) { check = new BootstrapChecks.OsXFileDescriptorCheck() { @Override long getMaxFileDescriptorCount() { return maxFileDescriptorCount.get(); } }; } else { check = new BootstrapChecks.FileDescriptorCheck() { @Override long getMaxFileDescriptorCount() { return maxFileDescriptorCount.get(); } }; } final NodeValidationException e = expectThrows(NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits")); assertThat(e.getMessage(), containsString("max file descriptors")); maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); // nothing should happen if current file descriptor count is // not available maxFileDescriptorCount.set(-1); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); } public void testFileDescriptorLimitsThrowsOnInvalidLimit() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> new BootstrapChecks.FileDescriptorCheck(-randomIntBetween(0, Integer.MAX_VALUE))); assertThat(e.getMessage(), containsString("limit must be positive but was")); } public void testMlockallCheck() throws NodeValidationException { class MlockallCheckTestCase { private final boolean mlockallSet; private final boolean isMemoryLocked; private final boolean shouldFail; MlockallCheckTestCase(final boolean mlockallSet, final boolean isMemoryLocked, final boolean shouldFail) { this.mlockallSet = mlockallSet; this.isMemoryLocked = isMemoryLocked; this.shouldFail = shouldFail; } } final List<MlockallCheckTestCase> testCases = new ArrayList<>(); testCases.add(new MlockallCheckTestCase(true, true, false)); 
testCases.add(new MlockallCheckTestCase(true, false, true)); testCases.add(new MlockallCheckTestCase(false, true, false)); testCases.add(new MlockallCheckTestCase(false, false, false)); for (final MlockallCheckTestCase testCase : testCases) { final BootstrapChecks.MlockallCheck check = new BootstrapChecks.MlockallCheck() { @Override boolean isMemoryLocked() { return testCase.isMemoryLocked; } }; BootstrapContext bootstrapContext = new BootstrapContext( Settings.builder().put("bootstrap.memory_lock", testCase.mlockallSet).build(), null); if (testCase.shouldFail) { final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check( bootstrapContext, true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit")); assertThat( e.getMessage(), containsString("memory locking requested for elasticsearch process but memory is not locked")); } else { // nothing should happen BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit"); } } } public void testMaxNumberOfThreadsCheck() throws NodeValidationException { final int limit = 1 << 11; final AtomicLong maxNumberOfThreads = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapChecks.MaxNumberOfThreadsCheck check = new BootstrapChecks.MaxNumberOfThreadsCheck() { @Override long getMaxNumberOfThreads() { return maxNumberOfThreads.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck")); assertThat(e.getMessage(), containsString("max number of threads")); maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); // nothing should happen if current max number of threads is // not available 
maxNumberOfThreads.set(-1); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); } public void testMaxSizeVirtualMemory() throws NodeValidationException { final long rlimInfinity = Constants.MAC_OS_X ? 9223372036854775807L : -1L; final AtomicLong maxSizeVirtualMemory = new AtomicLong(randomIntBetween(0, Integer.MAX_VALUE)); final BootstrapChecks.MaxSizeVirtualMemoryCheck check = new BootstrapChecks.MaxSizeVirtualMemoryCheck() { @Override long getMaxSizeVirtualMemory() { return maxSizeVirtualMemory.get(); } @Override long getRlimInfinity() { return rlimInfinity; } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory")); assertThat(e.getMessage(), containsString("max size virtual memory")); maxSizeVirtualMemory.set(rlimInfinity); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); // nothing should happen if max size virtual memory is not available maxSizeVirtualMemory.set(Long.MIN_VALUE); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); } public void testMaxFileSizeCheck() throws NodeValidationException { final long rlimInfinity = Constants.MAC_OS_X ? 
9223372036854775807L : -1L; final AtomicLong maxFileSize = new AtomicLong(randomIntBetween(0, Integer.MAX_VALUE)); final BootstrapChecks.MaxFileSizeCheck check = new BootstrapChecks.MaxFileSizeCheck() { @Override long getMaxFileSize() { return maxFileSize.get(); } @Override long getRlimInfinity() { return rlimInfinity; } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize")); assertThat(e.getMessage(), containsString("max file size")); maxFileSize.set(rlimInfinity); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); // nothing should happen if max file size is not available maxFileSize.set(Long.MIN_VALUE); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); } public void testMaxMapCountCheck() throws NodeValidationException { final int limit = 1 << 18; final AtomicLong maxMapCount = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapChecks.MaxMapCountCheck check = new BootstrapChecks.MaxMapCountCheck() { @Override long getMaxMapCount() { return maxMapCount.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck")); assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count")); maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); // nothing should happen if current vm.max_map_count is not // available maxMapCount.set(-1); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); } public void testClientJvmCheck() throws NodeValidationException { final AtomicReference<String> vmName = new AtomicReference<>("Java 
HotSpot(TM) 32-Bit Client VM"); final BootstrapCheck check = new BootstrapChecks.ClientJvmCheck() { @Override String getVmName() { return vmName.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck")); assertThat( e.getMessage(), containsString("JVM is using the client VM [Java HotSpot(TM) 32-Bit Client VM] " + "but should be using a server VM for the best performance")); vmName.set("Java HotSpot(TM) 32-Bit Server VM"); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck"); } public void testUseSerialGCCheck() throws NodeValidationException { final AtomicReference<String> useSerialGC = new AtomicReference<>("true"); final BootstrapCheck check = new BootstrapChecks.UseSerialGCCheck() { @Override String getUseSerialGC() { return useSerialGC.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck")); assertThat( e.getMessage(), containsString("JVM is using the serial collector but should not be for the best performance; " + "" + "either it's the default for the VM [" + JvmInfo.jvmInfo().getVmName() +"] or -XX:+UseSerialGC was explicitly specified")); useSerialGC.set("false"); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck"); } public void testSystemCallFilterCheck() throws NodeValidationException { final AtomicBoolean isSystemCallFilterInstalled = new AtomicBoolean(); BootstrapContext context = randomBoolean() ? 
new BootstrapContext(Settings.builder().put("bootstrap.system_call_filter", true) .build(), null) : defaultContext; final BootstrapChecks.SystemCallFilterCheck systemCallFilterEnabledCheck = new BootstrapChecks.SystemCallFilterCheck() { @Override boolean isSystemCallFilterInstalled() { return isSystemCallFilterInstalled.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), "testSystemCallFilterCheck")); assertThat( e.getMessage(), containsString("system call filters failed to install; " + "check the logs and fix your configuration or disable system call filters at your own risk")); isSystemCallFilterInstalled.set(true); BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), "testSystemCallFilterCheck"); BootstrapContext context_1 = new BootstrapContext(Settings.builder().put("bootstrap.system_call_filter", false).build(), null); final BootstrapChecks.SystemCallFilterCheck systemCallFilterNotEnabledCheck = new BootstrapChecks.SystemCallFilterCheck() { @Override boolean isSystemCallFilterInstalled() { return isSystemCallFilterInstalled.get(); } }; isSystemCallFilterInstalled.set(false); BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); isSystemCallFilterInstalled.set(true); BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); } public void testMightForkCheck() throws NodeValidationException { final AtomicBoolean isSystemCallFilterInstalled = new AtomicBoolean(); final AtomicBoolean mightFork = new AtomicBoolean(); final BootstrapChecks.MightForkCheck check = new BootstrapChecks.MightForkCheck() { @Override boolean isSystemCallFilterInstalled() { return isSystemCallFilterInstalled.get(); } @Override boolean mightFork() { return 
mightFork.get(); } @Override String message(BootstrapContext context) { return "error"; } }; runMightForkTest( check, isSystemCallFilterInstalled, () -> mightFork.set(false), () -> mightFork.set(true), e -> assertThat(e.getMessage(), containsString("error"))); } public void testOnErrorCheck() throws NodeValidationException { final AtomicBoolean isSystemCallFilterInstalled = new AtomicBoolean(); final AtomicReference<String> onError = new AtomicReference<>(); final BootstrapChecks.MightForkCheck check = new BootstrapChecks.OnErrorCheck() { @Override boolean isSystemCallFilterInstalled() { return isSystemCallFilterInstalled.get(); } @Override String onError() { return onError.get(); } }; final String command = randomAlphaOfLength(16); runMightForkTest( check, isSystemCallFilterInstalled, () -> onError.set(randomBoolean() ? "" : null), () -> onError.set(command), e -> assertThat( e.getMessage(), containsString( "OnError [" + command + "] requires forking but is prevented by system call filters " + "([bootstrap.system_call_filter=true]); upgrade to at least Java 8u92 and use ExitOnOutOfMemoryError"))); } public void testOnOutOfMemoryErrorCheck() throws NodeValidationException { final AtomicBoolean isSystemCallFilterInstalled = new AtomicBoolean(); final AtomicReference<String> onOutOfMemoryError = new AtomicReference<>(); final BootstrapChecks.MightForkCheck check = new BootstrapChecks.OnOutOfMemoryErrorCheck() { @Override boolean isSystemCallFilterInstalled() { return isSystemCallFilterInstalled.get(); } @Override String onOutOfMemoryError() { return onOutOfMemoryError.get(); } }; final String command = randomAlphaOfLength(16); runMightForkTest( check, isSystemCallFilterInstalled, () -> onOutOfMemoryError.set(randomBoolean() ? 
"" : null), () -> onOutOfMemoryError.set(command), e -> assertThat( e.getMessage(), containsString( "OnOutOfMemoryError [" + command + "]" + " requires forking but is prevented by system call filters ([bootstrap.system_call_filter=true]);" + " upgrade to at least Java 8u92 and use ExitOnOutOfMemoryError"))); } private void runMightForkTest( final BootstrapChecks.MightForkCheck check, final AtomicBoolean isSystemCallFilterInstalled, final Runnable disableMightFork, final Runnable enableMightFork, final Consumer<NodeValidationException> consumer) throws NodeValidationException { final String methodName = Thread.currentThread().getStackTrace()[2].getMethodName(); // if system call filter is disabled, nothing should happen isSystemCallFilterInstalled.set(false); if (randomBoolean()) { disableMightFork.run(); } else { enableMightFork.run(); } BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); // if system call filter is enabled, but we will not fork, nothing should // happen isSystemCallFilterInstalled.set(true); disableMightFork.run(); BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); // if system call filter is enabled, and we might fork, the check should be enforced, regardless of bootstrap checks being enabled // or not isSystemCallFilterInstalled.set(true); enableMightFork.run(); final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), methodName)); consumer.accept(e); } public void testEarlyAccessCheck() throws NodeValidationException { final AtomicReference<String> javaVersion = new AtomicReference<>(randomFrom("1.8.0_152-ea", "9-ea")); final BootstrapChecks.EarlyAccessCheck eaCheck = new BootstrapChecks.EarlyAccessCheck() { @Override String jvmVendor() { return "Oracle Corporation"; } @Override String javaVersion() { return javaVersion.get(); } }; final 
List<BootstrapCheck> checks = Collections.singletonList(eaCheck); final NodeValidationException e = expectThrows( NodeValidationException.class, () -> { BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); }); assertThat( e.getMessage(), containsString( "Java version [" + javaVersion.get() + "] is an early-access build, only use release builds")); // if not on an early-access build, nothing should happen javaVersion.set(randomFrom("1.8.0_152", "9")); BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); } public void testG1GCCheck() throws NodeValidationException { final AtomicBoolean isG1GCEnabled = new AtomicBoolean(true); final AtomicBoolean isJava8 = new AtomicBoolean(true); final AtomicReference<String> jvmVersion = new AtomicReference<>(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(0, 39), randomIntBetween(1, 128))); final BootstrapChecks.G1GCCheck g1GCCheck = new BootstrapChecks.G1GCCheck() { @Override String jvmVendor() { return "Oracle Corporation"; } @Override boolean isG1GCEnabled() { return isG1GCEnabled.get(); } @Override String jvmVersion() { return jvmVersion.get(); } @Override boolean isJava8() { return isJava8.get(); } }; final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck")); assertThat( e.getMessage(), containsString( "JVM version [" + jvmVersion.get() + "] can cause data corruption when used with G1GC; upgrade to at least Java 8u40")); // if G1GC is disabled, nothing should happen isG1GCEnabled.set(false); BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled isG1GCEnabled.set(randomBoolean()); jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128))); 
BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() { @Override String jvmVendor() { return randomAlphaOfLength(8); } }; // if not on an Oracle JVM, nothing should happen BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck), "testG1GCCheck"); final BootstrapChecks.G1GCCheck nonJava8Check = new BootstrapChecks.G1GCCheck() { @Override boolean isJava8() { return false; } }; // if not Java 8, nothing should happen BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check), "testG1GCCheck"); } public void testAllPermissionCheck() throws NodeValidationException { final AtomicBoolean isAllPermissionGranted = new AtomicBoolean(true); final BootstrapChecks.AllPermissionCheck allPermissionCheck = new BootstrapChecks.AllPermissionCheck() { @Override boolean isAllPermissionGranted() { return isAllPermissionGranted.get(); } }; final List<BootstrapCheck> checks = Collections.singletonList(allPermissionCheck); final NodeValidationException e = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck")); assertThat(e, hasToString(containsString("granting the all permission effectively disables security"))); // if all permissions are not granted, nothing should happen isAllPermissionGranted.set(false); BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck"); } public void testAlwaysEnforcedChecks() { final BootstrapCheck check = new BootstrapCheck() { @Override public BootstrapCheckResult check(BootstrapContext context) { return BootstrapCheckResult.failure("error"); } @Override public boolean alwaysEnforce() { return true; } }; final NodeValidationException alwaysEnforced = expectThrows( NodeValidationException.class, () -> BootstrapChecks.check(defaultContext, randomBoolean(), 
Collections.singletonList(check), "testAlwaysEnforcedChecks")); assertThat(alwaysEnforced, hasToString(containsString("error"))); } }
// Copyright 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gwtorm.schema.sql;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeNoException;

import com.google.gwtorm.data.Address;
import com.google.gwtorm.data.Person;
import com.google.gwtorm.data.PhoneBookDb;
import com.google.gwtorm.data.PhoneBookDb2;
import com.google.gwtorm.jdbc.Database;
import com.google.gwtorm.jdbc.JdbcExecutor;
import com.google.gwtorm.jdbc.JdbcSchema;
import com.google.gwtorm.jdbc.SimpleDataSource;
import com.google.gwtorm.server.OrmException;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Properties;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Integration tests for {@link DialectMySQL} against a live local MySQL server.
 *
 * <p>The whole fixture is skipped (via {@code assumeNoException}) when no
 * server is reachable at {@code jdbc:mysql://localhost/gwtorm} with
 * user/password {@code gwtorm}/{@code gwtorm}.
 */
public class DialectMySQLTest extends SqlDialectTest {
  @Before
  public void setUp() throws Exception {
    Class.forName(com.mysql.jdbc.Driver.class.getName());

    final String host = "localhost";
    final String database = "gwtorm";
    final String user = "gwtorm";
    final String pass = "gwtorm";

    final String url = "jdbc:mysql://" + host + "/" + database;
    try {
      db = DriverManager.getConnection(url, user, pass);
    } catch (Throwable t) {
      // No local MySQL available: skip the tests rather than fail them.
      assumeNoException(t);
    }
    executor = new JdbcExecutor(db);
    dialect = new DialectMySQL().refine(db);

    final Properties p = new Properties();
    p.setProperty("driver", com.mysql.jdbc.Driver.class.getName());
    p.setProperty("url", db.getMetaData().getURL());
    p.setProperty("user", user);
    p.setProperty("password", pass);
    phoneBook = new Database<>(new SimpleDataSource(p), PhoneBookDb.class);
    phoneBook2 = new Database<>(new SimpleDataSource(p), PhoneBookDb2.class);
  }

  @After
  public void tearDown() {
    // setUp was skipped (no server); nothing to clean up.
    if (executor == null) {
      return;
    }

    // Database content must be flushed because
    // tests assume that the database is empty
    drop("TABLE address_id");
    drop("TABLE addresses");
    drop("TABLE cnt");
    drop("TABLE bar");
    drop("TABLE foo");
    drop("TABLE people");

    // executor is known non-null here (early return above), so close it
    // unconditionally; the original re-checked it redundantly.
    executor.close();
    executor = null;

    if (db != null) {
      try {
        db.close();
      } catch (SQLException e) {
        throw new RuntimeException("Cannot close database", e);
      }
    }
    db = null;
  }

  /**
   * Best-effort {@code DROP} of a database object; not every test creates
   * every object, so a failure here is expected and deliberately ignored.
   */
  private void drop(String drop) {
    try {
      execute("DROP " + drop);
    } catch (OrmException ignored) {
      // Intentionally swallowed: cleanup is best-effort.
    }
  }

  private void execute(final String sql) throws OrmException {
    executor.execute(sql);
  }

  @Test
  public void testListSequences() throws OrmException, SQLException {
    assertTrue(dialect.listSequences(db).isEmpty());

    execute("CREATE TABLE cnt (s SERIAL)");
    execute("CREATE TABLE foo (cnt INT)");

    // A SERIAL column is reported as a sequence; a plain table is not.
    Set<String> s = dialect.listSequences(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("cnt"));
    assertFalse(s.contains("foo"));
  }

  @Test
  public void testListTables() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());

    execute("CREATE TABLE cnt (s SERIAL)");
    execute("CREATE TABLE foo (cnt INT)");

    // The sequence-emulation table "cnt" must be excluded from the table list.
    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertFalse(s.contains("cnt"));
    assertTrue(s.contains("foo"));
  }

  @Test
  public void testListIndexes() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());

    execute("CREATE TABLE foo (cnt INT, bar INT, baz INT)");
    execute("CREATE UNIQUE INDEX FOO_PRIMARY_IND ON foo(cnt)");
    execute("CREATE INDEX FOO_SECOND_IND ON foo(bar, baz)");

    // Index names come back lower-cased from the dialect.
    Set<String> s = dialect.listIndexes(db, "foo");
    assertEquals(2, s.size());
    assertTrue(s.contains("foo_primary_ind"));
    assertTrue(s.contains("foo_second_ind"));

    dialect.dropIndex(executor, "foo", "foo_primary_ind");
    dialect.dropIndex(executor, "foo", "foo_second_ind");
    assertEquals(Collections.emptySet(), dialect.listIndexes(db, "foo"));
  }

  @Test
  public void testUpgradeSchema() throws SQLException, OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      // Create objects the schema does not know about, and damage objects
      // it does know about; updateSchema must repair the damage and
      // pruneSchema must remove the strays.
      execute("CREATE TABLE cnt (s SERIAL)");
      execute("CREATE TABLE foo (cnt INT)");

      execute("ALTER TABLE people ADD COLUMN fake_name VARCHAR(20)");
      execute("ALTER TABLE people DROP COLUMN registered");
      execute("DROP TABLE addresses");
      execute("DROP TABLE address_id");

      Set<String> sequences, tables;

      p.updateSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      assertTrue(sequences.contains("cnt"));
      assertTrue(tables.contains("foo"));
      assertTrue(sequences.contains("address_id"));
      assertTrue(tables.contains("addresses"));

      p.pruneSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      assertFalse(sequences.contains("cnt"));
      assertFalse(tables.contains("foo"));

      // The repaired schema must accept inserts again.
      final Person.Key pk = new Person.Key("Bob");
      final Person bob = new Person(pk, p.nextAddressId());
      p.people().insert(Collections.singleton(bob));

      final Address addr = new Address(new Address.Key(pk, "home"), "some place");
      p.addresses().insert(Collections.singleton(addr));
    } finally {
      p.close();
    }

    final PhoneBookDb2 p2 = phoneBook2.open();
    try {
      ((JdbcSchema) p2).renameField(executor, "people", "registered", "isRegistered");
    } finally {
      p2.close();
    }
  }

  @Test
  public void testRenameTable() throws SQLException, OrmException {
    assertTrue(dialect.listTables(db).isEmpty());
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("foo"));

    final PhoneBookDb p = phoneBook.open();
    try {
      ((JdbcSchema) p).renameTable(executor, "foo", "bar");
    } finally {
      p.close();
    }

    s = dialect.listTables(db);
    assertTrue(s.contains("bar"));
    // BUG FIX: the original asserted !contains("for") — a typo that always
    // passed; the rename must actually remove the old "foo" table name.
    assertFalse(s.contains("foo"));
  }
}
package com.biermacht.brews.frontend.IngredientActivities;

import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.Spinner;
import android.widget.TextView;

import com.biermacht.brews.R;
import com.biermacht.brews.frontend.adapters.IngredientSpinnerAdapter;
import com.biermacht.brews.frontend.adapters.SpinnerAdapter;
import com.biermacht.brews.ingredient.Fermentable;
import com.biermacht.brews.ingredient.Ingredient;
import com.biermacht.brews.ingredient.PlaceholderIngredient;
import com.biermacht.brews.recipe.Recipe;
import com.biermacht.brews.utils.Constants;
import com.biermacht.brews.utils.Units;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Activity for adding a fermentable ingredient to the current recipe.  Extends
 * the generic add/edit flow: the user picks a fermentable from a searchable
 * list (or a placeholder entry that launches AddCustomFermentableActivity),
 * then edits color, gravity contribution, amount and time before submitting.
 */
public class AddFermentableActivity extends AddEditIngredientActivity {

  // Holds the currently selected fermentable
  Fermentable fermentable;

  // Editable rows to display
  public View colorView;
  public View gravityView;
  public Spinner fermentableTypeSpinner;

  // Titles from rows
  public TextView colorViewTitle;
  public TextView gravityViewTitle;

  // Content from rows
  public TextView colorViewText;
  public TextView gravityViewText;

  // Storage for acquired values
  double gravity;
  double color;
  String type;

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Disable delete button for this view - this activity only adds,
    // never deletes, an ingredient.
    findViewById(R.id.delete_button).setVisibility(View.GONE);

    // Initialize views and such here
    colorView = inflater.inflate(R.layout.row_layout_edit_text, mainView, false);
    gravityView = inflater.inflate(R.layout.row_layout_edit_text, mainView, false);

    /************************************************************************
     ************* Add views *************************************************
     *************************************************************************/
    // NOTE(review): fermentableTypeSpinner is assigned in createSpinner();
    // this list registers whatever value it holds when super.onCreate() has
    // (or has not) run that method - confirm ordering in
    // AddEditIngredientActivity, otherwise a null element is registered here.
    this.registerViews(Arrays.asList(fermentableTypeSpinner, colorView, gravityView));
    this.setViews(Arrays.asList(searchableListView, timeView, amountView, colorView,
            gravityView));

    /************************************************************************
     ************* Get titles, set values **********************************
     *************************************************************************/
    colorViewTitle = (TextView) colorView.findViewById(R.id.title);
    colorViewTitle.setText("SRM Color");

    gravityViewTitle = (TextView) gravityView.findViewById(R.id.title);
    gravityViewTitle.setText("Gravity Contribution");

    searchableListViewTitle.setText("Fermentable");
    amountViewTitle.setText("Amount " + "(" + Units.getFermentableUnits() + ")");

    // Acquire text views
    colorViewText = (TextView) colorView.findViewById(R.id.text);
    gravityViewText = (TextView) gravityView.findViewById(R.id.text);

    // Set button text
    submitButton.setText(R.string.add);

    // Set initial position for searchable list
    setInitialSearchableListSelection();
  }

  /**
   * Opens the appropriate numeric-input dialog when the user taps the color
   * or gravity row; other taps fall through to the superclass handling.
   */
  @Override
  public void onMissedClick(View v) {
    super.onMissedClick(v);
    Log.d("AddFermentableActivity", "Checking views for: " + v);
    if (v.equals(colorView)) {
      dialog = alertBuilder.editTextFloatAlert(colorViewText, colorViewTitle).create();
    }
    else if (v.equals(gravityView)) {
      dialog = alertBuilder.editTextFloatAlert(gravityViewText, gravityViewTitle).create();
    }
    else {
      // Not one of our rows - nothing to show.
      Log.d("AddFermentableActivity", "View not found: " + v);
      return;
    }

    // Force keyboard open and show popup
    dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE);
    dialog.show();
  }

  /**
   * Populates ingredientList with all known fermentables, plus a leading
   * placeholder entry that lets the user create a custom fermentable.
   */
  @Override
  public void getList() {
    Log.d("AddFermentableActivityt", "Getting fermentables list");
    ingredientList = new ArrayList<Ingredient>();
    ingredientList.addAll(ingredientHandler.getFermentablesList());

    // Add a placeholder ingredient.  When selected, allows user to create
    // a new custom ingredient.
    Log.d("AddFermentableActivity", "Adding placeholder ingredient");
    PlaceholderIngredient i = new PlaceholderIngredient("Create new");
    i.setShortDescription("Create a custom fermentable");
    ingredientList.add(0, i);
  }

  @Override
  public void setInitialSpinnerSelection() {
    spinnerView.setSelection(0);
  }

  /**
   * Builds both spinners: the main ingredient spinner (field "adapter") and
   * the fermentable-type spinner with its selection listener, which adjusts
   * the gravity default and the boil/steep time row for extract recipes.
   */
  @Override
  public void createSpinner() {
    this.adapter = new IngredientSpinnerAdapter(this, ingredientList, "Fermentable", true);
    adapter.setDropDownViewResource(android.R.layout.simple_dropdown_item_1line);
    spinnerView.setAdapter(this.adapter);

    fermentableTypeSpinner = (Spinner) inflater.inflate(R.layout.row_layout_spinner, mainView, false);
    // NOTE(review): this local shadows the "adapter" field assigned above;
    // it only configures the type spinner and is intentionally short-lived.
    SpinnerAdapter adapter = new SpinnerAdapter(this, Constants.FERMENTABLE_TYPES, "Type");
    adapter.setDropDownViewResource(android.R.layout.simple_dropdown_item_1line);
    fermentableTypeSpinner.setAdapter(adapter);

    // Handle fermentable type selections
    fermentableTypeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
      public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
        type = Constants.FERMENTABLE_TYPES.get(position);
        Log.d("AddFermentableActivity", "Fermentable type selcted: " + type);

        // Extract recipes show a time row whose meaning (boil vs. steep)
        // and default gravity depend on the fermentable type.
        if (mRecipe.getType().equals(Recipe.EXTRACT)) {
          timeView.setVisibility(View.VISIBLE);
          if (type.equals(Fermentable.TYPE_EXTRACT) || type.equals(Fermentable.TYPE_SUGAR)) {
            gravityViewText.setText("1.044");
            timeViewTitle.setText(R.string.boil_time);
            timeViewText.setText(mRecipe.getBoilTime() + "");
          }
          else if (type.equals(Fermentable.TYPE_GRAIN)) {
            gravityViewText.setText("1.037");
            timeViewTitle.setText(R.string.steep_time);
            timeViewText.setText(15 + "");
          }
          else {
            timeViewTitle.setText("Time");
          }
        }
        else {
          timeView.setVisibility(View.GONE);
        }
      }

      public void onNothingSelected(AdapterView<?> parentView) {
      }
    });
  }

  /**
   * Installs the click listener for the searchable fermentable list.  A
   * placeholder selection launches AddCustomFermentableActivity; a real
   * selection fills every editable row from the chosen fermentable.
   */
  @Override
  public void configureSearchableListListener() {
    searchableListListener = new AdapterView.OnItemClickListener() {
      public void onItemClick(AdapterView<?> parentView, View selectedItemView, int position, long id) {

        // Handle the placeholder case
        if (filteredList.get(position).getType().equals(Ingredient.PLACEHOLDER)) {
          // Cancel the dialog
          cancelDialog();

          // Switch into AddCustomFermentableActivity
          Intent intent = new Intent(AddFermentableActivity.this, AddCustomFermentableActivity
                  .class);
          intent.putExtra(Constants.KEY_RECIPE, mRecipe);
          intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
          startActivity(intent);
          finish();
          return;
        }

        // Not a placeholder
        fermentable = (Fermentable) filteredList.get(position);

        // Set whether we show boil or steep
        if (mRecipe.getType().equals(Recipe.EXTRACT)) {
          if (fermentable.getFermentableType().equals(Fermentable.TYPE_EXTRACT)) {
            timeViewTitle.setText(R.string.boil_time);
            timeViewText.setText(mRecipe.getBoilTime() + "");
          }
          else if (fermentable.getFermentableType().equals(Fermentable.TYPE_GRAIN)) {
            timeViewTitle.setText(R.string.steep_time);
            timeViewText.setText(15 + "");
          }
          else {
            timeViewTitle.setText("Time");
          }
        }
        else {
          // TODO: Support extract / adjunct times for all-grain recipes.
          timeView.setVisibility(View.GONE);
        }

        nameViewText.setText(fermentable.getName());
        searchableListViewText.setText(fermentable.getName());
        colorViewText.setText(String.format("%2.2f", fermentable.getLovibondColor()));
        gravityViewText.setText(String.format("%2.3f", fermentable.getGravity()));
        amountViewText.setText(1 + "");
        // NOTE(review): this unconditionally resets the time to the boil
        // time, overwriting the 15-minute steep time set just above for
        // grain in extract recipes - confirm whether that is intended.
        timeViewText.setText(String.format("%d", mRecipe.getBoilTime()));
        type = fermentable.getFermentableType();

        // Cancel dialog
        cancelDialog();
      }
    };
  }

  @Override
  public void onDeletePressed() {
    // Must be overriden
  }

  @Override
  public void onCancelPressed() {
    finish();
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.activity_add_ingredient, menu);
    return true;
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      case android.R.id.home:
        finish();
        return true;
    }
    return super.onOptionsItemSelected(item);
  }

  /**
   * Reads the user-entered values back out of the rows and copies them onto
   * the selected fermentable.  Comma decimal separators are normalized to
   * dots before parsing.  Throws if a field fails to parse (handled by the
   * superclass submit flow).
   */
  @Override
  public void acquireValues() throws Exception {
    super.acquireValues();
    Log.d("AddFermentableActivity", "Acquiring values for: " + fermentable.getName());
    color = Double.parseDouble(colorViewText.getText().toString().replace(",", "."));
    gravity = Double.parseDouble(gravityViewText.getText().toString().replace(",", "."));

    fermentable.setName(name);
    fermentable.setTime(time);
    fermentable.setDisplayAmount(amount);
    fermentable.setLovibondColor(color);
    fermentable.setGravity(gravity);
    fermentable.setFermentableType(type);
  }

  /**
   * Commits the configured fermentable to the recipe, persists the recipe,
   * and closes this activity.
   */
  @Override
  public void onFinished() {
    mRecipe.addIngredient(fermentable);
    mRecipe.save(AddFermentableActivity.this);
    finish();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.Hook;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.HookUtils;
import org.apache.hadoop.hive.ql.hooks.MetricsQueryLifeTimeHook;
import org.apache.hadoop.hive.ql.hooks.QueryLifeTimeHook;
import org.apache.hadoop.hive.ql.hooks.QueryLifeTimeHookContext;
import org.apache.hadoop.hive.ql.hooks.QueryLifeTimeHookContextImpl;
import org.apache.hadoop.hive.ql.hooks.QueryLifeTimeHookWithParseHooks;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hive.common.util.HiveStringUtils;

/**
 * Handles hook executions for {@link Driver}.
 *
 * <p>Hooks are loaded lazily from {@link HiveConf} on first use (see
 * {@link #initialize()}); additional hooks can be registered at runtime via
 * the {@code add*Hook} methods.  Not thread-safe: intended for use by a
 * single {@link Driver} instance.
 */
public class HookRunner {

  private static final String CLASS_NAME = Driver.class.getName();
  private final HiveConf conf;
  private LogHelper console;

  // Hook lists are populated once by initialize() and then only appended to
  // via the add*Hook methods.
  private List<QueryLifeTimeHook> queryHooks = new ArrayList<>();
  private List<HiveSemanticAnalyzerHook> saHooks = new ArrayList<>();
  private List<HiveDriverRunHook> driverRunHooks = new ArrayList<>();
  private List<ExecuteWithHookContext> preExecHooks = new ArrayList<>();
  private List<ExecuteWithHookContext> postExecHooks = new ArrayList<>();
  private List<ExecuteWithHookContext> onFailureHooks = new ArrayList<>();

  private boolean initialized = false;

  /**
   * Constructs a {@link HookRunner}; hooks are loaded lazily from the given
   * configuration on first use.
   */
  HookRunner(HiveConf conf, SessionState.LogHelper console) {
    this.conf = conf;
    this.console = console;
  }

  /**
   * Loads all configured hooks from the conf.  Idempotent: only the first
   * call has any effect.  Also attaches the metrics lifetime hook when
   * HiveServer2 metrics are enabled.
   */
  public void initialize() {
    if (initialized) {
      return;
    }
    initialized = true;
    queryHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS, QueryLifeTimeHook.class));
    saHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, HiveSemanticAnalyzerHook.class));
    driverRunHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.HIVE_DRIVER_RUN_HOOKS, HiveDriverRunHook.class));
    preExecHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.PREEXECHOOKS, ExecuteWithHookContext.class));
    postExecHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.POSTEXECHOOKS, ExecuteWithHookContext.class));
    onFailureHooks.addAll(loadHooksFromConf(HiveConf.ConfVars.ONFAILUREHOOKS, ExecuteWithHookContext.class));

    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED)) {
      queryHooks.add(new MetricsQueryLifeTimeHook());
    }
  }

  // Instantiates the hooks named in the given conf variable.  A failure to
  // load any hook is fatal (wrapped in RuntimeException with the conf var
  // named in the message).
  // NOTE(review): clazz is not passed through - it serves only to pin the
  // generic type T at call sites; readHooksFromConf derives the classes
  // from the configuration value itself.
  private <T extends Hook> List<T> loadHooksFromConf(ConfVars hookConfVar, Class<T> clazz) {
    try {
      return HookUtils.readHooksFromConf(conf, hookConfVar);
    } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
      String message = "Error loading hooks(" + hookConfVar + "): " + HiveStringUtils.stringifyException(e);
      throw new RuntimeException(message, e);
    }
  }

  /**
   * Invokes {@link QueryLifeTimeHookWithParseHooks#beforeParse(QueryLifeTimeHookContext)}
   * on every registered hook that implements that interface.
   *
   * @param command the Hive command that is being run
   */
  void runBeforeParseHook(String command) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        if (hook instanceof QueryLifeTimeHookWithParseHooks) {
          ((QueryLifeTimeHookWithParseHooks) hook).beforeParse(qhc);
        }
      }
    }
  }

  /**
   * Invokes {@link QueryLifeTimeHookWithParseHooks#afterParse(QueryLifeTimeHookContext, boolean)}
   * on every registered hook that implements that interface.
   *
   * @param command the Hive command that is being run
   * @param parseError true if there was an error while parsing the command, false otherwise
   */
  void runAfterParseHook(String command, boolean parseError) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        if (hook instanceof QueryLifeTimeHookWithParseHooks) {
          ((QueryLifeTimeHookWithParseHooks) hook).afterParse(qhc, parseError);
        }
      }
    }
  }

  /**
   * Dispatches {@link QueryLifeTimeHook#beforeCompile(QueryLifeTimeHookContext)}.
   *
   * @param command the Hive command that is being run
   */
  void runBeforeCompileHook(String command) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        hook.beforeCompile(qhc);
      }
    }
  }

  /**
   * Dispatches {@link QueryLifeTimeHook#afterCompile(QueryLifeTimeHookContext, boolean)}.
   *
   * @param command the Hive command that is being run
   * @param compileError true if there was an error while compiling the command, false otherwise
   */
  void runAfterCompilationHook(String command, boolean compileError) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        hook.afterCompile(qhc, compileError);
      }
    }
  }

  /**
   * Dispatches {@link QueryLifeTimeHook#beforeExecution(QueryLifeTimeHookContext)}.
   *
   * @param command the Hive command that is being run
   * @param hookContext the {@link HookContext} of the command being run
   */
  void runBeforeExecutionHook(String command, HookContext hookContext) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command)
              .withHookContext(hookContext).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        hook.beforeExecution(qhc);
      }
    }
  }

  /**
   * Dispatches {@link QueryLifeTimeHook#afterExecution(QueryLifeTimeHookContext, boolean)}.
   *
   * @param command the Hive command that is being run
   * @param hookContext the {@link HookContext} of the command being run
   * @param executionError true if there was an error while executing the command, false otherwise
   */
  void runAfterExecutionHook(String command, HookContext hookContext, boolean executionError) {
    initialize();
    if (!queryHooks.isEmpty()) {
      QueryLifeTimeHookContext qhc =
          new QueryLifeTimeHookContextImpl.Builder().withHiveConf(conf).withCommand(command)
              .withHookContext(hookContext).build();

      for (QueryLifeTimeHook hook : queryHooks) {
        hook.afterExecution(qhc, executionError);
      }
    }
  }

  /**
   * Runs all semantic-analyzer pre-analyze hooks in order, threading the
   * (possibly rewritten) AST from one hook to the next.
   *
   * @return the AST produced by the last hook
   * @throws HiveException if any hook fails (non-Hive exceptions are wrapped)
   */
  public ASTNode runPreAnalyzeHooks(HiveSemanticAnalyzerHookContext hookCtx, ASTNode tree) throws HiveException {
    initialize();
    try {
      for (HiveSemanticAnalyzerHook hook : saHooks) {
        tree = hook.preAnalyze(hookCtx, tree);
      }
      return tree;
    } catch (HiveException e) {
      throw e;
    } catch (Exception e) {
      throw new HiveException("Error while invoking PreAnalyzeHooks:" + HiveStringUtils.stringifyException(e), e);
    }
  }

  public boolean hasPreAnalyzeHooks() {
    return !saHooks.isEmpty();
  }

  /**
   * Runs all semantic-analyzer post-analyze hooks against the root tasks
   * produced by compilation.
   *
   * @throws HiveException if any hook fails (non-Hive exceptions are wrapped)
   */
  public void runPostAnalyzeHooks(HiveSemanticAnalyzerHookContext hookCtx,
      List<Task<? extends Serializable>> allRootTasks) throws HiveException {
    initialize();
    try {
      for (HiveSemanticAnalyzerHook hook : saHooks) {
        hook.postAnalyze(hookCtx, allRootTasks);
      }
    } catch (HiveException e) {
      throw e;
    } catch (Exception e) {
      throw new HiveException("Error while invoking PostAnalyzeHooks:" + HiveStringUtils.stringifyException(e), e);
    }
  }

  /** Runs all driver-run hooks before the driver starts executing. */
  public void runPreDriverHooks(HiveDriverRunHookContext hookContext) throws HiveException {
    initialize();
    try {
      for (HiveDriverRunHook driverRunHook : driverRunHooks) {
        driverRunHook.preDriverRun(hookContext);
      }
    } catch (HiveException e) {
      throw e;
    } catch (Exception e) {
      throw new HiveException("Error while invoking PreDriverHooks:" + HiveStringUtils.stringifyException(e), e);
    }
  }

  /** Runs all driver-run hooks after the driver finishes executing. */
  public void runPostDriverHooks(HiveDriverRunHookContext hookContext) throws HiveException {
    initialize();
    try {
      for (HiveDriverRunHook driverRunHook : driverRunHooks) {
        driverRunHook.postDriverRun(hookContext);
      }
    } catch (HiveException e) {
      throw e;
    } catch (Exception e) {
      throw new HiveException("Error while invoking PostDriverHooks:" + HiveStringUtils.stringifyException(e), e);
    }
  }

  /** Runs the configured pre-execution hooks under the PerfLogger. */
  public void runPreHooks(HookContext hookContext) throws HiveException {
    initialize();
    invokeGeneralHook(preExecHooks, PerfLogger.PRE_HOOK, hookContext);
  }

  /** Runs the configured post-execution hooks under the PerfLogger. */
  public void runPostExecHooks(HookContext hookContext) throws HiveException {
    initialize();
    invokeGeneralHook(postExecHooks, PerfLogger.POST_HOOK, hookContext);
  }

  /** Runs the configured on-failure hooks under the PerfLogger. */
  public void runFailureHooks(HookContext hookContext) throws HiveException {
    initialize();
    invokeGeneralHook(onFailureHooks, PerfLogger.FAILURE_HOOK, hookContext);
  }

  // Shared driver for the three ExecuteWithHookContext hook lists: wraps each
  // hook invocation in a PerfLogger begin/end pair keyed by prefix + class
  // name, and wraps any non-Hive exception in a HiveException.
  private static void invokeGeneralHook(List<ExecuteWithHookContext> hooks, String prefix, HookContext hookContext)
      throws HiveException {
    if (hooks.isEmpty()) {
      return;
    }
    try {
      PerfLogger perfLogger = SessionState.getPerfLogger();

      for (ExecuteWithHookContext hook : hooks) {
        perfLogger.PerfLogBegin(CLASS_NAME, prefix + hook.getClass().getName());
        hook.run(hookContext);
        perfLogger.PerfLogEnd(CLASS_NAME, prefix + hook.getClass().getName());
      }
    } catch (HiveException e) {
      throw e;
    } catch (Exception e) {
      throw new HiveException("Error while invoking " + prefix + " hooks: " + HiveStringUtils.stringifyException(e), e);
    }
  }

  public void addLifeTimeHook(QueryLifeTimeHook hook) {
    queryHooks.add(hook);
  }

  public void addPreHook(ExecuteWithHookContext hook) {
    preExecHooks.add(hook);
  }

  public void addPostHook(ExecuteWithHookContext hook) {
    postExecHooks.add(hook);
  }

  public void addOnFailureHook(ExecuteWithHookContext hook) {
    onFailureHooks.add(hook);
  }

  public void addSemanticAnalyzerHook(HiveSemanticAnalyzerHook hook) {
    saHooks.add(hook);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.util;

import java.io.IOException;
import java.util.Arrays;
import java.util.Random;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.types.StringValue;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Utility class to convert objects into strings in vice-versa.
 */
@PublicEvolving
public final class StringUtils {

	/** Lower-case hex digits, indexed by nibble value. */
	private static final char[] HEX_CHARS = {
		'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' };

	/**
	 * Given an array of bytes it will convert the bytes to a hex string
	 * representation of the bytes.
	 *
	 * @param bytes
	 *        the bytes to convert in a hex string
	 * @param start
	 *        start index, inclusively
	 * @param end
	 *        end index, exclusively
	 * @return hex string representation of the byte array
	 * @throws IllegalArgumentException if {@code bytes} is null
	 */
	public static String byteToHexString(final byte[] bytes, final int start, final int end) {
		if (bytes == null) {
			throw new IllegalArgumentException("bytes == null");
		}

		int length = end - start;
		char[] out = new char[length * 2];

		// Two hex characters per byte: high nibble first, then low nibble.
		for (int i = start, j = 0; i < end; i++) {
			out[j++] = HEX_CHARS[(0xF0 & bytes[i]) >>> 4];
			out[j++] = HEX_CHARS[0x0F & bytes[i]];
		}
		return new String(out);
	}

	/**
	 * Given an array of bytes it will convert the bytes to a hex string
	 * representation of the bytes.
	 *
	 * @param bytes
	 *        the bytes to convert in a hex string
	 * @return hex string representation of the byte array
	 * @throws IllegalArgumentException if {@code bytes} is null
	 */
	public static String byteToHexString(final byte[] bytes) {
		// Check here as well: previously a null argument failed on 'bytes.length' with a bare
		// NullPointerException instead of the documented IllegalArgumentException.
		if (bytes == null) {
			throw new IllegalArgumentException("bytes == null");
		}
		return byteToHexString(bytes, 0, bytes.length);
	}

	/**
	 * Given a hex string this will return the byte array corresponding to the
	 * string .
	 *
	 * @param hex
	 *        the hex String array
	 * @return a byte array that is a hex string representation of the given
	 *         string. The size of the byte array is therefore hex.length/2
	 * @throws IllegalArgumentException if the string has an odd number of characters
	 *         (an odd-length input would otherwise silently drop its last nibble)
	 */
	public static byte[] hexStringToByte(final String hex) {
		if (hex.length() % 2 != 0) {
			throw new IllegalArgumentException(
				"hex string needs an even number of characters: " + hex);
		}

		final byte[] bts = new byte[hex.length() / 2];
		for (int i = 0; i < bts.length; i++) {
			bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
		}
		return bts;
	}

	/**
	 * This method calls {@link Object#toString()} on the given object, unless the
	 * object is an array. In that case, it will use the {@link #arrayToString(Object)}
	 * method to create a string representation of the array that includes all contained
	 * elements.
	 *
	 * @param o The object for which to create the string representation.
	 * @return The string representation of the object.
	 */
	public static String arrayAwareToString(Object o) {
		if (o == null) {
			return "null";
		}
		if (o.getClass().isArray()) {
			return arrayToString(o);
		}

		return o.toString();
	}

	/**
	 * Returns a string representation of the given array. This method takes an Object
	 * to allow also all types of primitive type arrays.
	 *
	 * @param array The array to create a string representation for.
	 * @return The string representation of the array.
	 * @throws IllegalArgumentException If the given object is no array.
	 */
	public static String arrayToString(Object array) {
		if (array == null) {
			throw new NullPointerException();
		}

		// Dispatch on the concrete component type; Arrays.toString is overloaded per primitive.
		if (array instanceof int[]) {
			return Arrays.toString((int[]) array);
		}
		if (array instanceof long[]) {
			return Arrays.toString((long[]) array);
		}
		if (array instanceof Object[]) {
			return Arrays.toString((Object[]) array);
		}
		if (array instanceof byte[]) {
			return Arrays.toString((byte[]) array);
		}
		if (array instanceof double[]) {
			return Arrays.toString((double[]) array);
		}
		if (array instanceof float[]) {
			return Arrays.toString((float[]) array);
		}
		if (array instanceof boolean[]) {
			return Arrays.toString((boolean[]) array);
		}
		if (array instanceof char[]) {
			return Arrays.toString((char[]) array);
		}
		if (array instanceof short[]) {
			return Arrays.toString((short[]) array);
		}

		if (array.getClass().isArray()) {
			return "<unknown array type>";
		} else {
			throw new IllegalArgumentException("The given argument is no array.");
		}
	}

	/**
	 * Replaces control characters by their escape-coded version. For example,
	 * if the string contains a line break character ('\n'), this character will
	 * be replaced by the two characters backslash '\' and 'n'. As a consequence, the
	 * resulting string will not contain any more control characters.
	 *
	 * @param str The string in which to replace the control characters.
	 * @return The string with the replaced characters.
	 */
	public static String showControlCharacters(String str) {
		int len = str.length();
		StringBuilder sb = new StringBuilder();

		for (int i = 0; i < len; i += 1) {
			char c = str.charAt(i);
			switch (c) {
			case '\b':
				sb.append("\\b");
				break;
			case '\t':
				sb.append("\\t");
				break;
			case '\n':
				sb.append("\\n");
				break;
			case '\f':
				sb.append("\\f");
				break;
			case '\r':
				sb.append("\\r");
				break;
			default:
				sb.append(c);
			}
		}

		return sb.toString();
	}

	/**
	 * Creates a random string with a length within the given interval. The string contains only characters that
	 * can be represented as a single code point.
	 *
	 * @param rnd The random used to create the strings.
	 * @param minLength The minimum string length.
	 * @param maxLength The maximum string length (inclusive).
	 * @return A random String.
	 */
	public static String getRandomString(Random rnd, int minLength, int maxLength) {
		int len = rnd.nextInt(maxLength - minLength + 1) + minLength;

		char[] data = new char[len];
		for (int i = 0; i < data.length; i++) {
			// Characters in [1, 0x7fff]: never 0, always a single code point (no surrogates).
			data[i] = (char) (rnd.nextInt(0x7fff) + 1);
		}
		return new String(data);
	}

	/**
	 * Creates a random string with a length within the given interval. The string contains only characters that
	 * can be represented as a single code point.
	 *
	 * @param rnd The random used to create the strings.
	 * @param minLength The minimum string length.
	 * @param maxLength The maximum string length (inclusive).
	 * @param minValue The minimum character value to occur.
	 * @param maxValue The maximum character value to occur.
	 * @return A random String.
	 */
	public static String getRandomString(Random rnd, int minLength, int maxLength, char minValue, char maxValue) {
		int len = rnd.nextInt(maxLength - minLength + 1) + minLength;

		char[] data = new char[len];
		int diff = maxValue - minValue + 1;

		for (int i = 0; i < data.length; i++) {
			data[i] = (char) (rnd.nextInt(diff) + minValue);
		}
		return new String(data);
	}

	/**
	 * Writes a String to the given output.
	 * The written string can be read with {@link #readString(DataInputView)}.
	 *
	 * @param str The string to write
	 * @param out The output to write to
	 *
	 * @throws IOException Thrown, if the writing or the serialization fails.
	 */
	public static void writeString(@Nonnull String str, DataOutputView out) throws IOException {
		checkNotNull(str);
		StringValue.writeString(str, out);
	}

	/**
	 * Reads a non-null String from the given input.
	 *
	 * @param in The input to read from
	 * @return The deserialized String
	 *
	 * @throws IOException Thrown, if the reading or the deserialization fails.
	 */
	public static String readString(DataInputView in) throws IOException {
		return StringValue.readString(in);
	}

	/**
	 * Writes a String to the given output. The string may be null.
	 * The written string can be read with {@link #readNullableString(DataInputView)}-
	 *
	 * @param str The string to write, or null.
	 * @param out The output to write to.
	 *
	 * @throws IOException Thrown, if the writing or the serialization fails.
	 */
	public static void writeNullableString(@Nullable String str, DataOutputView out) throws IOException {
		// A leading boolean flag records presence so the reader can distinguish null from "".
		if (str != null) {
			out.writeBoolean(true);
			writeString(str, out);
		} else {
			out.writeBoolean(false);
		}
	}

	/**
	 * Reads a String from the given input. The string may be null and must have been written with
	 * {@link #writeNullableString(String, DataOutputView)}.
	 *
	 * @param in The input to read from.
	 * @return The deserialized string, or null.
	 *
	 * @throws IOException Thrown, if the reading or the deserialization fails.
	 */
	public static @Nullable String readNullableString(DataInputView in) throws IOException {
		if (in.readBoolean()) {
			return readString(in);
		} else {
			return null;
		}
	}

	/**
	 * Checks if the string is null, empty, or contains only whitespace characters.
	 * A whitespace character is defined via {@link Character#isWhitespace(char)}.
	 *
	 * @param str The string to check
	 * @return True, if the string is null or blank, false otherwise.
	 */
	public static boolean isNullOrWhitespaceOnly(String str) {
		if (str == null || str.length() == 0) {
			return true;
		}

		final int len = str.length();
		for (int i = 0; i < len; i++) {
			if (!Character.isWhitespace(str.charAt(i))) {
				return false;
			}
		}
		return true;
	}

	// ------------------------------------------------------------------------

	/** Prevent instantiation of this utility class */
	private StringUtils() {}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.metrics;

import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants;
import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants;
import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

/**
 * This class is responsible for posting application, appattempt &amp; Container
 * lifecycle related events to timeline service v1.
 */
public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {

  private static final Logger LOG =
      LoggerFactory.getLogger(TimelineServiceV1Publisher.class);

  public TimelineServiceV1Publisher() {
    super("TimelineserviceV1Publisher");
  }

  // Timeline client used by putEntity(); created lazily in serviceInit().
  private TimelineClient client;

  /**
   * Creates the timeline client, registers it as a managed sub-service, and
   * subscribes the async publish handler on the dispatcher.
   */
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    client = TimelineClient.createTimelineClient();
    addIfService(client);
    super.serviceInit(conf);
    getDispatcher().register(SystemMetricsEventType.class,
        new TimelineV1EventHandler());
  }

  /**
   * Publishes an application-created event carrying the app's static metadata
   * (name, type, user, queue, tags, labels, caller context, AM launch command).
   */
  @SuppressWarnings("unchecked")
  @Override
  public void appCreated(RMApp app, long createdTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ApplicationMetricsConstants.NAME_ENTITY_INFO, app.getName());
    entityInfo.put(ApplicationMetricsConstants.TYPE_ENTITY_INFO,
        app.getApplicationType());
    entityInfo.put(ApplicationMetricsConstants.USER_ENTITY_INFO, app.getUser());
    entityInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO,
        app.getQueue());
    entityInfo.put(ApplicationMetricsConstants.SUBMITTED_TIME_ENTITY_INFO,
        app.getSubmitTime());
    entityInfo.put(ApplicationMetricsConstants.APP_TAGS_INFO,
        app.getApplicationTags());
    entityInfo.put(
        ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO,
        app.getApplicationSubmissionContext().getUnmanagedAM());
    entityInfo.put(ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO,
        app.getApplicationPriority().getPriority());
    entityInfo.put(ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION,
        app.getAmNodeLabelExpression());
    entityInfo.put(ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION,
        app.getAppNodeLabelExpression());
    // Caller context fields are optional; only publish the parts that are set.
    if (app.getCallerContext() != null) {
      if (app.getCallerContext().getContext() != null) {
        entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_CONTEXT,
            app.getCallerContext().getContext());
      }
      if (app.getCallerContext().getSignature() != null) {
        entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_SIGNATURE,
            app.getCallerContext().getSignature());
      }
    }
    ContainerLaunchContext amContainerSpec =
        app.getApplicationSubmissionContext().getAMContainerSpec();
    entityInfo.put(ApplicationMetricsConstants.AM_CONTAINER_LAUNCH_COMMAND,
        amContainerSpec.getCommands());
    entityInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
        RMServerUtils.createApplicationState(app.getState()).toString());
    entity.setOtherInfo(entityInfo);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
    tEvent.setTimestamp(createdTime);
    entity.addEvent(tEvent);
    // Publishing is asynchronous: the entity is handed to the dispatcher, not sent inline.
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /** Publishes an application-launched event (timestamp only, no extra info). */
  @Override
  public void appLaunched(RMApp app, long launchTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.LAUNCHED_EVENT_TYPE);
    tEvent.setTimestamp(launchTime);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /**
   * Publishes an application-finished event with diagnostics, final status,
   * last attempt id (if any) and accumulated resource-usage metrics.
   */
  @Override
  public void appFinished(RMApp app, RMAppState state, long finishedTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO,
        app.getDiagnostics().toString());
    eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO,
        app.getFinalApplicationStatus().toString());
    eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
        RMServerUtils.createApplicationState(state).toString());
    String latestApplicationAttemptId = app.getCurrentAppAttempt() == null
        ? null : app.getCurrentAppAttempt().getAppAttemptId().toString();
    if (latestApplicationAttemptId != null) {
      eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO,
          latestApplicationAttemptId);
    }
    RMAppMetrics appMetrics = app.getRMAppMetrics();
    entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_METRICS,
        appMetrics.getVcoreSeconds());
    entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_METRICS,
        appMetrics.getMemorySeconds());
    entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_PREEMPT_METRICS,
        appMetrics.getPreemptedMemorySeconds());
    entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_PREEMPT_METRICS,
        appMetrics.getPreemptedVcoreSeconds());
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /** Publishes an application-updated event (queue and priority changes). */
  @SuppressWarnings("unchecked")
  @Override
  public void appUpdated(RMApp app, long updatedTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO,
        app.getQueue());
    eventInfo.put(ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO,
        app.getApplicationPriority().getPriority());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.UPDATED_EVENT_TYPE);
    tEvent.setTimestamp(updatedTime);
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /** Publishes a state-transition event for the application. */
  @SuppressWarnings("unchecked")
  @Override
  public void appStateUpdated(RMApp app, YarnApplicationState appState,
      long updatedTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
        appState);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.STATE_UPDATED_EVENT_TYPE);
    tEvent.setTimestamp(updatedTime);
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /** Publishes an ACLs-updated event; a null view ACL is stored as "". */
  @SuppressWarnings("unchecked")
  @Override
  public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    TimelineEvent tEvent = new TimelineEvent();
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO,
        (appViewACLs == null) ? "" : appViewACLs);
    entity.setOtherInfo(entityInfo);
    tEvent.setEventType(ApplicationMetricsConstants.ACLS_UPDATED_EVENT_TYPE);
    tEvent.setTimestamp(updatedTime);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
  }

  /**
   * Publishes an attempt-registered event with tracking URLs, host/RPC port
   * and (when allocated) the AM container id.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void appAttemptRegistered(RMAppAttempt appAttempt,
      long registeredTime) {
    TimelineEntity entity =
        createAppAttemptEntity(appAttempt.getAppAttemptId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
    tEvent.setTimestamp(registeredTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO,
        appAttempt.getTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO,
        appAttempt.getOriginalTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.HOST_INFO,
        appAttempt.getHost());
    eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_INFO,
        appAttempt.getRpcPort());
    if (appAttempt.getMasterContainer() != null) {
      eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO,
          appAttempt.getMasterContainer().getId().toString());
    }
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(
        new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY,
            entity, appAttempt.getAppAttemptId().getApplicationId()));
  }

  /**
   * Publishes an attempt-finished event with final status, diagnostics and the
   * attempt's terminal state.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void appAttemptFinished(RMAppAttempt appAttempt,
      RMAppAttemptState appAttemtpState, RMApp app, long finishedTime) {
    TimelineEntity entity =
        createAppAttemptEntity(appAttempt.getAppAttemptId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO,
        appAttempt.getTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO,
        appAttempt.getOriginalTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO,
        appAttempt.getDiagnostics());
    // Final status is read from the app, not the attempt (attempt may not carry it).
    eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO,
        app.getFinalApplicationStatus().toString());
    eventInfo.put(AppAttemptMetricsConstants.STATE_INFO, RMServerUtils
        .createApplicationAttemptState(appAttemtpState).toString());
    if (appAttempt.getMasterContainer() != null) {
      eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO,
          appAttempt.getMasterContainer().getId().toString());
    }
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(
        new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY,
            entity, appAttempt.getAppAttemptId().getApplicationId()));
  }

  /**
   * Publishes a container-created event carrying the allocation details
   * (resources, node, priority, node HTTP address).
   */
  @SuppressWarnings("unchecked")
  @Override
  public void containerCreated(RMContainer container, long createdTime) {
    TimelineEntity entity = createContainerEntity(container.getContainerId());
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
        container.getAllocatedResource().getMemorySize());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
        container.getAllocatedResource().getVirtualCores());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
        container.getAllocatedNode().getHost());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
        container.getAllocatedNode().getPort());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
        container.getAllocatedPriority().getPriority());
    entityInfo.put(
        ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
        container.getNodeHttpAddress());
    entity.setOtherInfo(entityInfo);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ContainerMetricsConstants.CREATED_EVENT_TYPE);
    tEvent.setTimestamp(createdTime);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, container
            .getContainerId().getApplicationAttemptId().getApplicationId()));
  }

  /**
   * Publishes a container-finished event with diagnostics, exit status and
   * final state, re-attaching the allocation node for attribution.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void containerFinished(RMContainer container, long finishedTime) {
    TimelineEntity entity = createContainerEntity(container.getContainerId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ContainerMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(ContainerMetricsConstants.DIAGNOSTICS_INFO,
        container.getDiagnosticsInfo());
    eventInfo.put(ContainerMetricsConstants.EXIT_STATUS_INFO,
        container.getContainerExitStatus());
    eventInfo.put(ContainerMetricsConstants.STATE_INFO,
        container.getContainerState().toString());
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
        container.getAllocatedNode().getHost());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
        container.getAllocatedNode().getPort());
    entity.setOtherInfo(entityInfo);
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
        SystemMetricsEventType.PUBLISH_ENTITY, entity, container
            .getContainerId().getApplicationAttemptId().getApplicationId()));
  }

  /** Builds a bare application timeline entity keyed by the application id. */
  private static TimelineEntity createApplicationEntity(
      ApplicationId applicationId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(ApplicationMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(applicationId.toString());
    return entity;
  }

  /** Builds an attempt entity with a primary filter pointing at its application. */
  private static TimelineEntity createAppAttemptEntity(
      ApplicationAttemptId appAttemptId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(appAttemptId.toString());
    entity.addPrimaryFilter(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER,
        appAttemptId.getApplicationId().toString());
    return entity;
  }

  /** Builds a container entity with a primary filter pointing at its attempt. */
  private static TimelineEntity createContainerEntity(ContainerId containerId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(ContainerMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(containerId.toString());
    // Note: PARENT_PRIMARIY_FILTER is the constant's actual (misspelled) name upstream.
    entity.addPrimaryFilter(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
        containerId.getApplicationAttemptId().toString());
    return entity;
  }

  /**
   * Pushes one entity to the timeline service. Failures are logged and
   * swallowed so a flaky timeline server cannot break RM event handling.
   */
  private void putEntity(TimelineEntity entity) {
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Publishing the entity " + entity.getEntityId()
            + ", JSON-style content: "
            + TimelineUtils.dumpTimelineRecordtoJSON(entity));
      }
      client.putEntities(entity);
    } catch (Exception e) {
      LOG.error("Error when publishing entity [" + entity.getEntityType() + ","
          + entity.getEntityId() + "]", e);
    }
  }

  /** Dispatcher event that carries one entity to be published asynchronously. */
  private class TimelineV1PublishEvent extends TimelinePublishEvent {
    private TimelineEntity entity;

    public TimelineV1PublishEvent(SystemMetricsEventType type,
        TimelineEntity entity, ApplicationId appId) {
      super(type, appId);
      this.entity = entity;
    }

    public TimelineEntity getEntity() {
      return entity;
    }
  }

  /** Drains publish events off the dispatcher and forwards them to putEntity(). */
  private class TimelineV1EventHandler
      implements EventHandler<TimelineV1PublishEvent> {
    @Override
    public void handle(TimelineV1PublishEvent event) {
      putEntity(event.getEntity());
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.testing; import com.google.common.collect.ImmutableMap; import io.airlift.units.Duration; import io.prestosql.Session; import io.prestosql.client.ClientSelectedRole; import io.prestosql.client.ClientSession; import io.prestosql.client.Column; import io.prestosql.client.QueryError; import io.prestosql.client.QueryStatusInfo; import io.prestosql.client.StatementClient; import io.prestosql.metadata.MetadataUtil; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.metadata.QualifiedTablePrefix; import io.prestosql.server.testing.TestingPrestoServer; import io.prestosql.spi.QueryId; import io.prestosql.spi.session.ResourceEstimates; import io.prestosql.spi.type.Type; import okhttp3.OkHttpClient; import org.intellij.lang.annotations.Language; import java.io.Closeable; import java.net.URI; import java.time.ZoneId; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static io.prestosql.client.StatementClientFactory.newStatementClient; import static io.prestosql.spi.session.ResourceEstimates.CPU_TIME; import static io.prestosql.spi.session.ResourceEstimates.EXECUTION_TIME; import static 
io.prestosql.spi.session.ResourceEstimates.PEAK_MEMORY;
import static io.prestosql.transaction.TransactionBuilder.transaction;
import static java.util.Objects.requireNonNull;

/**
 * Base class for test clients that run SQL against a {@link TestingPrestoServer}
 * over HTTP and collect the streamed results into a caller-defined shape {@code T}.
 */
public abstract class AbstractTestingPrestoClient<T>
        implements Closeable
{
    private final TestingPrestoServer prestoServer;
    private final Session defaultSession;

    // Shared HTTP client for all queries issued by this test client; torn down in close().
    private final OkHttpClient httpClient = new OkHttpClient();

    protected AbstractTestingPrestoClient(TestingPrestoServer prestoServer, Session defaultSession)
    {
        this.prestoServer = requireNonNull(prestoServer, "prestoServer is null");
        this.defaultSession = requireNonNull(defaultSession, "defaultSession is null");
    }

    /** Releases the HTTP executor and connection pool held by this client. */
    @Override
    public void close()
    {
        httpClient.dispatcher().executorService().shutdown();
        httpClient.connectionPool().evictAll();
    }

    /** Factory for the per-query result accumulator; implemented by concrete test clients. */
    protected abstract ResultsSession<T> getResultSession(Session session);

    /** Runs {@code sql} under the default session. */
    public ResultWithQueryId<T> execute(@Language("SQL") String sql)
    {
        return execute(defaultSession, sql);
    }

    /**
     * Runs {@code sql} under the given session, streaming all result pages into a
     * fresh {@link ResultsSession}.
     *
     * @return the accumulated result together with the server-assigned query id
     * @throws RuntimeException if the query fails; the remote failure is attached as cause
     */
    public ResultWithQueryId<T> execute(Session session, @Language("SQL") String sql)
    {
        ResultsSession<T> resultsSession = getResultSession(session);

        ClientSession clientSession = toClientSession(session, prestoServer.getBaseUrl(), new Duration(2, TimeUnit.MINUTES));

        try (StatementClient client = newStatementClient(httpClient, clientSession, sql)) {
            // Drain every page the server produces before inspecting the final status.
            while (client.isRunning()) {
                resultsSession.addResults(client.currentStatusInfo(), client.currentData());
                client.advance();
            }

            checkState(client.isFinished());
            QueryError error = client.finalStatusInfo().getError();

            if (error == null) {
                QueryStatusInfo results = client.finalStatusInfo();
                if (results.getUpdateType() != null) {
                    resultsSession.setUpdateType(results.getUpdateType());
                }
                if (results.getUpdateCount() != null) {
                    resultsSession.setUpdateCount(results.getUpdateCount());
                }

                resultsSession.setWarnings(results.getWarnings());

                T result = resultsSession.build(client.getSetSessionProperties(), client.getResetSessionProperties());
                return new ResultWithQueryId<>(new QueryId(results.getId()), result);
            }

            if (error.getFailureInfo() != null) {
                RuntimeException remoteException = error.getFailureInfo().toException();
                throw new RuntimeException(Optional.ofNullable(remoteException.getMessage()).orElseGet(remoteException::toString), remoteException);
            }
            throw new RuntimeException("Query failed: " + error.getMessage());

            // dump query info to console for debugging (NOTE: not pretty printed)
            // JsonCodec<QueryInfo> queryInfoJsonCodec = createCodecFactory().prettyPrint().jsonCodec(QueryInfo.class);
            // log.info("\n" + queryInfoJsonCodec.toJson(queryInfo));
        }
    }

    /**
     * Flattens a server-side {@link Session} into the wire-level {@link ClientSession},
     * prefixing catalog properties with their catalog name and stringifying resource estimates.
     */
    private static ClientSession toClientSession(Session session, URI server, Duration clientRequestTimeout)
    {
        ImmutableMap.Builder<String, String> properties = ImmutableMap.builder();
        properties.putAll(session.getSystemProperties());
        for (Entry<String, Map<String, String>> connectorProperties : session.getUnprocessedCatalogProperties().entrySet()) {
            for (Entry<String, String> entry : connectorProperties.getValue().entrySet()) {
                // Catalog-scoped keys are encoded as "<catalog>.<property>" on the wire.
                properties.put(connectorProperties.getKey() + "." + entry.getKey(), entry.getValue());
            }
        }

        ImmutableMap.Builder<String, String> resourceEstimates = ImmutableMap.builder();
        ResourceEstimates estimates = session.getResourceEstimates();
        estimates.getExecutionTime().ifPresent(e -> resourceEstimates.put(EXECUTION_TIME, e.toString()));
        estimates.getCpuTime().ifPresent(e -> resourceEstimates.put(CPU_TIME, e.toString()));
        estimates.getPeakMemoryBytes().ifPresent(e -> resourceEstimates.put(PEAK_MEMORY, e.toString()));

        return new ClientSession(
                server,
                session.getIdentity().getUser(),
                session.getSource().orElse(null),
                session.getTraceToken(),
                session.getClientTags(),
                session.getClientInfo().orElse(null),
                session.getCatalog().orElse(null),
                session.getSchema().orElse(null),
                session.getPath().toString(),
                ZoneId.of(session.getTimeZoneKey().getId()),
                false,
                session.getLocale(),
                resourceEstimates.build(),
                properties.build(),
                session.getPreparedStatements(),
                session.getIdentity().getRoles().entrySet().stream()
                        .collect(toImmutableMap(Entry::getKey, entry -> new ClientSelectedRole(
                                ClientSelectedRole.Type.valueOf(entry.getValue().getType().toString()),
                                entry.getValue().getRole()))),
                session.getIdentity().getExtraCredentials(),
                session.getTransactionId().map(Object::toString).orElse(null),
                clientRequestTimeout);
    }

    /** Lists the tables of {@code catalog.schema} inside a read-only transaction. */
    public List<QualifiedObjectName> listTables(Session session, String catalog, String schema)
    {
        return transaction(prestoServer.getTransactionManager(), prestoServer.getAccessControl())
                .readOnly()
                .execute(session, transactionSession -> {
                    return prestoServer.getMetadata().listTables(transactionSession, new QualifiedTablePrefix(catalog, schema));
                });
    }

    /** Checks for the existence of {@code table} inside a read-only transaction. */
    public boolean tableExists(Session session, String table)
    {
        return transaction(prestoServer.getTransactionManager(), prestoServer.getAccessControl())
                .readOnly()
                .execute(session, transactionSession -> {
                    return MetadataUtil.tableExists(prestoServer.getMetadata(), transactionSession, table);
                });
    }

    public Session getDefaultSession()
    {
        return defaultSession;
    }

    public TestingPrestoServer getServer()
    {
        return prestoServer;
    }

    /** Resolves the declared column type names of a result into engine {@link Type}s. */
    protected List<Type> getTypes(List<Column> columns)
    {
        return columns.stream()
                .map(Column::getType)
                .map(prestoServer.getMetadata()::fromSqlType)
                .collect(toImmutableList());
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.util.StringUtils.humanReadableInt;

import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.io.util.HeapMemorySizeUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

import com.google.common.base.Preconditions;

/**
 * Thread that flushes cache on request
 *
 * NOTE: This class extends Thread rather than Chore because the sleep time
 * can be interrupted when there is something to do, rather than the Chore
 * sleep time which is invariant.
 * (NOTE(review): the declaration below no longer extends Thread — flushing is
 * performed by a pool of {@code FlushHandler} worker threads polling a delay
 * queue; the paragraph above looks stale.)
 *
 * @see FlushRequester
 */
@InterfaceAudience.Private
class MemStoreFlusher implements FlushRequester {
  static final Log LOG = LogFactory.getLog(MemStoreFlusher.class);

  private Configuration conf;

  // These two data members go together.  Any entry in the one must have
  // a corresponding entry in the other.
  private final BlockingQueue<FlushQueueEntry> flushQueue =
    new DelayQueue<FlushQueueEntry>();
  private final Map<HRegion, FlushRegionEntry> regionsInQueue =
    new HashMap<HRegion, FlushRegionEntry>();
  // true while a WakeupFlushThread token is already enqueued; prevents piling
  // up wakeup tokens in the flush queue
  private AtomicBoolean wakeupPending = new AtomicBoolean();

  // how long a handler waits on flushQueue.poll() before checking memory pressure
  private final long threadWakeFrequency;
  private final HRegionServer server;
  // read-locked while a flush runs; write-locked only to interrupt handlers
  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
  // monitor used to block/unblock client updates while above the high water mark
  private final Object blockSignal = new Object();

  // upper bound on total memstore heap; updates block when reached
  protected long globalMemStoreLimit;
  protected float globalMemStoreLimitLowMarkPercent;
  // flushing starts (without blocking updates) once usage crosses this mark
  protected long globalMemStoreLimitLowMark;

  // max time to delay a flush waiting for compactions to reduce store file count
  private long blockingWaitTime;
  // cumulative milliseconds updates have been blocked above the high water mark
  private final Counter updatesBlockedMsHighWater = new Counter();

  private final FlushHandler[] flushHandlers;
  private List<FlushRequestListener> flushRequestListeners = new ArrayList<FlushRequestListener>(1);

  /**
   * Computes the global memstore limits from the configured heap fraction and
   * sizes the flush-handler pool ({@code hbase.hstore.flusher.count}, default 2).
   *
   * @param conf
   * @param server
   */
  public MemStoreFlusher(final Configuration conf,
      final HRegionServer server) {
    super();
    this.conf = conf;
    this.server = server;
    this.threadWakeFrequency =
      conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
    long max = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
    float globalMemStorePercent = HeapMemorySizeUtil.getGlobalMemStorePercent(conf, true);
    this.globalMemStoreLimit = (long) (max * globalMemStorePercent);
    this.globalMemStoreLimitLowMarkPercent =
        HeapMemorySizeUtil.getGlobalMemStoreLowerMark(conf, globalMemStorePercent);
    this.globalMemStoreLimitLowMark =
        (long) (this.globalMemStoreLimit * this.globalMemStoreLimitLowMarkPercent);

    this.blockingWaitTime = conf.getInt("hbase.hstore.blockingWaitTime",
      90000);
    int handlerCount = conf.getInt("hbase.hstore.flusher.count", 2);
    this.flushHandlers = new FlushHandler[handlerCount];
    LOG.info("globalMemStoreLimit="
        + TraditionalBinaryPrefix.long2String(this.globalMemStoreLimit, "", 1)
        + ", globalMemStoreLimitLowMark="
        + TraditionalBinaryPrefix.long2String(this.globalMemStoreLimitLowMark, "", 1)
        + ", maxHeap=" + TraditionalBinaryPrefix.long2String(max, "", 1));
  }

  /** Returns the counter of milliseconds updates have been blocked at the high water mark. */
  public Counter getUpdatesBlockedMsHighWater() {
    return this.updatesBlockedMsHighWater;
  }

  /**
   * The memstore across all regions has exceeded the low water mark. Pick
   * one region to flush and flush it synchronously (this is called from the
   * flush thread)
   * @return true if successful
   */
  private boolean flushOneForGlobalPressure() {
    SortedMap<Long, HRegion> regionsBySize = server.getCopyOfOnlineRegionsSortedBySize();
    Set<HRegion> excludedRegions = new HashSet<HRegion>();

    double secondaryMultiplier
      = ServerRegionReplicaUtil.getRegionReplicaStoreFileRefreshMultiplier(conf);

    boolean flushedOne = false;
    while (!flushedOne) {
      // Find the biggest region that doesn't have too many storefiles
      // (might be null!)
      HRegion bestFlushableRegion = getBiggestMemstoreRegion(
          regionsBySize, excludedRegions, true);
      // Find the biggest region, total, even if it might have too many flushes.
      HRegion bestAnyRegion = getBiggestMemstoreRegion(
          regionsBySize, excludedRegions, false);
      // Find the biggest region that is a secondary region
      HRegion bestRegionReplica = getBiggestMemstoreOfRegionReplica(regionsBySize,
        excludedRegions);

      if (bestAnyRegion == null && bestRegionReplica == null) {
        LOG.error("Above memory mark but there are no flushable regions!");
        return false;
      }

      HRegion regionToFlush;
      if (bestFlushableRegion != null &&
          bestAnyRegion.memstoreSize.get() > 2 * bestFlushableRegion.memstoreSize.get()) {
        // Even if it's not supposed to be flushed, pick a region if it's more than twice
        // as big as the best flushable one - otherwise when we're under pressure we make
        // lots of little flushes and cause lots of compactions, etc, which just makes
        // life worse!
        if (LOG.isDebugEnabled()) {
          LOG.debug("Under global heap pressure: " + "Region "
              + bestAnyRegion.getRegionNameAsString() + " has too many " + "store files, but is "
              + TraditionalBinaryPrefix.long2String(bestAnyRegion.memstoreSize.get(), "", 1)
              + " vs best flushable region's "
              + TraditionalBinaryPrefix.long2String(bestFlushableRegion.memstoreSize.get(), "", 1)
              + ". Choosing the bigger.");
        }
        regionToFlush = bestAnyRegion;
      } else {
        if (bestFlushableRegion == null) {
          regionToFlush = bestAnyRegion;
        } else {
          regionToFlush = bestFlushableRegion;
        }
      }

      Preconditions.checkState(
        (regionToFlush != null && regionToFlush.memstoreSize.get() > 0) ||
        (bestRegionReplica != null && bestRegionReplica.memstoreSize.get() > 0));

      // Prefer refreshing a secondary-replica's store files over flushing when the
      // replica's memstore dwarfs the chosen primary's (refresh lets it drop memstore)
      if (regionToFlush == null ||
          (bestRegionReplica != null &&
           ServerRegionReplicaUtil.isRegionReplicaStoreFileRefreshEnabled(conf) &&
           (bestRegionReplica.memstoreSize.get()
               > secondaryMultiplier * regionToFlush.memstoreSize.get()))) {
        LOG.info("Refreshing storefiles of region " + regionToFlush
            + " due to global heap pressure. memstore size=" + StringUtils.humanReadableInt(
              server.getRegionServerAccounting().getGlobalMemstoreSize()));
        flushedOne = refreshStoreFilesAndReclaimMemory(bestRegionReplica);
        if (!flushedOne) {
          LOG.info("Excluding secondary region " + regionToFlush +
              " - trying to find a different region to refresh files.");
          excludedRegions.add(bestRegionReplica);
        }
      } else {
        LOG.info("Flush of region " + regionToFlush + " due to global heap pressure. "
            + "Total Memstore size="
            + humanReadableInt(server.getRegionServerAccounting().getGlobalMemstoreSize())
            + ", Region memstore size="
            + humanReadableInt(regionToFlush.memstoreSize.get()));
        flushedOne = flushRegion(regionToFlush, true, true);

        if (!flushedOne) {
          LOG.info("Excluding unflushable region " + regionToFlush +
              " - trying to find a different region to flush.");
          excludedRegions.add(regionToFlush);
        }
      }
    }
    return true;
  }

  /**
   * Worker thread: drains the flush queue, and on wakeup tokens/timeouts checks
   * global memory pressure and flushes one region at a time until below the low mark.
   */
  private class FlushHandler extends HasThread {

    private FlushHandler(String name) {
      super(name);
    }

    @Override
    public void run() {
      while (!server.isStopped()) {
        FlushQueueEntry fqe = null;
        try {
          wakeupPending.set(false); // allow someone to wake us up again
          fqe = flushQueue.poll(threadWakeFrequency, TimeUnit.MILLISECONDS);
          if (fqe == null || fqe instanceof WakeupFlushThread) {
            if (isAboveLowWaterMark()) {
              LOG.debug("Flush thread woke up because memory above low water="
                  + TraditionalBinaryPrefix.long2String(globalMemStoreLimitLowMark, "", 1));
              if (!flushOneForGlobalPressure()) {
                // Wasn't able to flush any region, but we're above low water mark
                // This is unlikely to happen, but might happen when closing the
                // entire server - another thread is flushing regions. We'll just
                // sleep a little bit to avoid spinning, and then pretend that
                // we flushed one, so anyone blocked will check again
                Thread.sleep(1000);
                wakeUpIfBlocking();
              }
              // Enqueue another one of these tokens so we'll wake up again
              wakeupFlushThread();
            }
            continue;
          }
          FlushRegionEntry fre = (FlushRegionEntry) fqe;
          if (!flushRegion(fre)) {
            // flushRegion returning false means the server aborted; exit the loop
            break;
          }
        } catch (InterruptedException ex) {
          continue;
        } catch (ConcurrentModificationException ex) {
          continue;
        } catch (Exception ex) {
          LOG.error("Cache flusher failed for entry " + fqe, ex);
          if (!server.checkFileSystem()) {
            break;
          }
        }
      }
      // On shutdown: drop all pending work (both structures cleared together,
      // preserving the regionsInQueue/flushQueue invariant)
      synchronized (regionsInQueue) {
        regionsInQueue.clear();
        flushQueue.clear();
      }

      // Signal anyone waiting, so they see the close flag
      wakeUpIfBlocking();
      LOG.info(getName() + " exiting");
    }
  }

  // Enqueue a single wakeup token unless one is already pending.
  private void wakeupFlushThread() {
    if (wakeupPending.compareAndSet(false, true)) {
      flushQueue.add(new WakeupFlushThread());
    }
  }

  /**
   * Returns the first eligible region from {@code regionsBySize} (iteration order of
   * the given map; presumably largest-memstore first — supplied by the caller),
   * skipping excluded regions, regions already flushing or with writes disabled,
   * and optionally regions with too many store files.
   */
  private HRegion getBiggestMemstoreRegion(
      SortedMap<Long, HRegion> regionsBySize,
      Set<HRegion> excludedRegions,
      boolean checkStoreFileCount) {
    synchronized (regionsInQueue) {
      for (HRegion region : regionsBySize.values()) {
        if (excludedRegions.contains(region)) {
          continue;
        }

        if (region.writestate.flushing || !region.writestate.writesEnabled) {
          continue;
        }

        if (checkStoreFileCount && isTooManyStoreFiles(region)) {
          continue;
        }
        return region;
      }
    }
    return null;
  }

  /**
   * Like {@link #getBiggestMemstoreRegion} but only considers secondary region
   * replicas (default replicas are skipped).
   */
  private HRegion getBiggestMemstoreOfRegionReplica(SortedMap<Long, HRegion> regionsBySize,
      Set<HRegion> excludedRegions) {
    synchronized (regionsInQueue) {
      for (HRegion region : regionsBySize.values()) {
        if (excludedRegions.contains(region)) {
          continue;
        }

        if (RegionReplicaUtil.isDefaultReplica(region.getRegionInfo())) {
          continue;
        }

        return region;
      }
    }
    return null;
  }

  // Best-effort store file refresh for a secondary replica; IOException is
  // logged and treated as "nothing reclaimed".
  private boolean refreshStoreFilesAndReclaimMemory(HRegion region) {
    try {
      return region.refreshStoreFiles();
    } catch (IOException e) {
      LOG.warn("Refreshing store files failed with exception", e);
    }
    return false;
  }

  /**
   * Return true if global memory usage is above the high watermark
   */
  private boolean isAboveHighWaterMark() {
    return server.getRegionServerAccounting().
      getGlobalMemstoreSize() >= globalMemStoreLimit;
  }

  /**
   * Return true if we're above the low watermark
   */
  private boolean isAboveLowWaterMark() {
    return server.getRegionServerAccounting().
      getGlobalMemstoreSize() >= globalMemStoreLimitLowMark;
  }

  /** Enqueue a flush of the given region, unless one is already queued. */
  @Override
  public void requestFlush(HRegion r, boolean forceFlushAllStores) {
    synchronized (regionsInQueue) {
      if (!regionsInQueue.containsKey(r)) {
        // This entry has no delay so it will be added at the top of the flush
        // queue.  It'll come out near immediately.
        FlushRegionEntry fqe = new FlushRegionEntry(r, forceFlushAllStores);
        this.regionsInQueue.put(r, fqe);
        this.flushQueue.add(fqe);
      }
    }
  }

  /** Enqueue a flush of the given region after {@code delay} ms, unless one is already queued. */
  @Override
  public void requestDelayedFlush(HRegion r, long delay, boolean forceFlushAllStores) {
    synchronized (regionsInQueue) {
      if (!regionsInQueue.containsKey(r)) {
        // This entry has some delay
        FlushRegionEntry fqe = new FlushRegionEntry(r, forceFlushAllStores);
        fqe.requeue(delay);
        this.regionsInQueue.put(r, fqe);
        this.flushQueue.add(fqe);
      }
    }
  }

  /** Returns the number of entries (including wakeup tokens) currently queued. */
  public int getFlushQueueSize() {
    return flushQueue.size();
  }

  /**
   * Only interrupt once it's done with a run through the work loop.
   */
  void interruptIfNecessary() {
    lock.writeLock().lock();
    try {
      for (FlushHandler flushHander : flushHandlers) {
        if (flushHander != null) flushHander.interrupt();
      }
    } finally {
      lock.writeLock().unlock();
    }
  }

  /** Creates and starts the flush handler threads. */
  synchronized void start(UncaughtExceptionHandler eh) {
    ThreadFactory flusherThreadFactory = Threads.newDaemonThreadFactory(
        server.getServerName().toShortString() + "-MemStoreFlusher", eh);
    for (int i = 0; i < flushHandlers.length; i++) {
      flushHandlers[i] = new FlushHandler("MemStoreFlusher." + i);
      // NOTE(review): the Thread returned by newThread() is discarded and the
      // handler is started via HasThread.start() instead — it looks like the
      // factory's name/daemon/exception-handler settings never take effect
      // here; confirm against HasThread/Threads semantics.
      flusherThreadFactory.newThread(flushHandlers[i]);
      flushHandlers[i].start();
    }
  }

  /** Returns true while at least one flush handler thread is still running. */
  boolean isAlive() {
    for (FlushHandler flushHander : flushHandlers) {
      if (flushHander != null && flushHander.isAlive()) {
        return true;
      }
    }
    return false;
  }

  /** Blocks until all flush handler threads have shut down. */
  void join() {
    for (FlushHandler flushHander : flushHandlers) {
      if (flushHander != null) {
        Threads.shutdown(flushHander.getThread());
      }
    }
  }

  /**
   * A flushRegion that checks store file count.  If too many, puts the flush
   * on delay queue to retry later.
   * @param fqe
   * @return true if the region was successfully flushed, false otherwise. If
   * false, there will be accompanying log messages explaining why the region was
   * not flushed.
   */
  private boolean flushRegion(final FlushRegionEntry fqe) {
    HRegion region = fqe.region;
    if (!region.getRegionInfo().isMetaRegion() &&
        isTooManyStoreFiles(region)) {
      if (fqe.isMaximumWait(this.blockingWaitTime)) {
        LOG.info("Waited " + (EnvironmentEdgeManager.currentTime() - fqe.createTime) +
          "ms on a compaction to clean up 'too many store files'; waited " +
          "long enough... proceeding with flush of " +
          region.getRegionNameAsString());
      } else {
        // If this is first time we've been put off, then emit a log message.
        if (fqe.getRequeueCount() <= 0) {
          // Note: We don't impose blockingStoreFiles constraint on meta regions
          LOG.warn("Region " + region.getRegionNameAsString() + " has too many " +
            "store files; delaying flush up to " + this.blockingWaitTime + "ms");
          if (!this.server.compactSplitThread.requestSplit(region)) {
            try {
              this.server.compactSplitThread.requestSystemCompaction(
                  region, Thread.currentThread().getName());
            } catch (IOException e) {
              e = e instanceof RemoteException ?
                      ((RemoteException)e).unwrapRemoteException() : e;
              LOG.error(
                "Cache flush failed for region " + Bytes.toStringBinary(region.getRegionName()),
                e);
            }
          }
        }

        // Put back on the queue.  Have it come back out of the queue
        // after a delay of this.blockingWaitTime / 100 ms.
        this.flushQueue.add(fqe.requeue(this.blockingWaitTime / 100));
        // Tell a lie, it's not flushed but it's ok
        return true;
      }
    }
    return flushRegion(region, false, fqe.isForceFlushAllStores());
  }

  /**
   * Flush a region.
   * @param region Region to flush.
   * @param emergencyFlush Set if we are being force flushed. If true the region
   * needs to be removed from the flush queue. If false, when we were called
   * from the main flusher run loop and we got the entry to flush by calling
   * poll on the flush queue (which removed it).
   * @param forceFlushAllStores whether we want to flush all store.
   * @return true if the region was successfully flushed, false otherwise. If
   * false, there will be accompanying log messages explaining why the region was
   * not flushed.
   */
  private boolean flushRegion(final HRegion region, final boolean emergencyFlush,
      boolean forceFlushAllStores) {
    long startTime = 0;
    synchronized (this.regionsInQueue) {
      FlushRegionEntry fqe = this.regionsInQueue.remove(region);
      // Use the start time of the FlushRegionEntry if available
      if (fqe != null) {
        startTime = fqe.createTime;
      }
      if (fqe != null && emergencyFlush) {
        // Need to remove from region from delay queue.  When NOT an
        // emergencyFlush, then item was removed via a flushQueue.poll.
        flushQueue.remove(fqe);
      }
    }
    if (startTime == 0) {
      // Avoid getting the system time unless we don't have a FlushRegionEntry;
      // shame we can't capture the time also spent in the above synchronized
      // block
      startTime = EnvironmentEdgeManager.currentTime();
    }
    lock.readLock().lock();
    try {
      notifyFlushRequest(region, emergencyFlush);
      HRegion.FlushResult flushResult = region.flushcache(forceFlushAllStores);
      boolean shouldCompact = flushResult.isCompactionNeeded();
      // We just want to check the size
      boolean shouldSplit = region.checkSplit() != null;
      if (shouldSplit) {
        this.server.compactSplitThread.requestSplit(region);
      } else if (shouldCompact) {
        server.compactSplitThread.requestSystemCompaction(
            region, Thread.currentThread().getName());
      }
      if (flushResult.isFlushSucceeded()) {
        long endTime = EnvironmentEdgeManager.currentTime();
        server.metricsRegionServer.updateFlushTime(endTime - startTime);
      }
    } catch (DroppedSnapshotException ex) {
      // Cache flush can fail in a few places. If it fails in a critical
      // section, we get a DroppedSnapshotException and a replay of wal
      // is required. Currently the only way to do this is a restart of
      // the server. Abort because hdfs is probably bad (HBASE-644 is a case
      // where hdfs was bad but passed the hdfs check).
      server.abort("Replay of WAL required. Forcing server shutdown", ex);
      return false;
    } catch (IOException ex) {
      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
      LOG.error(
        "Cache flush failed" +
        (region != null ? (" for region " + Bytes.toStringBinary(region.getRegionName())) : ""),
        ex);
      if (!server.checkFileSystem()) {
        return false;
      }
    } finally {
      lock.readLock().unlock();
      wakeUpIfBlocking();
    }
    return true;
  }

  // Tell listeners what kind of flush this is: NORMAL, or which water mark an
  // emergency flush was triggered by.
  private void notifyFlushRequest(HRegion region, boolean emergencyFlush) {
    FlushType type = FlushType.NORMAL;
    if (emergencyFlush) {
      type = isAboveHighWaterMark() ? FlushType.ABOVE_HIGHER_MARK : FlushType.ABOVE_LOWER_MARK;
    }
    for (FlushRequestListener listener : flushRequestListeners) {
      listener.flushRequested(type, region);
    }
  }

  // Release any client threads blocked in reclaimMemStoreMemory().
  private void wakeUpIfBlocking() {
    synchronized (blockSignal) {
      blockSignal.notifyAll();
    }
  }

  // True if any store in the region has hit its blocking store-file count.
  private boolean isTooManyStoreFiles(HRegion region) {
    for (Store store : region.stores.values()) {
      if (store.hasTooManyStoreFiles()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Check if the regionserver's memstore memory usage is greater than the
   * limit. If so, flush regions with the biggest memstores until we're down
   * to the lower limit. This method blocks callers until we're down to a safe
   * amount of memstore consumption.
   */
  public void reclaimMemStoreMemory() {
    // NOTE(review): span name typo "MemStoreFluser" is a runtime string; left as-is
    TraceScope scope = Trace.startSpan("MemStoreFluser.reclaimMemStoreMemory");
    if (isAboveHighWaterMark()) {
      if (Trace.isTracing()) {
        scope.getSpan().addTimelineAnnotation("Force Flush. We're above high water mark.");
      }
      long start = EnvironmentEdgeManager.currentTime();
      synchronized (this.blockSignal) {
        boolean blocked = false;
        long startTime = 0;
        boolean interrupted = false;
        try {
          while (isAboveHighWaterMark() && !server.isStopped()) {
            if (!blocked) {
              startTime = EnvironmentEdgeManager.currentTime();
              LOG.info("Blocking updates on " + server.toString() +
                ": the global memstore size " +
                TraditionalBinaryPrefix.long2String(server.getRegionServerAccounting()
                  .getGlobalMemstoreSize(), "", 1) + " is >= than blocking " +
                TraditionalBinaryPrefix.long2String(globalMemStoreLimit, "", 1) + " size");
            }
            blocked = true;
            wakeupFlushThread();
            try {
              // we should be able to wait forever, but we've seen a bug where
              // we miss a notify, so put a 5 second bound on it at least.
              blockSignal.wait(5 * 1000);
            } catch (InterruptedException ie) {
              LOG.warn("Interrupted while waiting");
              interrupted = true;
            }
            long took = EnvironmentEdgeManager.currentTime() - start;
            LOG.warn("Memstore is above high water mark and block " + took + "ms");
          }
        } finally {
          if (interrupted) {
            // restore the interrupt status for the caller
            Thread.currentThread().interrupt();
          }
        }

        if(blocked){
          final long totalTime = EnvironmentEdgeManager.currentTime() - startTime;
          if(totalTime > 0){
            this.updatesBlockedMsHighWater.add(totalTime);
          }
          LOG.info("Unblocking updates for server " + server.toString());
        }
      }
    } else if (isAboveLowWaterMark()) {
      wakeupFlushThread();
    }
    scope.close();
  }

  @Override
  public String toString() {
    return "flush_queue=" + flushQueue.size();
  }

  /** Returns a human-readable dump of the current flush queue contents. */
  public String dumpQueue() {
    StringBuilder queueList = new StringBuilder();
    queueList.append("Flush Queue Queue dump:\n");
    queueList.append(" Flush Queue:\n");
    java.util.Iterator<FlushQueueEntry> it = flushQueue.iterator();

    while(it.hasNext()){
      queueList.append(" "+it.next().toString());
      queueList.append("\n");
    }

    return queueList.toString();
  }

  /**
   * Register a MemstoreFlushListener
   * @param listener
   */
  @Override
  public void registerFlushRequestListener(final FlushRequestListener listener) {
    this.flushRequestListeners.add(listener);
  }

  /**
   * Unregister the listener from MemstoreFlushListeners
   * @param listener
   * @return true when passed listener is unregistered successfully.
   */
  @Override
  public boolean unregisterFlushRequestListener(final FlushRequestListener listener) {
    return this.flushRequestListeners.remove(listener);
  }

  /**
   * Sets the global memstore limit to a new size.
   * @param globalMemStoreSize
   */
  @Override
  public void setGlobalMemstoreLimit(long globalMemStoreSize) {
    this.globalMemStoreLimit = globalMemStoreSize;
    // keep the low mark proportional to the new limit
    this.globalMemStoreLimitLowMark =
        (long) (this.globalMemStoreLimitLowMarkPercent * globalMemStoreSize);
    reclaimMemStoreMemory();
  }

  /** Returns the current global memstore limit in bytes. */
  public long getMemoryLimit() {
    return this.globalMemStoreLimit;
  }

  interface FlushQueueEntry extends Delayed {
  }

  /**
   * Token to insert into the flush queue that ensures that the flusher does not sleep
   */
  static class WakeupFlushThread implements FlushQueueEntry {
    @Override
    public long getDelay(TimeUnit unit) {
      return 0;
    }

    @Override
    public int compareTo(Delayed o) {
      // NOTE(review): always -1 pushes wakeup tokens ahead of other entries,
      // but it is not a consistent ordering (violates the compareTo contract);
      // appears tolerated by DelayQueue usage here — confirm before reuse.
      return -1;
    }

    @Override
    public boolean equals(Object obj) {
      return (this == obj);
    }
  }

  /**
   * Datastructure used in the flush queue.  Holds region and retry count.
   * Keeps tabs on how old this object is.  Implements {@link Delayed}.  On
   * construction, the delay is zero. When added to a delay queue, we'll come
   * out near immediately.  Call {@link #requeue(long)} passing delay in
   * milliseconds before readding to delay queue if you want it to stay there
   * a while.
   */
  static class FlushRegionEntry implements FlushQueueEntry {
    private final HRegion region;

    private final long createTime;
    private long whenToExpire;
    private int requeueCount = 0;

    private boolean forceFlushAllStores;

    FlushRegionEntry(final HRegion r, boolean forceFlushAllStores) {
      this.region = r;
      this.createTime = EnvironmentEdgeManager.currentTime();
      this.whenToExpire = this.createTime;
      this.forceFlushAllStores = forceFlushAllStores;
    }

    /**
     * @param maximumWait
     * @return True if we have been delayed > <code>maximumWait</code> milliseconds.
     */
    public boolean isMaximumWait(final long maximumWait) {
      return (EnvironmentEdgeManager.currentTime() - this.createTime) > maximumWait;
    }

    /**
     * @return Count of times {@link #requeue(long)} was called; i.e this is
     * number of times we've been requeued.
     */
    public int getRequeueCount() {
      return this.requeueCount;
    }

    /**
     * @return whether we need to flush all stores.
     */
    public boolean isForceFlushAllStores() {
      return forceFlushAllStores;
    }

    /**
     * @param when When to expire, when to come up out of the queue.
     * Specify in milliseconds.  This method adds EnvironmentEdgeManager.currentTime()
     * to whatever you pass.
     * @return This.
     */
    public FlushRegionEntry requeue(final long when) {
      this.whenToExpire = EnvironmentEdgeManager.currentTime() + when;
      this.requeueCount++;
      return this;
    }

    @Override
    public long getDelay(TimeUnit unit) {
      return unit.convert(this.whenToExpire - EnvironmentEdgeManager.currentTime(),
          TimeUnit.MILLISECONDS);
    }

    @Override
    public int compareTo(Delayed other) {
      // Delay is compared first. If there is a tie, compare region's hash code
      // NOTE(review): the long difference is truncated to int; extreme delay
      // gaps could overflow/truncate and mis-order — Long.compare would be safer.
      int ret = Long.valueOf(getDelay(TimeUnit.MILLISECONDS) -
        other.getDelay(TimeUnit.MILLISECONDS)).intValue();
      if (ret != 0) {
        return ret;
      }
      FlushQueueEntry otherEntry = (FlushQueueEntry) other;
      return hashCode() - otherEntry.hashCode();
    }

    @Override
    public String toString() {
      return "[flush region "+Bytes.toStringBinary(region.getRegionName())+"]";
    }

    @Override
    public int hashCode() {
      // NOTE(review): hash incorporates getDelay(), which changes with the
      // clock — this hash is NOT stable over time; do not use these entries
      // as hash-map keys.
      int hash = (int) getDelay(TimeUnit.MILLISECONDS);
      return hash ^ region.hashCode();
    }

   @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null || getClass() != obj.getClass()) {
        return false;
      }
      Delayed other = (Delayed) obj;
      return compareTo(other) == 0;
    }
  }
}

enum FlushType {
  NORMAL, ABOVE_LOWER_MARK, ABOVE_HIGHER_MARK;
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; /** * <p> * Describes an event in the history of the Spot fleet request. * </p> */ public class HistoryRecord implements Serializable, Cloneable { /** * The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). */ private java.util.Date timestamp; /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error */ private String eventType; /** * Information about the event. */ private EventInformation eventInformation; /** * The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). * * @return The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). */ public java.util.Date getTimestamp() { return timestamp; } /** * The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). 
* * @param timestamp The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). */ public void setTimestamp(java.util.Date timestamp) { this.timestamp = timestamp; } /** * The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). * <p> * Returns a reference to this object so that method calls can be chained together. * * @param timestamp The date and time of the event, in UTC format (for example, * <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z). * * @return A reference to this updated object so that method calls can be chained * together. */ public HistoryRecord withTimestamp(java.util.Date timestamp) { this.timestamp = timestamp; return this; } /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error * * @return The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * * @see EventType */ public String getEventType() { return eventType; } /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. 
</li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error * * @param eventType The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * * @see EventType */ public void setEventType(String eventType) { this.eventType = eventType; } /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error * * @param eventType The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * * @return A reference to this updated object so that method calls can be chained * together. * * @see EventType */ public HistoryRecord withEventType(String eventType) { this.eventType = eventType; return this; } /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. 
</li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error * * @param eventType The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * * @see EventType */ public void setEventType(EventType eventType) { this.eventType = eventType.toString(); } /** * The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>instanceChange, fleetRequestChange, error * * @param eventType The event type. <ul> <li> <p><code>error</code> - Indicates an error * with the Spot fleet request. </li> <li> * <p><code>fleetRequestChange</code> - Indicates a change in the status * or configuration of the Spot fleet request. </li> <li> * <p><code>instanceChange</code> - Indicates that an instance was * launched or terminated. </li> </ul> * * @return A reference to this updated object so that method calls can be chained * together. * * @see EventType */ public HistoryRecord withEventType(EventType eventType) { this.eventType = eventType.toString(); return this; } /** * Information about the event. 
* * @return Information about the event. */ public EventInformation getEventInformation() { return eventInformation; } /** * Information about the event. * * @param eventInformation Information about the event. */ public void setEventInformation(EventInformation eventInformation) { this.eventInformation = eventInformation; } /** * Information about the event. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param eventInformation Information about the event. * * @return A reference to this updated object so that method calls can be chained * together. */ public HistoryRecord withEventInformation(EventInformation eventInformation) { this.eventInformation = eventInformation; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getTimestamp() != null) sb.append("Timestamp: " + getTimestamp() + ","); if (getEventType() != null) sb.append("EventType: " + getEventType() + ","); if (getEventInformation() != null) sb.append("EventInformation: " + getEventInformation() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getTimestamp() == null) ? 0 : getTimestamp().hashCode()); hashCode = prime * hashCode + ((getEventType() == null) ? 0 : getEventType().hashCode()); hashCode = prime * hashCode + ((getEventInformation() == null) ? 
0 : getEventInformation().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof HistoryRecord == false) return false; HistoryRecord other = (HistoryRecord)obj; if (other.getTimestamp() == null ^ this.getTimestamp() == null) return false; if (other.getTimestamp() != null && other.getTimestamp().equals(this.getTimestamp()) == false) return false; if (other.getEventType() == null ^ this.getEventType() == null) return false; if (other.getEventType() != null && other.getEventType().equals(this.getEventType()) == false) return false; if (other.getEventInformation() == null ^ this.getEventInformation() == null) return false; if (other.getEventInformation() != null && other.getEventInformation().equals(this.getEventInformation()) == false) return false; return true; } @Override public HistoryRecord clone() { try { return (HistoryRecord) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.irc;

import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.schwering.irc.lib.ssl.SSLDefaultTrustManager;
import org.schwering.irc.lib.ssl.SSLTrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Endpoint configuration for the Camel IRC component: connection details
 * (host, ports, credentials, SSL), the channels/keys to join, and per-event
 * filter flags controlling which IRC events produce exchanges.
 *
 * <p>Mutable; populated from the endpoint URI via {@link #configure(String)}
 * and the generated setters. Cloneable so each endpoint can hold its own copy.
 */
@UriParams
public class IrcConfiguration implements Cloneable {
    private static final Logger LOG = LoggerFactory.getLogger(IrcConfiguration.class);

    private boolean usingSSL;
    // Parsed channel objects derived from the 'channels'/'keys' strings; kept
    // in sync by createChannels() whenever either string changes.
    private List<IrcChannel> channelList = new ArrayList<>();

    @UriPath @Metadata(required = true)
    private String hostname;
    @UriPath
    private int port;
    // Default candidate ports tried when no explicit port is configured.
    private int[] ports = {6667, 6668, 6669};
    @UriParam(label = "security", secret = true)
    private String password;
    @UriParam(label = "common")
    private String nickname;
    @UriParam(label = "common")
    private String channels;
    @UriParam(label = "common")
    private String keys;
    @UriParam(label = "common")
    private String realname;
    @UriParam(label = "security", secret = true)
    private String username;
    @UriParam(label = "security")
    private SSLTrustManager trustManager = new SSLDefaultTrustManager();
    @UriParam(defaultValue = "true")
    @Deprecated
    private boolean persistent = true;
    @UriParam(defaultValue = "true", label = "advanced")
    private boolean colors = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onNick = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onQuit = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onJoin = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onKick = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onMode = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onPart = true;
    @UriParam(label = "filter")
    private boolean onReply;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onTopic = true;
    @UriParam(defaultValue = "true", label = "filter")
    private boolean onPrivmsg = true;
    @UriParam(defaultValue = "true")
    private boolean autoRejoin = true;
    @UriParam(label = "common")
    private boolean namesOnJoin;
    @UriParam(label = "security")
    private SSLContextParameters sslContextParameters;
    @UriParam(label = "security", secret = true)
    private String nickPassword;
    @UriParam(defaultValue = "5000")
    private long commandTimeout = 5000L;

    public IrcConfiguration() {
    }

    public IrcConfiguration(String hostname, String nickname, String displayname, String channels) {
        this(hostname, null, null, nickname, displayname, channels);
    }

    public IrcConfiguration(String hostname, String username, String password, String nickname, String displayname, String channels) {
        this.channels = channels;
        this.hostname = hostname;
        this.username = username;
        this.password = password;
        this.nickname = nickname;
        this.realname = displayname;
    }

    /**
     * Returns a shallow copy of this configuration.
     *
     * @throws RuntimeCamelException if cloning fails (should not happen —
     *         this class implements Cloneable)
     */
    public IrcConfiguration copy() {
        try {
            return (IrcConfiguration) clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }

    /**
     * Key identifying a shared IRC connection: one connection per
     * hostname/nickname pair.
     */
    public String getCacheKey() {
        return hostname + ":" + nickname;
    }

    /**
     * Returns a space separated list of the configured channel names,
     * without the channel keys (passwords).
     */
    public String getSpaceSeparatedChannelNames() {
        StringBuilder retval = new StringBuilder();
        for (IrcChannel channel : channelList) {
            retval.append(retval.length() == 0 ? "" : " ").append(channel.getName());
        }
        return retval.toString();
    }

    /**
     * Populates this configuration from an endpoint URI string
     * (<code>irc:</code> or <code>ircs:</code> scheme).
     *
     * <p>Normalizes the scheme to the <code>scheme://</code> form, strips the
     * query part (query parameters are bound elsewhere), and extracts user
     * info, host and port. The username (if present) is used as the initial
     * nickname/username/realname; explicit query parameters can override
     * these afterwards.
     *
     * @param uriStr the raw endpoint URI
     * @throws URISyntaxException if the normalized string is not a valid URI
     */
    public void configure(String uriStr) throws URISyntaxException, UnsupportedEncodingException {
        // fix provided URI and handle that we can use # to indicate the IRC room
        if (uriStr.startsWith("ircs")) {
            setUsingSSL(true);
            if (!uriStr.startsWith("ircs://")) {
                uriStr = uriStr.replace("ircs:", "ircs://");
            }
        } else if (!uriStr.startsWith("irc://")) {
            uriStr = uriStr.replace("irc:", "irc://");
        }

        if (uriStr.contains("?")) {
            uriStr = StringHelper.before(uriStr, "?");
        }

        URI uri = new URI(uriStr);

        // Because we can get a "sanitized" URI, we need to deal with the situation where the
        // user info includes the username and password together or else we get a mangled username
        // that includes the user's secret being sent to the server.
        String userInfo = uri.getUserInfo();
        String username = null;
        String password = null;
        if (userInfo != null) {
            int colonIndex = userInfo.indexOf(":");
            if (colonIndex != -1) {
                username = userInfo.substring(0, colonIndex);
                password = userInfo.substring(colonIndex + 1);
            } else {
                username = userInfo;
            }
        }

        if (uri.getPort() != -1) {
            // an explicit port replaces the default candidate list
            setPorts(new int[] {uri.getPort()});
            setPort(uri.getPort());
        }

        setNickname(username);
        setUsername(username);
        setRealname(username);
        setPassword(password);
        setHostname(uri.getHost());

        String path = uri.getPath();
        if (path != null && !path.isEmpty()) {
            LOG.warn("Channel {} should not be specified in the URI path. Use an @channel query parameter instead.", path);
        }
    }

    public List<IrcChannel> getChannelList() {
        return channelList;
    }

    /**
     * Looks up a parsed channel by name, or returns {@code null} if no such
     * channel is configured.
     */
    public IrcChannel findChannel(String name) {
        for (IrcChannel channel : channelList) {
            if (channel.getName().equals(name)) {
                return channel;
            }
        }
        return null;
    }

    /**
     * The trust manager used to verify the SSL server's certificate.
     */
    public void setTrustManager(SSLTrustManager trustManager) {
        this.trustManager = trustManager;
    }

    public SSLTrustManager getTrustManager() {
        return trustManager;
    }

    public boolean getUsingSSL() {
        return usingSSL;
    }

    // Derived from the URI scheme (ircs) in configure(); not user-settable.
    private void setUsingSSL(boolean usingSSL) {
        this.usingSSL = usingSSL;
    }

    public String getHostname() {
        return hostname;
    }

    /**
     * Hostname for the IRC chat server
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public String getPassword() {
        return password;
    }

    /**
     * The IRC server password.
     */
    public void setPassword(String password) {
        this.password = password;
    }

    public String getNickname() {
        return nickname;
    }

    /**
     * The nickname used in chat.
     */
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    public String getRealname() {
        return realname;
    }

    /**
     * The IRC user's actual name.
     */
    public void setRealname(String realname) {
        this.realname = realname;
    }

    public String getUsername() {
        return username;
    }

    /**
     * Comma separated list of IRC channels.
     */
    public String getChannels() {
        return channels;
    }

    public void setChannels(String channels) {
        this.channels = channels;
        // keep the parsed channelList in sync with the raw string
        createChannels();
    }

    /**
     * Comma separated list of keys for channels.
     */
    public String getKeys() {
        return keys;
    }

    public void setKeys(String keys) {
        this.keys = keys;
        // keys are matched positionally to channels, so re-parse
        createChannels();
    }

    // Rebuilds channelList from the 'channels' and 'keys' strings. Keys are
    // matched to channels by index; a channel with a key is encoded as
    // "name!key" before being handed to createChannel().
    private void createChannels() {
        channelList.clear();
        if (channels == null) {
            return;
        }
        String[] chs = channels.split(",");
        String[] ks = keys != null ? keys.split(",") : null;
        int count = chs.length;
        for (int i = 0; i < count; i++) {
            String channel = chs[i].trim();
            String key = ks != null && ks.length > i ? ks[i].trim() : null;
            // NOTE(review): a single leading '#' is stripped here (but '##'
            // is kept) — presumably because '#' collides with URI fragment
            // syntax; confirm against IrcChannel's naming expectations.
            if (channel.startsWith("#") && !channel.startsWith("##")) {
                channel = channel.substring(1);
            }
            if (key != null && !key.isEmpty()) {
                channel += "!" + key;
            }
            channelList.add(createChannel(channel));
        }
    }

    /**
     * The IRC server user name.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    public int[] getPorts() {
        return ports;
    }

    /**
     * Port numbers for the IRC chat server
     */
    public void setPorts(int[] ports) {
        this.ports = ports;
    }

    public int getPort() {
        return port;
    }

    /**
     * Port number for the IRC chat server. If no port is configured then a default port of either 6667, 6668 or 6669 is used.
     */
    public void setPort(int port) {
        this.port = port;
    }

    public boolean isPersistent() {
        return persistent;
    }

    /**
     * Use persistent messages.
     * @deprecated not in use
     */
    @Deprecated
    public void setPersistent(boolean persistent) {
        this.persistent = persistent;
    }

    public boolean isColors() {
        return colors;
    }

    /**
     * Whether or not the server supports color codes.
     */
    public void setColors(boolean colors) {
        this.colors = colors;
    }

    public boolean isOnNick() {
        return onNick;
    }

    /**
     * Handle nickname change events.
     */
    public void setOnNick(boolean onNick) {
        this.onNick = onNick;
    }

    public boolean isOnQuit() {
        return onQuit;
    }

    /**
     * Handle user quit events.
     */
    public void setOnQuit(boolean onQuit) {
        this.onQuit = onQuit;
    }

    public boolean isOnJoin() {
        return onJoin;
    }

    /**
     * Handle user join events.
     */
    public void setOnJoin(boolean onJoin) {
        this.onJoin = onJoin;
    }

    public boolean isOnKick() {
        return onKick;
    }

    /**
     * Handle kick events.
     */
    public void setOnKick(boolean onKick) {
        this.onKick = onKick;
    }

    public boolean isOnMode() {
        return onMode;
    }

    /**
     * Handle mode change events.
     */
    public void setOnMode(boolean onMode) {
        this.onMode = onMode;
    }

    public boolean isOnPart() {
        return onPart;
    }

    /**
     * Handle user part events.
     */
    public void setOnPart(boolean onPart) {
        this.onPart = onPart;
    }

    public boolean isOnReply() {
        return onReply;
    }

    /**
     * Whether or not to handle general responses to commands or informational messages.
     */
    public void setOnReply(boolean onReply) {
        this.onReply = onReply;
    }

    public boolean isOnTopic() {
        return onTopic;
    }

    /**
     * Handle topic change events.
     */
    public void setOnTopic(boolean onTopic) {
        this.onTopic = onTopic;
    }

    public boolean isOnPrivmsg() {
        return onPrivmsg;
    }

    /**
     * Handle private message events.
     */
    public void setOnPrivmsg(boolean onPrivmsg) {
        this.onPrivmsg = onPrivmsg;
    }

    public boolean isAutoRejoin() {
        return autoRejoin;
    }

    /**
     * Whether to auto re-join when being kicked
     */
    public void setAutoRejoin(boolean autoRejoin) {
        this.autoRejoin = autoRejoin;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * Used for configuring security using SSL.
     * Reference to a org.apache.camel.support.jsse.SSLContextParameters in the Registry.
     * This reference overrides any configured SSLContextParameters at the component level.
     * Note that this setting overrides the trustManager option.
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    /**
     * Your IRC server nickname password.
     */
    public String getNickPassword() {
        return nickPassword;
    }

    public void setNickPassword(String nickPassword) {
        this.nickPassword = nickPassword;
    }

    /**
     * Delay in milliseconds before sending commands after the connection is established.
     * @param timeout timeout value in milliseconds
     */
    public void setCommandTimeout(long timeout) {
        this.commandTimeout = timeout;
    }

    public long getCommandTimeout() {
        return commandTimeout;
    }

    public boolean isNamesOnJoin() {
        return namesOnJoin;
    }

    /**
     * Sends <code>NAMES</code> command to channel after joining it.<br>
     * {@link #onReply} has to be <code>true</code> in order to process the
     * result which will have the header value <code>irc.num = '353'</code>.
     */
    public void setNamesOnJoin(boolean namesOnJoin) {
        this.namesOnJoin = namesOnJoin;
    }

    @Override
    public String toString() {
        return "IrcConfiguration[hostname: " + hostname + ", ports=" + Arrays.toString(ports) + ", username=" + username + "]";
    }

    // Splits a "name!key" encoded channel into an IrcChannel (key optional).
    private static IrcChannel createChannel(String channelInfo) {
        String[] pair = channelInfo.split("!");
        return new IrcChannel(pair[0], pair.length > 1 ? pair[1] : null);
    }

    public static String sanitize(String uri) {
        //symbol # has to be encoded. otherwise value after '#' won't be propagated into parameters
        return uri.replace("#", "%23");
    }

    // Serializes a parameter map back into a query string. Multi-valued
    // parameters (List values) are emitted once per value.
    private static String formatQuery(Map<String, Object> params) {
        if (params == null || params.size() == 0) {
            return "";
        }
        StringBuilder result = new StringBuilder();
        for (Map.Entry<String, Object> pair : params.entrySet()) {
            Object value = pair.getValue();
            // the value may be a list since the same key has multiple values
            if (value instanceof List) {
                List<?> list = (List<?>)value;
                for (Object s : list) {
                    addQueryParameter(result, pair.getKey(), s);
                }
            } else {
                addQueryParameter(result, pair.getKey(), value);
            }
        }
        return result.toString();
    }

    // Appends one key[=encodedValue] pair, prefixing '&' between pairs.
    private static void addQueryParameter(StringBuilder sb, String key, Object value) {
        sb.append(sb.length() == 0 ? "" : "&");
        sb.append(key);
        if (value != null) {
            String s = value.toString();
            sb.append(s.isEmpty() ? "" : "=" + UnsafeUriCharactersEncoder.encode(s));
        }
    }
}
package com.google.android.exoplayer2.upstream.cache;

import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import android.test.InstrumentationTestCase;
import android.util.SparseArray;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;

/**
 * Tests {@link CachedContentIndex}.
 *
 * <p>Fixes applied in review: {@code testEncryption} now resets its {@code threw}
 * flag before the second negative case (previously the second assertion could
 * never fail because the flag was still {@code true} from the first case), and
 * all {@link FileInputStream}s are closed via try-with-resources.
 */
public class CachedContentIndexTest extends InstrumentationTestCase {

  // A hand-built version-1 index file containing two CachedContent entries.
  private final byte[] testIndexV1File = {
      0, 0, 0, 1, // version
      0, 0, 0, 0, // flags
      0, 0, 0, 2, // number_of_CachedContent
      0, 0, 0, 5, // cache_id
      0, 5, 65, 66, 67, 68, 69, // cache_key
      0, 0, 0, 0, 0, 0, 0, 10, // original_content_length
      0, 0, 0, 2, // cache_id
      0, 5, 75, 76, 77, 78, 79, // cache_key
      0, 0, 0, 0, 0, 0, 10, 0, // original_content_length
      (byte) 0xF6, (byte) 0xFB, 0x50, 0x41 // hashcode_of_CachedContent_array
  };
  private CachedContentIndex index;
  private File cacheDir;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    cacheDir = Util.createTempDirectory(getInstrumentation().getContext(), "ExoPlayerTest");
    index = new CachedContentIndex(cacheDir);
  }

  @Override
  protected void tearDown() throws Exception {
    Util.recursiveDelete(cacheDir);
    super.tearDown();
  }

  public void testAddGetRemove() throws Exception {
    final String key1 = "key1";
    final String key2 = "key2";
    final String key3 = "key3";

    // Add two CachedContents with add methods
    CachedContent cachedContent1 = new CachedContent(5, key1, 10);
    index.addNew(cachedContent1);
    CachedContent cachedContent2 = index.getOrAdd(key2);
    assertThat(cachedContent1.id != cachedContent2.id).isTrue();

    // add a span
    File cacheSpanFile =
        SimpleCacheSpanTest.createCacheSpanFile(cacheDir, cachedContent1.id, 10, 20, 30);
    SimpleCacheSpan span = SimpleCacheSpan.createCacheEntry(cacheSpanFile, index);
    assertThat(span).isNotNull();
    cachedContent1.addSpan(span);

    // Check if they are added and get method returns null if the key isn't found
    assertThat(index.get(key1)).isEqualTo(cachedContent1);
    assertThat(index.get(key2)).isEqualTo(cachedContent2);
    assertThat(index.get(key3)).isNull();

    // test getAll()
    Collection<CachedContent> cachedContents = index.getAll();
    assertThat(cachedContents).containsExactly(cachedContent1, cachedContent2);

    // test getKeys()
    Set<String> keys = index.getKeys();
    assertThat(keys).containsExactly(key1, key2);

    // test getKeyForId()
    assertThat(index.getKeyForId(cachedContent1.id)).isEqualTo(key1);
    assertThat(index.getKeyForId(cachedContent2.id)).isEqualTo(key2);

    // test remove() — cachedContent1 has a span so it must survive removal attempts
    index.maybeRemove(key2);
    index.maybeRemove(key3);
    assertThat(index.get(key1)).isEqualTo(cachedContent1);
    assertThat(index.get(key2)).isNull();
    assertThat(cacheSpanFile.exists()).isTrue();

    // test removeEmpty()
    index.addNew(cachedContent2);
    index.removeEmpty();
    assertThat(index.get(key1)).isEqualTo(cachedContent1);
    assertThat(index.get(key2)).isNull();
    assertThat(cacheSpanFile.exists()).isTrue();
  }

  public void testStoreAndLoad() throws Exception {
    assertStoredAndLoadedEqual(index, new CachedContentIndex(cacheDir));
  }

  public void testLoadV1() throws Exception {
    // Write the canned v1 file and check it round-trips through load().
    FileOutputStream fos = new FileOutputStream(new File(cacheDir, CachedContentIndex.FILE_NAME));
    fos.write(testIndexV1File);
    fos.close();

    index.load();
    assertThat(index.getAll()).hasSize(2);
    assertThat(index.assignIdForKey("ABCDE")).isEqualTo(5);
    assertThat(index.getContentLength("ABCDE")).isEqualTo(10);
    assertThat(index.assignIdForKey("KLMNO")).isEqualTo(2);
    assertThat(index.getContentLength("KLMNO")).isEqualTo(2560);
  }

  public void testStoreV1() throws Exception {
    index.addNew(new CachedContent(2, "KLMNO", 2560));
    index.addNew(new CachedContent(5, "ABCDE", 10));
    index.store();

    byte[] buffer = new byte[testIndexV1File.length];
    try (FileInputStream fis =
        new FileInputStream(new File(cacheDir, CachedContentIndex.FILE_NAME))) {
      assertThat(fis.read(buffer)).isEqualTo(testIndexV1File.length);
      assertThat(fis.read()).isEqualTo(-1);
    }
    // TODO: The order of the CachedContent stored in index file isn't defined so this test may fail
    // on a different implementation of the underlying set
    assertThat(buffer).isEqualTo(testIndexV1File);
  }

  public void testAssignIdForKeyAndGetKeyForId() throws Exception {
    final String key1 = "key1";
    final String key2 = "key2";
    int id1 = index.assignIdForKey(key1);
    int id2 = index.assignIdForKey(key2);
    assertThat(index.getKeyForId(id1)).isEqualTo(key1);
    assertThat(index.getKeyForId(id2)).isEqualTo(key2);
    assertThat(id1 != id2).isTrue();
    // assigning the same key again must return the same id
    assertThat(index.assignIdForKey(key1)).isEqualTo(id1);
    assertThat(index.assignIdForKey(key2)).isEqualTo(id2);
  }

  public void testSetGetContentLength() throws Exception {
    final String key1 = "key1";
    assertThat(index.getContentLength(key1)).isEqualTo(C.LENGTH_UNSET);
    index.setContentLength(key1, 10);
    assertThat(index.getContentLength(key1)).isEqualTo(10);
  }

  public void testGetNewId() throws Exception {
    SparseArray<String> idToKey = new SparseArray<>();
    assertThat(CachedContentIndex.getNewId(idToKey)).isEqualTo(0);
    idToKey.put(10, "");
    assertThat(CachedContentIndex.getNewId(idToKey)).isEqualTo(11);
    // When the max id is taken, allocation wraps around to the lowest free id.
    idToKey.put(Integer.MAX_VALUE, "");
    assertThat(CachedContentIndex.getNewId(idToKey)).isEqualTo(0);
    idToKey.put(0, "");
    assertThat(CachedContentIndex.getNewId(idToKey)).isEqualTo(1);
  }

  public void testEncryption() throws Exception {
    byte[] key = "Bar12345Bar12345".getBytes(C.UTF8_NAME); // 128 bit key
    byte[] key2 = "Foo12345Foo12345".getBytes(C.UTF8_NAME); // 128 bit key

    assertStoredAndLoadedEqual(new CachedContentIndex(cacheDir, key),
        new CachedContentIndex(cacheDir, key));

    // Rename the index file from the test above
    File file1 = new File(cacheDir, CachedContentIndex.FILE_NAME);
    File file2 = new File(cacheDir, "file2compare");
    assertThat(file1.renameTo(file2)).isTrue();

    // Write a new index file
    assertStoredAndLoadedEqual(new CachedContentIndex(cacheDir, key),
        new CachedContentIndex(cacheDir, key));

    assertThat(file1.length()).isEqualTo(file2.length());
    // Assert file content is different (the AES IV should differ per write).
    try (FileInputStream fis1 = new FileInputStream(file1);
        FileInputStream fis2 = new FileInputStream(file2)) {
      for (int b; (b = fis1.read()) == fis2.read(); ) {
        assertThat(b != -1).isTrue();
      }
    }

    boolean threw = false;
    try {
      assertStoredAndLoadedEqual(new CachedContentIndex(cacheDir, key),
          new CachedContentIndex(cacheDir, key2));
    } catch (AssertionError e) {
      threw = true;
    }
    assertWithMessage("Encrypted index file can not be read with different encryption key")
        .that(threw)
        .isTrue();

    // Reset the flag: previously it carried over from the case above, which
    // made the following assertion vacuous.
    threw = false;
    try {
      assertStoredAndLoadedEqual(new CachedContentIndex(cacheDir, key),
          new CachedContentIndex(cacheDir));
    } catch (AssertionError e) {
      threw = true;
    }
    assertWithMessage("Encrypted index file can not be read without encryption key")
        .that(threw)
        .isTrue();

    // Non encrypted index file can be read even when encryption key provided.
    assertStoredAndLoadedEqual(new CachedContentIndex(cacheDir),
        new CachedContentIndex(cacheDir, key));

    // Test multiple store() calls
    CachedContentIndex index = new CachedContentIndex(cacheDir, key);
    index.addNew(new CachedContent(15, "key3", 110));
    index.store();
    assertStoredAndLoadedEqual(index, new CachedContentIndex(cacheDir, key));
  }

  public void testRemoveEmptyNotLockedCachedContent() throws Exception {
    CachedContent cachedContent = new CachedContent(5, "key1", 10);
    index.addNew(cachedContent);

    index.maybeRemove(cachedContent.key);

    assertThat(index.get(cachedContent.key)).isNull();
  }

  public void testCantRemoveNotEmptyCachedContent() throws Exception {
    CachedContent cachedContent = new CachedContent(5, "key1", 10);
    index.addNew(cachedContent);
    File cacheSpanFile =
        SimpleCacheSpanTest.createCacheSpanFile(cacheDir, cachedContent.id, 10, 20, 30);
    SimpleCacheSpan span = SimpleCacheSpan.createCacheEntry(cacheSpanFile, index);
    cachedContent.addSpan(span);

    index.maybeRemove(cachedContent.key);

    assertThat(index.get(cachedContent.key)).isNotNull();
  }

  public void testCantRemoveLockedCachedContent() throws Exception {
    CachedContent cachedContent = new CachedContent(5, "key1", 10);
    cachedContent.setLocked(true);
    index.addNew(cachedContent);

    index.maybeRemove(cachedContent.key);

    assertThat(index.get(cachedContent.key)).isNotNull();
  }

  /**
   * Stores {@code index} to disk, loads it back into {@code index2}, and
   * asserts the two indices agree on keys, content lengths and spans.
   */
  private void assertStoredAndLoadedEqual(CachedContentIndex index, CachedContentIndex index2)
      throws IOException {
    index.addNew(new CachedContent(5, "key1", 10));
    index.getOrAdd("key2");
    index.store();

    index2.load();
    Set<String> keys = index.getKeys();
    Set<String> keys2 = index2.getKeys();
    assertThat(keys2).isEqualTo(keys);
    for (String key : keys) {
      assertThat(index2.getContentLength(key)).isEqualTo(index.getContentLength(key));
      assertThat(index2.get(key).getSpans()).isEqualTo(index.get(key).getSpans());
    }
  }
}
/*
 * This file is part of Unusuals, licensed under the MIT license (MIT).
 *
 * Copyright (c) 2014-2018 Max Roncace <me@caseif.net>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package net.caseif.unusuals;

import com.google.common.collect.ImmutableSet;
import net.caseif.unusuals.handlers.BukkitParticleHandler;
import net.caseif.unusuals.handlers.IParticleHandler;
import net.caseif.unusuals.handlers.NmsParticleHandler;
import net.caseif.unusuals.nms.CraftBukkitHook;
import net.caseif.unusuals.typeprovider.BukkitParticleTypeProvider;
import net.caseif.unusuals.typeprovider.IParticleTypeProvider;
import net.caseif.unusuals.typeprovider.NmsParticleTypeProvider;
import org.apache.commons.lang.WordUtils;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryType;
import org.bukkit.event.inventory.InventoryType.SlotType;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerRespawnEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.plugin.java.JavaPlugin;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.logging.Logger;

/**
 * Plugin entry point for Unusuals: loads particle-effect definitions from the
 * config, spawns "Unusual" headwear items on command, and plays each wearer's
 * effect on a repeating scheduler task.
 *
 * <p>Review fixes: the reload permission node was misspelled
 * ({@code "unsuual.reload"} — now {@code "unusual.reload"}, consistent with
 * {@code "unusual.spawn"}); {@code onInventoryClick} now guards against a
 * {@code null} current item; {@code isUnusual} now guards against a
 * {@code null} display name; fixed the {@code [materal]} typo in a usage
 * message.
 */
public class Main extends JavaPlugin implements Listener {

    /** Item types that may be worn as helmets and therefore made Unusual. */
    private static final ImmutableSet<Material> HEADWEAR = ImmutableSet.of(
            Material.LEATHER_HELMET,
            Material.CHAINMAIL_HELMET,
            Material.IRON_HELMET,
            Material.GOLD_HELMET,
            Material.DIAMOND_HELMET,
            Material.PUMPKIN,
            Material.SKULL_ITEM
    );

    public static JavaPlugin plugin;
    public static Logger log;

    // Players currently wearing an Unusual, mapped to their active effect.
    private static HashMap<UUID, UnusualEffect> players = new HashMap<UUID, UnusualEffect>();

    private static final ChatColor UNUSUAL_COLOR = ChatColor.DARK_PURPLE;
    private static final ChatColor EFFECT_COLOR = ChatColor.GRAY;

    // Particle backend: modern Bukkit API when available, NMS fallback otherwise.
    static IParticleHandler handler;
    private static IParticleTypeProvider typeProvider;

    // Effect definitions loaded from config, keyed by effect name.
    private static HashMap<String, UnusualEffect> effects = new HashMap<String, UnusualEffect>();

    public void onEnable() {
        plugin = this;
        log = getLogger();

        // Pick a particle backend based on what the server exposes.
        try {
            Class.forName("org.bukkit.Particle");
            handler = new BukkitParticleHandler();
            typeProvider = new BukkitParticleTypeProvider();
        } catch (ClassNotFoundException ex) {
            try {
                Class.forName("org.bukkit.craftbukkit.Main");
                handler = new NmsParticleHandler(new CraftBukkitHook());
                typeProvider = new NmsParticleTypeProvider();
            } catch (ClassNotFoundException ex2) {
                log.severe("Incompatible server software! Cannot continue, disabling...");
                Bukkit.getPluginManager().disablePlugin(this);
                return;
            }
        }
        // disablePlugin above may have fired re-entrant disable logic
        if (plugin == null || !plugin.isEnabled()) {
            return;
        }

        Bukkit.getPluginManager().registerEvents(this, this);
        saveDefaultConfig();

        // updater
        if (getConfig().getBoolean("enable-updater")) {
            new Updater(this, 80091, this.getFile(), Updater.UpdateType.DEFAULT, true);
        }

        // submit metrics
        if (getConfig().getBoolean("enable-metrics")) {
            try {
                Metrics metrics = new Metrics(this);
                metrics.start();
            } catch (IOException ex) {
                log.warning("Failed to enable plugin metrics!");
            }
        }

        // Parse effect definitions. Each root section may define a particle
        // effect itself and/or nested sub-sections each defining one.
        ConfigurationSection cs = getConfig().getConfigurationSection("effects");
        if (cs != null) {
            // leaf value keys that must not be treated as sub-effect sections
            String[] nonSections = new String[]{"particles", "speed", "count", "radius"};
            for (String k : cs.getKeys(false)) { // root effects
                ConfigurationSection effectCs = cs.getConfigurationSection(k);
                if (effectCs != null) {
                    String effectName = effectCs.getName();
                    List<ParticleEffect> pEffects = new ArrayList<ParticleEffect>();
                    ParticleEffect eff = parseEffect(effectCs);
                    if (eff != null) {
                        pEffects.add(eff);
                    }
                    keyLoop:
                    for (String subKey : effectCs.getKeys(true)) { // subkeys of effects
                        for (String ns : nonSections) {
                            if (subKey.endsWith(ns)) {
                                continue keyLoop;
                            }
                        }
                        ConfigurationSection subCs = effectCs.getConfigurationSection(subKey);
                        if (subCs != null) {
                            ParticleEffect subEff = parseEffect(subCs);
                            if (subEff != null) {
                                pEffects.add(subEff);
                            }
                        }
                    }
                    if (pEffects.isEmpty()) {
                        getLogger().warning("Incomplete unusual effect definition for " + effectName
                                + ", ignoring.");
                        continue;
                    }
                    effects.put(effectName, new UnusualEffect(effectName, pEffects));
                }
            }
        }
        log.info("Loaded " + effects.size() + " effects");

        // Pick up players already online (e.g. after /reload).
        for (Player p : Bukkit.getOnlinePlayers()) {
            checkForUnusual(p, p.getInventory().getHelmet());
        }

        // Repeating task: display each tracked player's effect; drop entries
        // for players who have gone offline.
        Bukkit.getScheduler().runTaskTimer(this, new Runnable() {
            public void run() {
                Iterator<Map.Entry<UUID, UnusualEffect>> it = players.entrySet().iterator();
                while (it.hasNext()) {
                    Map.Entry<UUID, UnusualEffect> e = it.next();
                    Player pl = Bukkit.getPlayer(e.getKey());
                    if (pl != null) {
                        e.getValue().display(pl);
                    } else {
                        it.remove();
                    }
                }
            }
        }, 0L, getConfig().getLong("effect-interval"));

        log.info(this + " has been enabled!");
    }

    /**
     * Parses one particle effect from a config section, or returns
     * {@code null} if any of the required keys is missing or the particle id
     * is unknown.
     */
    private ParticleEffect parseEffect(ConfigurationSection subCs) {
        if (subCs.contains("particles") && subCs.contains("speed") && subCs.contains("count")
                && subCs.contains("radius")) {
            Object type = typeProvider.getTypeFromId(subCs.getString("particles"));
            if (type != null) {
                return new ParticleEffect(type, (float) subCs.getDouble("speed"),
                        subCs.getInt("count"), (float) subCs.getDouble("radius"));
            }
        }
        return null;
    }

    public void onDisable() {
        log.info(this + " has been disabled!");
        log = null;
        plugin = null;
    }

    /**
     * Builds an Unusual item of the given headwear type carrying the named
     * effect in its lore.
     *
     * @throws IllegalArgumentException if no effect with that name is loaded
     */
    private ItemStack createUnusual(Material type, String effect) {
        UnusualEffect uEffect = effects.get(effect);
        if (uEffect == null) {
            throw new IllegalArgumentException("Effect \"" + effect + "\" does not exist!");
        }
        ItemStack is = new ItemStack(type, 1);
        ItemMeta meta = is.getItemMeta();
        meta.setDisplayName(UNUSUAL_COLOR + "Unusual "
                + WordUtils.capitalize(type.toString().toLowerCase().replace("_", " ")));
        List<String> lore = new ArrayList<String>();
        lore.add(EFFECT_COLOR + "Effect: " + effect);
        meta.setLore(lore);
        is.setItemMeta(meta);
        return is;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
        if (label.equalsIgnoreCase("unusual") || label.equalsIgnoreCase("unusuals")) {
            if (args.length > 0) {
                if (args[0].equalsIgnoreCase("spawn")) {
                    if (sender instanceof Player) {
                        if (sender.hasPermission("unusual.spawn")) {
                            if (args.length > 1) {
                                Material mat = Material.matchMaterial(args[1]);
                                if (mat != null) {
                                    if (!HEADWEAR.contains(mat)) {
                                        sender.sendMessage(ChatColor.RED + "Item must be headwear!");
                                        return true;
                                    }
                                    if (args.length > 2) {
                                        // effect names may contain spaces; rejoin the tail args
                                        StringBuilder effectName = new StringBuilder();
                                        for (int i = 2; i < args.length; i++) {
                                            effectName.append(args[i]).append(i < args.length - 1 ? " " : "");
                                        }
                                        try {
                                            ((Player) sender).getInventory()
                                                    .addItem(createUnusual(mat, effectName.toString()));
                                            sender.sendMessage(ChatColor.DARK_PURPLE + "Enjoy your Unusual!");
                                        } catch (IllegalArgumentException ex) {
                                            if (ex.getMessage().contains("particle")) {
                                                sender.sendMessage(ChatColor.RED
                                                        + "The specified effect has an invalid particle type. "
                                                        + "Please report this to an administrator.");
                                            } else {
                                                sender.sendMessage(ChatColor.RED
                                                        + "Invalid effect! Usage: /unusual spawn "
                                                        + mat.toString() + " [effect name]");
                                            }
                                        }
                                    } else {
                                        sender.sendMessage(ChatColor.RED
                                                + "Too few arguments! Usage: /unusual spawn "
                                                + mat.toString() + " [effect name]");
                                    }
                                } else {
                                    sender.sendMessage(ChatColor.RED
                                            + "Invalid material! Usage: /unusual spawn [material] [effect name]");
                                }
                            } else {
                                sender.sendMessage(ChatColor.RED
                                        + "Too few arguments! Usage: /unusual spawn [material] [effect name]");
                            }
                        } else {
                            sender.sendMessage(ChatColor.RED
                                    + "You do not have permission to use this command!");
                        }
                    } else {
                        sender.sendMessage(ChatColor.RED + "You must be a player to use this command!");
                    }
                } else if (args[0].equalsIgnoreCase("reload")) {
                    // fixed permission node (was the typo "unsuual.reload")
                    if (sender.hasPermission("unusual.reload")) {
                        Bukkit.getPluginManager().disablePlugin(plugin);
                        reloadConfig();
                        Bukkit.getPluginManager()
                                .enablePlugin(Bukkit.getPluginManager().getPlugin("Unusuals"));
                        sender.sendMessage(ChatColor.GREEN + "[Unusuals] Successfully reloaded!");
                    } else {
                        sender.sendMessage(ChatColor.RED
                                + "You do not have permission to use this command!");
                    }
                } else {
                    sender.sendMessage(ChatColor.RED + "Invalid arguments! Usage: /unusual [args]");
                }
            } else {
                sender.sendMessage(ChatColor.LIGHT_PURPLE + "This server is running Unusuals v"
                        + plugin.getDescription().getVersion() + " by Maxim Roncace");
            }
            return true;
        }
        return false;
    }

    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onInventoryClick(InventoryClickEvent event) {
        if (event.getInventory().getHolder() instanceof Player) {
            if (event.getSlotType() == SlotType.ARMOR
                    && ((event.getInventory().getType() == InventoryType.PLAYER && event.getSlot() == 5)
                            || // wtf minecraft
                            (event.getInventory().getType() == InventoryType.CRAFTING
                                    && event.getSlot() == 39))) {
                // Item placed into (or removed from) the helmet slot.
                if (!Main.checkForUnusual((Player) event.getWhoClicked(), event.getCursor())) {
                    players.remove(event.getWhoClicked().getUniqueId()); // remove the unusual effect
                }
            } else {
                // getCurrentItem() may be null (empty slot); guard before use.
                ItemStack current = event.getCurrentItem();
                if (event.getClick().isShiftClick() && current != null
                        && HEADWEAR.contains(current.getType())) {
                    if (event.getSlotType() == SlotType.ARMOR) {
                        // shift-click out of the armor slot: effect is gone
                        players.remove(event.getWhoClicked().getUniqueId());
                    } else {
                        // shift-click into the armor slot: maybe start an effect
                        Main.checkForUnusual((Player) event.getWhoClicked(), current);
                    }
                }
            }
        }
    }

    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onPlayerJoin(PlayerJoinEvent event) {
        checkForUnusual(event.getPlayer(), event.getPlayer().getInventory().getHelmet());
    }

    @EventHandler(priority = EventPriority.MONITOR)
    public void onPlayerDeath(PlayerDeathEvent event) {
        players.remove(event.getEntity().getUniqueId());
    }

    @EventHandler(priority = EventPriority.MONITOR)
    public void onPlayerRespawn(PlayerRespawnEvent event) {
        checkForUnusual(event.getPlayer(), event.getPlayer().getInventory().getHelmet());
    }

    /**
     * Whether the item carries the Unusual display-name prefix and effect
     * lore this plugin writes in {@code createUnusual}.
     */
    private static boolean isUnusual(ItemStack itemstack) {
        return itemstack != null
                && itemstack.getItemMeta() != null
                && itemstack.getItemMeta().getLore() != null
                && !itemstack.getItemMeta().getLore().isEmpty()
                // display name may be null even when lore is present
                && itemstack.getItemMeta().getDisplayName() != null
                && itemstack.getItemMeta().getDisplayName().startsWith(UNUSUAL_COLOR + "Unusual ")
                && itemstack.getItemMeta().getLore().get(0).startsWith(EFFECT_COLOR + "Effect: ");
    }

    /**
     * If the item is an Unusual with a loaded effect, starts tracking the
     * player's effect and returns {@code true}; otherwise returns
     * {@code false}.
     */
    private static boolean checkForUnusual(Player player, ItemStack itemstack) {
        if (isUnusual(itemstack)) {
            String effectName = itemstack.getItemMeta().getLore().get(0)
                    .replace(EFFECT_COLOR + "Effect: ", ""); // extract the effect name
            UnusualEffect uEffect = effects.get(effectName);
            if (uEffect != null) { // make sure the effect is loaded
                players.put(player.getUniqueId(), uEffect);
                return true;
            }
        }
        return false;
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: load_balancer.proto package io.grpc.grpclb; /** * <pre> * Contains server information. When the drop field is not true, use the other * fields. * </pre> * * Protobuf type {@code grpc.lb.v1.Server} */ public final class Server extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grpc.lb.v1.Server) ServerOrBuilder { private static final long serialVersionUID = 0L; // Use Server.newBuilder() to construct. private Server(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Server() { ipAddress_ = com.google.protobuf.ByteString.EMPTY; port_ = 0; loadBalanceToken_ = ""; drop_ = false; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Server( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { ipAddress_ = input.readBytes(); break; } case 16: { port_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); loadBalanceToken_ = s; break; } case 32: { drop_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grpc.grpclb.LoadBalancerProto.internal_static_grpc_lb_v1_Server_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grpc.grpclb.LoadBalancerProto.internal_static_grpc_lb_v1_Server_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grpc.grpclb.Server.class, io.grpc.grpclb.Server.Builder.class); } public static final int IP_ADDRESS_FIELD_NUMBER = 1; private com.google.protobuf.ByteString ipAddress_; /** * <pre> * A resolved address for the server, serialized in network-byte-order. It may * either be an IPv4 or IPv6 address. * </pre> * * <code>bytes ip_address = 1;</code> */ public com.google.protobuf.ByteString getIpAddress() { return ipAddress_; } public static final int PORT_FIELD_NUMBER = 2; private int port_; /** * <pre> * A resolved port number for the server. * </pre> * * <code>int32 port = 2;</code> */ public int getPort() { return port_; } public static final int LOAD_BALANCE_TOKEN_FIELD_NUMBER = 3; private volatile java.lang.Object loadBalanceToken_; /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. * </pre> * * <code>string load_balance_token = 3;</code> */ public java.lang.String getLoadBalanceToken() { java.lang.Object ref = loadBalanceToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); loadBalanceToken_ = s; return s; } } /** * <pre> * An opaque but printable token given to the frontend for each pick. 
All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. * </pre> * * <code>string load_balance_token = 3;</code> */ public com.google.protobuf.ByteString getLoadBalanceTokenBytes() { java.lang.Object ref = loadBalanceToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); loadBalanceToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DROP_FIELD_NUMBER = 4; private boolean drop_; /** * <pre> * Indicates whether this particular request should be dropped by the client. * If the request is dropped, there will be a corresponding entry in * ClientStats.calls_finished_with_drop. * </pre> * * <code>bool drop = 4;</code> */ public boolean getDrop() { return drop_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!ipAddress_.isEmpty()) { output.writeBytes(1, ipAddress_); } if (port_ != 0) { output.writeInt32(2, port_); } if (!getLoadBalanceTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, loadBalanceToken_); } if (drop_ != false) { output.writeBool(4, drop_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!ipAddress_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, ipAddress_); } if (port_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, 
port_); } if (!getLoadBalanceTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, loadBalanceToken_); } if (drop_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(4, drop_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grpc.grpclb.Server)) { return super.equals(obj); } io.grpc.grpclb.Server other = (io.grpc.grpclb.Server) obj; boolean result = true; result = result && getIpAddress() .equals(other.getIpAddress()); result = result && (getPort() == other.getPort()); result = result && getLoadBalanceToken() .equals(other.getLoadBalanceToken()); result = result && (getDrop() == other.getDrop()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + IP_ADDRESS_FIELD_NUMBER; hash = (53 * hash) + getIpAddress().hashCode(); hash = (37 * hash) + PORT_FIELD_NUMBER; hash = (53 * hash) + getPort(); hash = (37 * hash) + LOAD_BALANCE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getLoadBalanceToken().hashCode(); hash = (37 * hash) + DROP_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getDrop()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static io.grpc.grpclb.Server parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grpc.grpclb.Server parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grpc.grpclb.Server 
parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grpc.grpclb.Server parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grpc.grpclb.Server parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grpc.grpclb.Server parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grpc.grpclb.Server parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static io.grpc.grpclb.Server parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static io.grpc.grpclb.Server parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static io.grpc.grpclb.Server parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static io.grpc.grpclb.Server parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static io.grpc.grpclb.Server parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grpc.grpclb.Server prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Contains server information. When the drop field is not true, use the other * fields. * </pre> * * Protobuf type {@code grpc.lb.v1.Server} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grpc.lb.v1.Server) io.grpc.grpclb.ServerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grpc.grpclb.LoadBalancerProto.internal_static_grpc_lb_v1_Server_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grpc.grpclb.LoadBalancerProto.internal_static_grpc_lb_v1_Server_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grpc.grpclb.Server.class, io.grpc.grpclb.Server.Builder.class); } // Construct using io.grpc.grpclb.Server.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public 
Builder clear() { super.clear(); ipAddress_ = com.google.protobuf.ByteString.EMPTY; port_ = 0; loadBalanceToken_ = ""; drop_ = false; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grpc.grpclb.LoadBalancerProto.internal_static_grpc_lb_v1_Server_descriptor; } public io.grpc.grpclb.Server getDefaultInstanceForType() { return io.grpc.grpclb.Server.getDefaultInstance(); } public io.grpc.grpclb.Server build() { io.grpc.grpclb.Server result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public io.grpc.grpclb.Server buildPartial() { io.grpc.grpclb.Server result = new io.grpc.grpclb.Server(this); result.ipAddress_ = ipAddress_; result.port_ = port_; result.loadBalanceToken_ = loadBalanceToken_; result.drop_ = drop_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grpc.grpclb.Server) { return mergeFrom((io.grpc.grpclb.Server)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grpc.grpclb.Server other) { if (other == 
io.grpc.grpclb.Server.getDefaultInstance()) return this; if (other.getIpAddress() != com.google.protobuf.ByteString.EMPTY) { setIpAddress(other.getIpAddress()); } if (other.getPort() != 0) { setPort(other.getPort()); } if (!other.getLoadBalanceToken().isEmpty()) { loadBalanceToken_ = other.loadBalanceToken_; onChanged(); } if (other.getDrop() != false) { setDrop(other.getDrop()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { io.grpc.grpclb.Server parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (io.grpc.grpclb.Server) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.protobuf.ByteString ipAddress_ = com.google.protobuf.ByteString.EMPTY; /** * <pre> * A resolved address for the server, serialized in network-byte-order. It may * either be an IPv4 or IPv6 address. * </pre> * * <code>bytes ip_address = 1;</code> */ public com.google.protobuf.ByteString getIpAddress() { return ipAddress_; } /** * <pre> * A resolved address for the server, serialized in network-byte-order. It may * either be an IPv4 or IPv6 address. * </pre> * * <code>bytes ip_address = 1;</code> */ public Builder setIpAddress(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ipAddress_ = value; onChanged(); return this; } /** * <pre> * A resolved address for the server, serialized in network-byte-order. It may * either be an IPv4 or IPv6 address. 
* </pre> * * <code>bytes ip_address = 1;</code> */ public Builder clearIpAddress() { ipAddress_ = getDefaultInstance().getIpAddress(); onChanged(); return this; } private int port_ ; /** * <pre> * A resolved port number for the server. * </pre> * * <code>int32 port = 2;</code> */ public int getPort() { return port_; } /** * <pre> * A resolved port number for the server. * </pre> * * <code>int32 port = 2;</code> */ public Builder setPort(int value) { port_ = value; onChanged(); return this; } /** * <pre> * A resolved port number for the server. * </pre> * * <code>int32 port = 2;</code> */ public Builder clearPort() { port_ = 0; onChanged(); return this; } private java.lang.Object loadBalanceToken_ = ""; /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. * </pre> * * <code>string load_balance_token = 3;</code> */ public java.lang.String getLoadBalanceToken() { java.lang.Object ref = loadBalanceToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); loadBalanceToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. 
* </pre> * * <code>string load_balance_token = 3;</code> */ public com.google.protobuf.ByteString getLoadBalanceTokenBytes() { java.lang.Object ref = loadBalanceToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); loadBalanceToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. * </pre> * * <code>string load_balance_token = 3;</code> */ public Builder setLoadBalanceToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } loadBalanceToken_ = value; onChanged(); return this; } /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. * </pre> * * <code>string load_balance_token = 3;</code> */ public Builder clearLoadBalanceToken() { loadBalanceToken_ = getDefaultInstance().getLoadBalanceToken(); onChanged(); return this; } /** * <pre> * An opaque but printable token given to the frontend for each pick. All * frontend requests for that pick must include the token in its initial * metadata. The token is used by the backend to verify the request and to * allow the backend to report load to the gRPC LB system. The token is also * used in client stats for reporting dropped calls. 
* </pre> * * <code>string load_balance_token = 3;</code> */ public Builder setLoadBalanceTokenBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); loadBalanceToken_ = value; onChanged(); return this; } private boolean drop_ ; /** * <pre> * Indicates whether this particular request should be dropped by the client. * If the request is dropped, there will be a corresponding entry in * ClientStats.calls_finished_with_drop. * </pre> * * <code>bool drop = 4;</code> */ public boolean getDrop() { return drop_; } /** * <pre> * Indicates whether this particular request should be dropped by the client. * If the request is dropped, there will be a corresponding entry in * ClientStats.calls_finished_with_drop. * </pre> * * <code>bool drop = 4;</code> */ public Builder setDrop(boolean value) { drop_ = value; onChanged(); return this; } /** * <pre> * Indicates whether this particular request should be dropped by the client. * If the request is dropped, there will be a corresponding entry in * ClientStats.calls_finished_with_drop. 
* </pre> * * <code>bool drop = 4;</code> */ public Builder clearDrop() { drop_ = false; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grpc.lb.v1.Server) } // @@protoc_insertion_point(class_scope:grpc.lb.v1.Server) private static final io.grpc.grpclb.Server DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grpc.grpclb.Server(); } public static io.grpc.grpclb.Server getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Server> PARSER = new com.google.protobuf.AbstractParser<Server>() { public Server parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Server(input, extensionRegistry); } }; public static com.google.protobuf.Parser<Server> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Server> getParserForType() { return PARSER; } public io.grpc.grpclb.Server getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Copyright 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.pysrc.internal; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import com.google.template.soy.error.ErrorReporter; import com.google.template.soy.msgs.internal.IcuSyntaxUtils; import com.google.template.soy.msgs.internal.MsgUtils; import com.google.template.soy.msgs.internal.MsgUtils.MsgPartsAndIds; import com.google.template.soy.msgs.restricted.SoyMsgPart; import com.google.template.soy.msgs.restricted.SoyMsgPart.Case; import com.google.template.soy.msgs.restricted.SoyMsgPlaceholderPart; import com.google.template.soy.msgs.restricted.SoyMsgPluralCaseSpec; import com.google.template.soy.msgs.restricted.SoyMsgPluralPart; import com.google.template.soy.msgs.restricted.SoyMsgRawTextPart; import com.google.template.soy.pysrc.internal.GenPyExprsVisitor.GenPyExprsVisitorFactory; import com.google.template.soy.pysrc.restricted.PyExpr; import com.google.template.soy.pysrc.restricted.PyExprUtils; import com.google.template.soy.pysrc.restricted.PyFunctionExprBuilder; import com.google.template.soy.pysrc.restricted.PyStringExpr; import com.google.template.soy.soytree.AbstractParentSoyNode; import com.google.template.soy.soytree.CallNode; import 
com.google.template.soy.soytree.MsgHtmlTagNode; import com.google.template.soy.soytree.MsgNode; import com.google.template.soy.soytree.MsgPlaceholderNode; import com.google.template.soy.soytree.MsgPluralNode; import com.google.template.soy.soytree.MsgSelectNode; import com.google.template.soy.soytree.PrintNode; import com.google.template.soy.soytree.SoyNode.MsgPlaceholderInitialNode; import com.google.template.soy.soytree.SoyNode.MsgSubstUnitNode; import com.google.template.soy.soytree.SoyNode.ParentSoyNode; import java.util.LinkedHashMap; import java.util.Map; /** * Class to generate python code for one {@link MsgNode}. * */ public final class MsgFuncGenerator { /** Factory for assisted injection **/ public static interface MsgFuncGeneratorFactory { MsgFuncGenerator create( MsgNode node, LocalVariableStack localVarExprs, ErrorReporter errorReporter); } /** The msg node to generate the function calls from. */ private final MsgNode msgNode; /** The generated msg id with the same algorithm for translation service. 
*/ private final long msgId; private final ImmutableList<SoyMsgPart> msgParts; private final GenPyExprsVisitor genPyExprsVisitor; /** The function builder for the prepare_*() method **/ private final PyFunctionExprBuilder prepareFunc; /** The function builder for the render_*() method **/ private final PyFunctionExprBuilder renderFunc; private final TranslateToPyExprVisitor translateToPyExprVisitor; @AssistedInject MsgFuncGenerator( GenPyExprsVisitorFactory genPyExprsVisitorFactory, @Assisted MsgNode msgNode, @Assisted LocalVariableStack localVarExprs, @Assisted ErrorReporter errorReporter) { this.msgNode = msgNode; this.genPyExprsVisitor = genPyExprsVisitorFactory.create(localVarExprs, errorReporter); this.translateToPyExprVisitor = new TranslateToPyExprVisitor(localVarExprs, errorReporter); String translator = PyExprUtils.TRANSLATOR_NAME; if (this.msgNode.isPlrselMsg()) { if (this.msgNode.isPluralMsg()) { this.prepareFunc = new PyFunctionExprBuilder(translator + ".prepare_plural"); this.renderFunc = new PyFunctionExprBuilder(translator + ".render_plural"); } else { this.prepareFunc = new PyFunctionExprBuilder(translator + ".prepare_icu"); this.renderFunc = new PyFunctionExprBuilder(translator + ".render_icu"); } } else if (this.msgNode.isRawTextMsg()) { this.prepareFunc = new PyFunctionExprBuilder(translator + ".prepare_literal"); this.renderFunc = new PyFunctionExprBuilder(translator + ".render_literal"); } else { this.prepareFunc = new PyFunctionExprBuilder(translator + ".prepare"); this.renderFunc = new PyFunctionExprBuilder(translator + ".render"); } MsgPartsAndIds msgPartsAndIds = MsgUtils.buildMsgPartsAndComputeMsgIdForDualFormat(msgNode); Preconditions.checkNotNull(msgPartsAndIds); this.msgId = msgPartsAndIds.id; this.msgParts = msgPartsAndIds.parts; Preconditions.checkState(!msgParts.isEmpty()); } /** * Return the PyStringExpr for the render function call, because we know render always return a * string in Python runtime. 
*/ PyStringExpr getPyExpr() { if (this.msgNode.isPlrselMsg()) { return this.msgNode.isPluralMsg() ? pyFuncForPluralMsg() : pyFuncForSelectMsg(); } else { return this.msgNode.isRawTextMsg() ? pyFuncForRawTextMsg() : pyFuncForGeneralMsg(); } } private PyStringExpr pyFuncForRawTextMsg() { String pyMsgText = processMsgPartsHelper(msgParts, escaperForPyFormatString); prepareFunc.addArg(msgId) .addArg(pyMsgText); return renderFunc.addArg(prepareFunc.asPyExpr()) .asPyStringExpr(); } private PyStringExpr pyFuncForGeneralMsg() { String pyMsgText = processMsgPartsHelper(msgParts, escaperForPyFormatString); Map<PyExpr, PyExpr> nodePyVarToPyExprMap = collectVarNameListAndToPyExprMap(); prepareFunc.addArg(msgId) .addArg(pyMsgText) .addArg(PyExprUtils.convertIterableToPyTupleExpr(nodePyVarToPyExprMap.keySet())); return renderFunc.addArg(prepareFunc.asPyExpr()) .addArg(PyExprUtils.convertMapToPyExpr(nodePyVarToPyExprMap)) .asPyStringExpr(); } private PyStringExpr pyFuncForPluralMsg() { SoyMsgPluralPart pluralPart = (SoyMsgPluralPart) msgParts.get(0); MsgPluralNode pluralNode = msgNode.getRepPluralNode(pluralPart.getPluralVarName()); Map<PyExpr, PyExpr> nodePyVarToPyExprMap = collectVarNameListAndToPyExprMap(); Map<PyExpr, PyExpr> caseSpecStrToMsgTexts = new LinkedHashMap<>(); for (Case<SoyMsgPluralCaseSpec> pluralCase : pluralPart.getCases()) { caseSpecStrToMsgTexts.put( new PyStringExpr("'" + pluralCase.spec() + "'"), new PyStringExpr("'" + processMsgPartsHelper(pluralCase.parts(), nullEscaper) + "'")); } prepareFunc.addArg(msgId) .addArg(PyExprUtils.convertMapToPyExpr(caseSpecStrToMsgTexts)) .addArg(PyExprUtils.convertIterableToPyTupleExpr(nodePyVarToPyExprMap.keySet())); // Translates {@link MsgPluralNode#pluralExpr} into a Python lookup expression. // Note that pluralExpr represent the Soy expression inside the attributes of a plural tag. 
PyExpr pluralPyExpr = translateToPyExprVisitor.exec(pluralNode.getExpr()); return renderFunc.addArg(prepareFunc.asPyExpr()) .addArg(pluralPyExpr) .addArg(PyExprUtils.convertMapToPyExpr(nodePyVarToPyExprMap)) .asPyStringExpr(); } private PyStringExpr pyFuncForSelectMsg() { Map<PyExpr, PyExpr> nodePyVarToPyExprMap = collectVarNameListAndToPyExprMap(); ImmutableList<SoyMsgPart> msgPartsInIcuSyntax = IcuSyntaxUtils.convertMsgPartsToEmbeddedIcuSyntax(msgParts, true); String pyMsgText = processMsgPartsHelper(msgPartsInIcuSyntax, nullEscaper); prepareFunc.addArg(msgId) .addArg(pyMsgText) .addArg(PyExprUtils.convertIterableToPyTupleExpr(nodePyVarToPyExprMap.keySet())); return renderFunc.addArg(prepareFunc.asPyExpr()) .addArg(PyExprUtils.convertMapToPyExpr(nodePyVarToPyExprMap)) .asPyStringExpr(); } /** * Private helper to process and collect all variables used within this msg node for code * generation. * * @return A Map populated with all the variables used with in this message node, using * {@link MsgPlaceholderInitialNode#genBasePhName}. 
*/
private Map<PyExpr, PyExpr> collectVarNameListAndToPyExprMap() {
  Map<PyExpr, PyExpr> nodePyVarToPyExprMap = new LinkedHashMap<>();
  for (Map.Entry<String, MsgSubstUnitNode> entry : msgNode.getVarNameToRepNodeMap().entrySet()) {
    MsgSubstUnitNode substUnitNode = entry.getValue();
    PyExpr substPyExpr = null;

    if (substUnitNode instanceof MsgPlaceholderNode) {
      // A placeholder's first child determines how its value is generated.
      MsgPlaceholderInitialNode phInitialNode =
          (MsgPlaceholderInitialNode) ((AbstractParentSoyNode<?>) substUnitNode).getChild(0);

      if (phInitialNode instanceof PrintNode || phInitialNode instanceof CallNode) {
        substPyExpr = PyExprUtils.concatPyExprs(genPyExprsVisitor.exec(phInitialNode))
            .toPyString();
      }

      // when the placeholder is generated by HTML tags
      if (phInitialNode instanceof MsgHtmlTagNode) {
        substPyExpr = PyExprUtils.concatPyExprs(
            genPyExprsVisitor.execOnChildren((ParentSoyNode<?>) phInitialNode))
            .toPyString();
      }
    } else if (substUnitNode instanceof MsgPluralNode) {
      // Translates {@link MsgPluralNode#pluralExpr} into a Python lookup expression.
      // Note that {@code pluralExpr} represents the soy expression of the {@code plural} attr,
      // i.e. the {@code $numDrafts} in {@code {plural $numDrafts}...{/plural}}.
      substPyExpr = translateToPyExprVisitor.exec(((MsgPluralNode) substUnitNode).getExpr());
    } else if (substUnitNode instanceof MsgSelectNode) {
      substPyExpr = translateToPyExprVisitor.exec(((MsgSelectNode) substUnitNode).getExpr());
    }

    // Unsupported node kinds are silently skipped (substPyExpr stays null).
    if (substPyExpr != null) {
      nodePyVarToPyExprMap.put(new PyStringExpr("'" + entry.getKey() + "'"), substPyExpr);
    }
  }

  return nodePyVarToPyExprMap;
}

/**
 * Private helper to build valid Python string for a list of {@link SoyMsgPart}s.
 *
 * <p>It only processes {@link SoyMsgRawTextPart} and {@link SoyMsgPlaceholderPart} and ignores
 * others, because we didn't generate a direct string for plural and select nodes.
 *
 * <p>For {@link SoyMsgRawTextPart}, it appends the raw text and applies necessary escaping; For
 * {@link SoyMsgPlaceholderPart}, it turns the placeholder's variable name into Python replace
 * format.
 *
 * @param parts The SoyMsgPart parts to convert.
 * @param escaper A Function which provides escaping for raw text.
 *
 * @return A String representing all the {@code parts} in Python.
 */
private static String processMsgPartsHelper(ImmutableList<SoyMsgPart> parts,
    Function<String, String> escaper) {
  StringBuilder rawMsgTextSb = new StringBuilder();
  for (SoyMsgPart part : parts) {
    if (part instanceof SoyMsgRawTextPart) {
      rawMsgTextSb.append(escaper.apply(
          ((SoyMsgRawTextPart) part).getRawText()));
    }

    // Placeholders become Python str.format replacement fields: {phName}.
    if (part instanceof SoyMsgPlaceholderPart) {
      String phName = ((SoyMsgPlaceholderPart) part).getPlaceholderName();
      rawMsgTextSb.append("{" + phName + "}");
    }
  }
  return rawMsgTextSb.toString();
}

/**
 * A mapper to apply escaping for python format string.
 *
 * <p>It escapes '{' and '}' to '{{' and '}}' in the String, and escapes single
 * quotes, so the literal text survives Python's str.format processing.
 * @see "https://docs.python.org/2/library/string.html#formatstrings"
 */
private static Function<String, String> escaperForPyFormatString =
    new Function<String, String>() {
      @Override
      public String apply(String str) {
        return str.replaceAll("\\{", "{{").replaceAll("\\}", "}}").replaceAll("'", "\\\\'");
      }
    };

/**
 * A mapper which does nothing.
 */
private static Function<String, String> nullEscaper =
    new Function<String, String>() {
      @Override
      public String apply(String str) {
        return str;
      }
    };
}
// PayeesPanel

package org.javamoney.examples.ez.money.gui.dialog.preferences;

import static org.javamoney.examples.ez.money.model.DataTypeKeys.PAYEE;
import static org.javamoney.examples.ez.money.utility.IDHelper.MessageKeys.IN_USE;
import static org.javamoney.examples.ez.money.utility.IDHelper.MessageKeys.UNABLE_TO_REMOVE;

import java.awt.GridBagConstraints;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import org.javamoney.examples.ez.money.model.persisted.payee.Payee;
import org.javamoney.examples.ez.money.utility.IDHelper;
import org.javamoney.examples.ez.money.utility.TransactionHelper;

/**
 * This class facilitates managing the payees.
 */
public final class PayeesPanel extends DataElementPanel {
  /**
   * Constructs a new preferences panel.
   */
  public PayeesPanel() {
    super(PreferencesKeys.PAYEES);
    buildPanel();
  }

  /**
   * This method updates this panel's collection and selects its first element.
   */
  @Override
  public void updateView() {
    displayCollectables();
    getChooser().selectFirst();
  }

  //////////////////////////////////////////////////////////////////////////////
  // Start of protected methods.
  //////////////////////////////////////////////////////////////////////////////

  /**
   * This method prompts the user for a new unique identifier for the selected
   * payee, merging with an existing payee when necessary.
   */
  @Override
  protected void edit() {
    Payee payee = (Payee) getChooser().getSelectedElement();
    String newIdentifier = IDHelper.promptForEdit(PAYEE, payee.getIdentifier());

    if (newIdentifier == null) {
      return; // The prompt was canceled.
    }

    // Keep the old identifier around for the transaction mass update.
    String oldIdentifier = payee.getIdentifier();
    boolean changed = getCollection().changeIdentifier(payee, newIdentifier);

    if (!changed && IDHelper.confirmMerge()) {
      // It is safe to assume the identifier already existed; merge into it.
      getCollection().remove(payee);

      // Since elements are not case sensitive, add it again just in case it
      // doesn't exist but failed to change.
      getCollection().add(new Payee(newIdentifier));
      payee = (Payee) getCollection().get(newIdentifier);
      changed = true;
    }

    if (changed) {
      // Update all transactions, the view, and select the edited element.
      TransactionHelper.massUpdate(TransactionHelper.MassUpdateFieldKeys.PAYEE,
          oldIdentifier, newIdentifier);
      displayCollectables();
      getChooser().setSelectedCollectable(payee);
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  // Start of private methods.
  //////////////////////////////////////////////////////////////////////////////

  /**
   * Prompts for a new payee identifier and adds it to the collection.
   */
  private void add() {
    String identifier = IDHelper.promptForAdd(PAYEE);

    if (identifier == null) {
      return; // The prompt was canceled.
    }

    Payee payee = new Payee(identifier);

    if (getCollection().add(payee)) {
      // Update the view and select the new element.
      displayCollectables();
      getChooser().setSelectedCollectable(payee);
    } else {
      // It is safe to assume it already existed.
      IDHelper.showMessage(IN_USE, PAYEE);
    }
  }

  /**
   * Lays out the chooser panel above the button panel.
   */
  private void buildPanel() {
    ActionHandler handler = new ActionHandler();

    // Build panel.
    setFill(GridBagConstraints.BOTH);
    add(createChooserPanel(handler), 0, 0, 1, 1, 100, 100);
    add(createButtonPanel(handler), 0, 1, 1, 1, 0, 0);
  }

  /**
   * Displays the collection's elements and refreshes the dependent widgets.
   */
  private void displayCollectables() {
    getChooser().displayCollectables();
    enableLinks();
    showProperChooserPanel();
  }

  /**
   * Removes the selected payee after confirmation and clears it from all
   * transactions that reference it.
   */
  private void remove() {
    if (!IDHelper.confirmRemoval(PAYEE)) {
      return;
    }

    Payee payee = (Payee) getChooser().getSelectedElement();

    if (getCollection().remove(payee)) {
      // Update all transactions and the view.
      TransactionHelper.massUpdate(TransactionHelper.MassUpdateFieldKeys.PAYEE,
          payee.getIdentifier(), "");
      displayCollectables();

      if (getChooser().length() != 0) {
        getChooser().selectFirst();
      }
    } else {
      IDHelper.showMessage(UNABLE_TO_REMOVE, PAYEE);
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  // Start of inner classes.
  //////////////////////////////////////////////////////////////////////////////

  /**
   * Dispatches button presses to the matching panel operation.
   */
  private class ActionHandler implements ActionListener {
    public void actionPerformed(ActionEvent event) {
      String command = event.getActionCommand();

      if (command.equals(ACTION_ADD)) {
        add();
      } else if (command.equals(ACTION_EDIT)) {
        edit();
      } else if (command.equals(ACTION_REMOVE)) {
        remove();
      }
    }
  }
}
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui; import com.intellij.featureStatistics.FeatureUsageTracker; import com.intellij.ide.DataManager; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.CustomShortcutSet; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.openapi.wm.ex.ToolWindowManagerAdapter; import com.intellij.openapi.wm.ex.ToolWindowManagerEx; import com.intellij.openapi.wm.ex.ToolWindowManagerListener; import com.intellij.psi.codeStyle.NameUtil; import com.intellij.ui.speedSearch.SpeedSearchSupply; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.text.AttributeSet; import javax.swing.text.BadLocationException; import javax.swing.text.PlainDocument; import java.awt.*; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import 
java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.ListIterator;
import java.util.NoSuchElementException;

/**
 * Base class for "speed search": typing while the wrapped component is focused
 * pops up a small search field and moves the component's selection to matching
 * elements. Subclasses supply the element model (getAllElements/getElementText/
 * getSelectedIndex/selectElement).
 */
public abstract class SpeedSearchBase<Comp extends JComponent> extends SpeedSearchSupply {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ui.SpeedSearchBase");
  private SearchPopup mySearchPopup;
  private JLayeredPane myPopupLayeredPane;
  protected final Comp myComponent;
  private final ToolWindowManagerListener myWindowManagerListener = new MyToolWindowManagerListener();
  private final PropertyChangeSupport myChangeSupport = new PropertyChangeSupport(this);
  private String myRecentEnteredPrefix;
  private SpeedSearchComparator myComparator = new SpeedSearchComparator(false);
  private boolean myClearSearchOnNavigateNoMatch = false;

  @NonNls protected static final String ENTERED_PREFIX_PROPERTY_NAME = "enteredPrefix";

  public SpeedSearchBase(Comp component) {
    myComponent = component;

    // The popup is dismissed when the component loses focus.
    myComponent.addFocusListener(new FocusAdapter() {
      @Override
      public void focusLost(FocusEvent e) {
        manageSearchPopup(null);
      }
    });
    myComponent.addKeyListener(new KeyAdapter() {
      @Override
      public void keyTyped(KeyEvent e) {
        processKeyEvent(e);
      }

      @Override
      public void keyPressed(KeyEvent e) {
        processKeyEvent(e);
      }
    });

    // Ctrl/Cmd+Backspace removes the last word of the entered prefix.
    new AnAction() {
      @Override
      public void actionPerformed(AnActionEvent e) {
        final String prefix = getEnteredPrefix();
        assert prefix != null;
        final String[] strings = NameUtil.splitNameIntoWords(prefix);
        final String last = strings[strings.length - 1];
        final int i = prefix.lastIndexOf(last);
        mySearchPopup.mySearchField.setText(prefix.substring(0, i).trim());
      }

      @Override
      public void update(AnActionEvent e) {
        e.getPresentation().setEnabled(isPopupActive() && !StringUtil.isEmpty(getEnteredPrefix()));
      }
    }.registerCustomShortcutSet(CustomShortcutSet.fromString(SystemInfo.isMac ?
"meta BACK_SPACE" : "control BACK_SPACE"), myComponent);
    installSupplyTo(component);
  }

  public static boolean hasActiveSpeedSearch(JComponent component) {
    return getSupply(component) != null;
  }

  public void setClearSearchOnNavigateNoMatch(boolean clearSearchOnNavigateNoMatch) {
    myClearSearchOnNavigateNoMatch = clearSearchOnNavigateNoMatch;
  }

  @Override
  public boolean isPopupActive() {
    return mySearchPopup != null && mySearchPopup.isVisible();
  }

  @Override
  public Iterable<TextRange> matchingFragments(@NotNull String text) {
    if (!isPopupActive()) return null;
    final SpeedSearchComparator comparator = getComparator();
    final String recentSearchText = comparator.getRecentSearchText();
    return StringUtil.isNotEmpty(recentSearchText) ? comparator.matchingFragments(recentSearchText, text) : null;
  }

  /**
   * Returns visual (view) selection index.
   */
  protected abstract int getSelectedIndex();

  protected abstract Object[] getAllElements();

  @Nullable
  protected abstract String getElementText(Object element);

  protected int getElementCount() {
    return getAllElements().length;
  }

  /**
   * Should convert given view index to model index
   */
  protected int convertIndexToModel(final int viewIndex) {
    return viewIndex;
  }

  /**
   * @param element Element to select. Don't forget to convert model index to view index if needed (i.e. table.convertRowIndexToView(modelIndex), etc).
   * @param selectedText search text
   */
  protected abstract void selectElement(Object element, String selectedText);

  // A negative startingIndex positions the iterator past the last element.
  protected ListIterator<Object> getElementIterator(int startingIndex) {
    return new ViewIterator(this, startingIndex < 0 ?
getElementCount() : startingIndex);
  }

  public void addChangeListener(@NotNull PropertyChangeListener listener) {
    myChangeSupport.addPropertyChangeListener(listener);
  }

  public void removeChangeListener(@NotNull PropertyChangeListener listener) {
    myChangeSupport.removePropertyChangeListener(listener);
  }

  // Fires the "enteredPrefix" property change and remembers the new prefix.
  private void fireStateChanged() {
    String enteredPrefix = getEnteredPrefix();
    myChangeSupport.firePropertyChange(ENTERED_PREFIX_PROPERTY_NAME, myRecentEnteredPrefix, enteredPrefix);
    myRecentEnteredPrefix = enteredPrefix;
  }

  protected boolean isMatchingElement(Object element, String pattern) {
    String str = getElementText(element);
    return str != null && compare(str, pattern);
  }

  protected boolean compare(String text, String pattern) {
    return myComparator.matchingFragments(pattern, text) != null;
  }

  public SpeedSearchComparator getComparator() {
    return myComparator;
  }

  public void setComparator(final SpeedSearchComparator comparator) {
    myComparator = comparator;
  }

  /**
   * Finds the next matching element after the current selection, wrapping to
   * the start when cycle scrolling is enabled, and finally falling back to the
   * current element itself if it matches.
   */
  @Nullable
  private Object findNextElement(String s) {
    final String _s = s.trim();
    final int selectedIndex = getSelectedIndex();
    final ListIterator<?> it = getElementIterator(selectedIndex + 1);
    final Object current;
    // Peek at the element just before the starting point without moving the cursor.
    if (it.hasPrevious()) {
      current = it.previous();
      it.next();
    }
    else current = null;
    while (it.hasNext()) {
      final Object element = it.next();
      if (isMatchingElement(element, _s)) return element;
    }

    if (UISettings.getInstance().CYCLE_SCROLLING) {
      final ListIterator<Object> i = getElementIterator(0);
      while (i.hasNext()) {
        final Object element = i.next();
        if (isMatchingElement(element, _s)) return element;
      }
    }

    return ( current != null && isMatchingElement(current, _s) ) ?
current : null;
  }

  /**
   * Mirror of {@link #findNextElement}: scans backwards from the selection,
   * wrapping to the end when cycle scrolling is enabled.
   */
  @Nullable
  private Object findPreviousElement(String s) {
    final String _s = s.trim();
    final int selectedIndex = getSelectedIndex();
    if (selectedIndex < 0) return null;
    final ListIterator<?> it = getElementIterator(selectedIndex);
    final Object current;
    // Peek at the element at the starting point without moving the cursor.
    if (it.hasNext()) {
      current = it.next();
      it.previous();
    }
    else current = null;
    while (it.hasPrevious()) {
      final Object element = it.previous();
      if (isMatchingElement(element, _s)) return element;
    }

    if (UISettings.getInstance().CYCLE_SCROLLING) {
      final ListIterator<Object> i = getElementIterator(getElementCount());
      while (i.hasPrevious()) {
        final Object element = i.previous();
        if (isMatchingElement(element, _s)) return element;
      }
    }

    return selectedIndex != -1 && isMatchingElement(current, _s) ? current : null;
  }

  /**
   * Finds a matching element starting at the selection (or the first element),
   * scanning forward and then wrapping over the portion before the selection.
   */
  @Nullable
  protected Object findElement(String s) {
    final String _s = s.trim();
    int selectedIndex = getSelectedIndex();
    if (selectedIndex < 0) {
      selectedIndex = 0;
    }
    final ListIterator<Object> it = getElementIterator(selectedIndex);
    while (it.hasNext()) {
      final Object element = it.next();
      if (isMatchingElement(element, _s)) return element;
    }
    if (selectedIndex > 0) {
      // Rewind and re-scan the elements before the original selection.
      while (it.hasPrevious()) it.previous();
      while (it.hasNext() && it.nextIndex() != selectedIndex) {
        final Object element = it.next();
        if (isMatchingElement(element, _s)) return element;
      }
    }
    return null;
  }

  @Nullable
  private Object findFirstElement(String s) {
    final String _s = s.trim();
    for (ListIterator<?> it = getElementIterator(0); it.hasNext();) {
      final Object element = it.next();
      if (isMatchingElement(element, _s)) return element;
    }
    return null;
  }

  @Nullable
  private Object findLastElement(String s) {
    final String _s = s.trim();
    for (ListIterator<?> it = getElementIterator(-1); it.hasPrevious();) {
      final Object element = it.previous();
      if (isMatchingElement(element, _s)) return element;
    }
    return null;
  }

  public void hidePopup() {
    manageSearchPopup(null);
  }

  protected void processKeyEvent(KeyEvent e) {
    if (e.isAltDown()) return;
    if
(mySearchPopup != null) {
      // An active popup consumes all further key events.
      mySearchPopup.processKeyEvent(e);
      return;
    }
    if (!isSpeedSearchEnabled()) return;
    if (e.getID() == KeyEvent.KEY_TYPED) {
      if (!UIUtil.isReallyTypedEvent(e)) return;

      // Only these characters start a new search popup.
      char c = e.getKeyChar();
      if (Character.isLetterOrDigit(c) || c == '_' || c == '*' || c == '/' || c == ':' || c == '.' || c == '#') {
        manageSearchPopup(new SearchPopup(String.valueOf(c)));
        e.consume();
      }
    }
  }

  public Comp getComponent() {
    return myComponent;
  }

  protected boolean isSpeedSearchEnabled() {
    return true;
  }

  @Override
  @Nullable
  public String getEnteredPrefix() {
    return mySearchPopup != null ? mySearchPopup.mySearchField.getText() : null;
  }

  @Override
  public void refreshSelection() {
    if ( mySearchPopup != null ) mySearchPopup.refreshSelection();
  }

  /**
   * The floating panel shown while a speed search is active: a label plus the
   * text field holding the entered prefix.
   */
  private class SearchPopup extends JPanel {
    private final SearchField mySearchField;

    public SearchPopup(String initialString) {
      final Color foregroundColor = UIUtil.getToolTipForeground();
      Color color1 = new JBColor(UIUtil.getToolTipBackground().brighter(), Gray._111);
      mySearchField = new SearchField();
      final JLabel searchLabel = new JLabel(" " + UIBundle.message("search.popup.search.for.label") + " ");
      searchLabel.setFont(searchLabel.getFont().deriveFont(Font.BOLD));
      searchLabel.setForeground(foregroundColor);
      mySearchField.setBorder(null);
      mySearchField.setBackground(color1);
      mySearchField.setForeground(foregroundColor);

      // The document recomputes the match on every insertion and turns the
      // field red when the new text matches nothing.
      mySearchField.setDocument(new PlainDocument() {
        @Override
        public void insertString(int offs, String str, AttributeSet a) throws BadLocationException {
          String oldText;
          try {
            oldText = getText(0, getLength());
          }
          catch (BadLocationException e1) {
            oldText = "";
          }

          String newText = oldText.substring(0, offs) + str + oldText.substring(offs);
          super.insertString(offs, str, a);
          if (findElement(newText) == null) {
            mySearchField.setForeground(JBColor.RED);
          }
          else {
            mySearchField.setForeground(foregroundColor);
          }
        }
      });
      mySearchField.setText(initialString);

      setBorder(BorderFactory.createLineBorder(Color.gray, 1));
      setBackground(color1);
      setLayout(new BorderLayout());
      add(searchLabel, BorderLayout.WEST);
      add(mySearchField, BorderLayout.EAST);
      Object element = findElement(mySearchField.getText());
      onSearchFieldUpdated(initialString);
      updateSelection(element);
    }

    @Override
    public void processKeyEvent(KeyEvent e) {
      mySearchField.processKeyEvent(e);
      if (e.isConsumed()) {
        String s = mySearchField.getText();
        onSearchFieldUpdated(s);
        int keyCode = e.getKeyCode();
        Object element;
        if (isUpDownHomeEnd(keyCode)) {
          // Navigation keys move between matches instead of editing the prefix.
          element = findTargetElement(keyCode, s);
          if (myClearSearchOnNavigateNoMatch && element == null) {
            manageSearchPopup(null);
            element = findTargetElement(keyCode, "");
          }
        }
        else {
          element = findElement(s);
        }
        updateSelection(element);
      }
    }

    // Maps a navigation key to the corresponding find operation.
    @Nullable
    private Object findTargetElement(int keyCode, String searchPrefix) {
      if (keyCode == KeyEvent.VK_UP) {
        return findPreviousElement(searchPrefix);
      }
      else if (keyCode == KeyEvent.VK_DOWN) {
        return findNextElement(searchPrefix);
      }
      else if (keyCode == KeyEvent.VK_HOME) {
        return findFirstElement(searchPrefix);
      }
      else {
        assert keyCode == KeyEvent.VK_END;
        return findLastElement(searchPrefix);
      }
    }

    public void refreshSelection () {
      updateSelection(findElement(mySearchField.getText()));
    }

    // Selects the element (if any), recolors the field, resizes the popup,
    // and notifies listeners of the state change.
    private void updateSelection(Object element) {
      if (element != null) {
        selectElement(element, mySearchField.getText());
        mySearchField.setForeground(UIUtil.getLabelForeground());
      }
      else {
        mySearchField.setForeground(JBColor.red);
      }
      if (mySearchPopup != null) {
        mySearchPopup.setSize(mySearchPopup.getPreferredSize());
        mySearchPopup.validate();
      }

      fireStateChanged();
    }
  }

  protected void onSearchFieldUpdated(String pattern) {
  }

  /**
   * The non-focusable text field inside the popup; key events are delegated
   * to it explicitly rather than via focus.
   */
  private class SearchField extends JTextField {
    SearchField() {
      setFocusable(false);
    }

    @Override
    public Dimension getPreferredSize() {
      Dimension dim = super.getPreferredSize();
      dim.width = getFontMetrics(getFont()).stringWidth(getText()) + 10;
      return dim;
    }

    /**
     * I made this method public in order to be able to call it from the outside.
     * This is needed for delegating calls.
     */
    @Override
    public void processKeyEvent(KeyEvent e) {
      int i = e.getKeyCode();
      // Backspace on an empty field is swallowed so it doesn't reach the component.
      if (i == KeyEvent.VK_BACK_SPACE && getDocument().getLength() == 0) {
        e.consume();
        return;
      }
      // These keys dismiss the popup; only Escape is consumed so the others
      // still reach the underlying component.
      if (
        i == KeyEvent.VK_ENTER ||
        i == KeyEvent.VK_ESCAPE ||
        i == KeyEvent.VK_PAGE_UP ||
        i == KeyEvent.VK_PAGE_DOWN ||
        i == KeyEvent.VK_LEFT ||
        i == KeyEvent.VK_RIGHT
      ) {
        manageSearchPopup(null);
        if (i == KeyEvent.VK_ESCAPE) {
          e.consume();
        }
        return;
      }

      // Up/Down/Home/End are handled by SearchPopup as match navigation.
      if (isUpDownHomeEnd(i)) {
        e.consume();
        return;
      }

      super.processKeyEvent(e);
      if (i == KeyEvent.VK_BACK_SPACE) {
        e.consume();
      }
    }
  }

  private static boolean isUpDownHomeEnd(int keyCode) {
    return keyCode == KeyEvent.VK_HOME || keyCode == KeyEvent.VK_END || keyCode == KeyEvent.VK_UP || keyCode == KeyEvent.VK_DOWN;
  }

  /**
   * Installs (non-null argument) or removes (null) the search popup, keeping
   * the layered pane, tool-window listener, and listeners in sync.
   */
  private void manageSearchPopup(@Nullable SearchPopup searchPopup) {
    final Project project;
    if (ApplicationManager.getApplication() != null && !ApplicationManager.getApplication().isDisposed()) {
      project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(myComponent));
    }
    else {
      project = null;
    }

    if (mySearchPopup != null) {
      // Tear down the currently shown popup before installing the new state.
      myPopupLayeredPane.remove(mySearchPopup);
      myPopupLayeredPane.validate();
      myPopupLayeredPane.repaint();
      myPopupLayeredPane = null;

      if (project != null) {
        ((ToolWindowManagerEx)ToolWindowManager.getInstance(project)).removeToolWindowManagerListener(myWindowManagerListener);
      }
    }
    else if (searchPopup != null) {
      FeatureUsageTracker.getInstance().triggerFeatureUsed("ui.tree.speedsearch");
    }

    if (!myComponent.isShowing()) {
      mySearchPopup = null;
    }
    else {
      mySearchPopup = searchPopup;
    }

    fireStateChanged();

    if (mySearchPopup == null || !myComponent.isDisplayable()) return;

    if (project != null) {
      ((ToolWindowManagerEx)ToolWindowManager.getInstance(project)).addToolWindowManagerListener(myWindowManagerListener);
    }
    JRootPane rootPane = myComponent.getRootPane();
    if (rootPane != null) {
      myPopupLayeredPane = rootPane.getLayeredPane();
    }
    else {
      myPopupLayeredPane = null;
    }
    if
(myPopupLayeredPane == null) {
      LOG.error(toString() + " in " + myComponent);
      return;
    }
    myPopupLayeredPane.add(mySearchPopup, JLayeredPane.POPUP_LAYER);
    // NOTE(review): this re-check is not dead code — add() above may fire
    // listeners that re-enter manageSearchPopup(null), which nulls
    // myPopupLayeredPane (presumably the scenario behind #27482; confirm).
    if (myPopupLayeredPane == null) return; // See # 27482. Somewho it does happen...
    Point lPaneP = myPopupLayeredPane.getLocationOnScreen();
    Point componentP = getComponentLocationOnScreen();
    Rectangle r = getComponentVisibleRect();
    Dimension prefSize = mySearchPopup.getPreferredSize();
    Window window = (Window)SwingUtilities.getAncestorOfClass(Window.class, myComponent);

    Point windowP;
    if (window instanceof JDialog) {
      windowP = ((JDialog)window).getContentPane().getLocationOnScreen();
    }
    else if (window instanceof JFrame) {
      windowP = ((JFrame)window).getContentPane().getLocationOnScreen();
    }
    else {
      windowP = window.getLocationOnScreen();
    }

    // Place the popup just above the component's visible rect, clamped so it
    // stays inside the window.
    int y = r.y + componentP.y - lPaneP.y - prefSize.height;
    y = Math.max(y, windowP.y - lPaneP.y);
    mySearchPopup.setLocation(componentP.x - lPaneP.x + r.x, y);
    mySearchPopup.setSize(prefSize);
    mySearchPopup.setVisible(true);
    mySearchPopup.validate();
  }

  protected Rectangle getComponentVisibleRect() {
    return myComponent.getVisibleRect();
  }

  protected Point getComponentLocationOnScreen() {
    return myComponent.getLocationOnScreen();
  }

  // Hides the popup whenever the tool-window layout changes.
  private class MyToolWindowManagerListener extends ToolWindowManagerAdapter {
    @Override
    public void stateChanged() {
      manageSearchPopup(null);
    }
  }

  /**
   * A read-only ListIterator over the speed search's elements, applying
   * {@link #convertIndexToModel} on every access. Mutating operations throw
   * {@link UnsupportedOperationException}.
   */
  protected class ViewIterator implements ListIterator<Object> {
    private final SpeedSearchBase mySpeedSearch;
    private int myCurrentIndex;
    private final Object[] myElements;

    public ViewIterator(@NotNull final SpeedSearchBase speedSearch, final int startIndex) {
      mySpeedSearch = speedSearch;
      myCurrentIndex = startIndex;
      // The element array is snapshotted once at construction time.
      myElements = speedSearch.getAllElements();

      if (startIndex < 0 || startIndex > myElements.length) {
        throw new IndexOutOfBoundsException("Index: " + startIndex + " in: " + SpeedSearchBase.this.getClass());
      }
    }

    @Override
    public boolean hasPrevious() {
      return myCurrentIndex != 0;
    }

    @Override
    public Object
    previous() {
      final int i = myCurrentIndex - 1;
      if (i < 0) throw new NoSuchElementException();
      final Object previous = myElements[mySpeedSearch.convertIndexToModel(i)];
      myCurrentIndex = i;
      return previous;
    }

    @Override
    public int nextIndex() {
      return myCurrentIndex;
    }

    @Override
    public int previousIndex() {
      return myCurrentIndex - 1;
    }

    @Override
    public boolean hasNext() {
      return myCurrentIndex != myElements.length;
    }

    @Override
    public Object next() {
      if (myCurrentIndex + 1 > myElements.length) throw new NoSuchElementException();
      return myElements[mySpeedSearch.convertIndexToModel(myCurrentIndex++)];
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException("Not implemented in: " + getClass().getCanonicalName());
    }

    @Override
    public void set(Object o) {
      throw new UnsupportedOperationException("Not implemented in: " + getClass().getCanonicalName());
    }

    @Override
    public void add(Object o) {
      throw new UnsupportedOperationException("Not implemented in: " + getClass().getCanonicalName());
    }
  }
}
package edu.wisc.cs.jam.js;

import edu.wisc.cs.jam.SourceManager;
import edu.wisc.cs.jam.Dbg;
import edu.wisc.cs.jam.Scope;
import edu.wisc.cs.jam.Traversal;
import edu.wisc.cs.jam.Traversal.Traverser;
import edu.wisc.cs.jam.Transform;
import edu.wisc.cs.jam.Exp;

// This class contains utility functions related to JavaScript source
// manipulation. It can be instantiated directly (perhaps to create
// unique variable names) or it is subclassed by several specific
// transformations.
public class JSTransform implements Transform {

  // Counter used to generate variable names unique throughout the analysis.
  private static long varUnique = 0;
  public static final String prefix = "v";

  // Restarts the unique-name counter (names may repeat across resets).
  public static void resetVariableNames() {
    varUnique = 0;
  }

  // Returns the next unique name with the default "v" prefix.
  public String newVariableName() {
    return prefix + varUnique++;
  }

  // Returns the next unique name with a caller-supplied prefix.
  public String newVariableName(String pref) {
    return pref + varUnique++;
  }

  @Override
  public void run(SourceManager src) {
    Dbg.warn("Base JSTransform does nothing when run");
  }

  // Creates a NAME node whose identifier is not yet declared in scope |s|.
  protected Exp createNameNode(SourceManager sm, Scope s) {
    // Ensure that the generated name is unique.
    String name = null;
    while (s.isDeclared(name = newVariableName(), true)) {}
    // Create the new node.
    Exp e = JSExp.createName(sm, name);
    return e;
  }

  // Traverser that assigns a generated name to every anonymous function.
  public class DeanonymizeFunction implements Traverser {
    SourceManager sm;

    public DeanonymizeFunction(SourceManager src) {
      super();
      sm = src;
    }

    // Give an anonymous function a name.
    protected void deanonymize(Traversal t, Exp fn, Exp parent) {
      // Create a name for the function.
      Exp newName = createNameNode(sm, t.getScope());
      // Replace the current (empty) name with the generated one.
      fn.replaceChild(fn.getChild(0), newName);
    }

    @Override
    public boolean shouldTraverse(Traversal t, Exp e, Exp parent) {
      return true;
    }

    @Override
    public void visit(Traversal t, Exp e, Exp parent) {
      if (ExpUtil.isAnonymousFunction(e)) {
        deanonymize(t, e, parent);
        sm.reportCodeChange();
      }
    }
  }

  // Traverser that hoists function declarations out of control structures
  // to the nearest enclosing function body or script top level.
  public class FunctionElevate implements Traverser {
    SourceManager sm;

    public FunctionElevate(SourceManager src) {
      sm = src;
    }

    /*
     * Closure will convert function statements within control
     * structures into variables initialized with a function expression.
     * So we pull functions statements out.
     *
     * if (..) {
     *   function f() {
     *     return v;
     *   }
     *   x = v;
     * }
     *
     * so change it to...
     *
     * function f() {
     *   return v;
     * }
     * if (..) {
     *   x = v;
     * }
     *
     * The semantics of this may differ in different browsers, but
     * under Spidermonkey's interpretation, this is actually fine. A
     * function declaration is added to the global scope whether or not
     * it's containing block is executed.
     *
     * However, V8 works differently. A function declaration is added to
     * the global scope iff the containing block is executed.
     *
     * In either case, the calling context correctly determines the
     * value of free variables within the function after flattening.
     */
    protected void elevateFunction(Traversal t, Exp e, Exp parent) {
      assert !ExpUtil.isAnonymousFunction(e) : "Encountered unconverted anonymous function";
      if (!ExpUtil.isStatementBlock(parent)) {
        Exp stmt = ExpUtil.getEnclosingStatement(parent);
        Exp topBlock = stmt.getParent();
        assert ExpUtil.isStatementBlock(topBlock);
        // Walk upward to the outermost statement block that is still inside
        // the current function (or the script itself).
        Exp nextUp = topBlock.getParent();
        while (!topBlock.isScript() && !nextUp.isFunction()) {
          //Dbg.dbg("ELEVATE: " + e.getFirstChild() + " / " + e + " / " + parent + " / " + stmt + " / " + topBlock + " / " + blockParent);
          if (ExpUtil.isStatementBlock(nextUp)) {
            topBlock = nextUp;
          }
          nextUp = nextUp.getParent();
        }

        Exp fn = e.getFirstChild();
        assert fn.isName() : "Function with non-NAME name: " + fn;
        Exp fname = fn.clone();

        // Leave a reference to the function name in the original position and
        // move the declaration itself to the front of the chosen block.
        parent.replaceChild(e, fname);
        topBlock.addChildToFront(e);
      }
    }

    @Override
    public boolean shouldTraverse(Traversal t, Exp e, Exp parent) {
      return true;
    }

    @Override
    public void visit(Traversal t, Exp e, Exp parent) {
      // Getter/setter bodies are left in place.
      if (e.isFunction() && !parent.is(JSExp.GETTER_DEF) && !parent.is(JSExp.SETTER_DEF)) {
        elevateFunction(t, e, parent);
      }
    }
  }

  // Add explicit returns to functions without them.
public class ReturnExplicit implements Traverser { protected SourceManager sm; public ReturnExplicit(SourceManager src) { super(); sm = src; } @Override public boolean shouldTraverse(Traversal t, Exp e, Exp parent) { return true; } protected void addReturn(Traversal t, Exp e, Exp parent) { assert parent.isFunction(); assert e.is(JSExp.BLOCK); Exp ret = new JSExp(sm, JSExp.RETURN); e.addChildToBack(ret); } @Override public void visit(Traversal t, Exp e, Exp parent) { if (e.isFunction()) { Exp body = e.getLastChild(); assert body.is(JSExp.BLOCK); boolean needsReturn = false; if (body.getChildCount() == 0) needsReturn = true; if (!needsReturn) { Exp last = body.getLastChild(); if (!last.isReturn() && !last.isThrow()) { needsReturn = true; } } if (needsReturn) { addReturn(t, body, e); sm.reportCodeChange(); } } } } // Break up string literals that are too large for XSB. public class ArrayLiteralConverter implements Traverser { public static final int MAX_ENTRIES = 500; SourceManager sm; public ArrayLiteralConverter(SourceManager src) { sm = src; } @Override public boolean shouldTraverse(Traversal t, Exp e, Exp parent) { return true; } /* * Convert an array literal with many entries "a = [e0,...,eNm1]" into * a staged construction: * var v0 = [e0,...,eMAXm1]; * var v1 = [eMAX,...,eNm1]; * a = v0.concat(v1); */ protected void convertArrayLiteral(Traversal t, Exp e, Exp parent) { assert e.is(JSExp.ARRAYLIT); int entryCnt = e.getChildCount(); if (entryCnt > MAX_ENTRIES) { Exp part0 = new JSExp(sm, JSExp.ARRAYLIT); for (int i=0; i<MAX_ENTRIES; i++) { part0.addChildToBack(e.getFirstChild().detachFromParent()); } // Generate a new variable to hold the first part. Exp tmp0 = createNameNode(sm, t.getScope()); // Create a reference to use in place of the original array. Exp tmpUse0 = tmp0.clone(); // Create a var initializer for the new variable. Exp tmpInit0 = new JSExp(JSExp.VAR, tmp0); // And set it as the initialization value. 
tmpInit0.getFirstChild().addChildToBack(part0); Exp tmp1 = createNameNode(sm, t.getScope()); Exp tmpUse1 = tmp1.clone(); Exp tmpInit1 = new JSExp(JSExp.VAR, tmp1); // Create the concat node. Exp concatAcc = new JSExp(JSExp.GETPROP, tmpUse0, JSExp.createString(sm, "concat")); Exp concatCall = new JSExp(JSExp.CALL, concatAcc, tmpUse1); Exp stmt = ExpUtil.getEnclosingStatement(e); // Replace the original statement with the concatenation. parent.replaceChild(e, concatCall); // Set remainder of the original array to the temp variable. tmpInit1.getFirstChild().addChildToBack(e); stmt.getParent().addChildBefore(tmpInit0, stmt); stmt.getParent().addChildBefore(tmpInit1, stmt); // Recursively break up the array. convertArrayLiteral(t, e, tmpInit1.getFirstChild()); } } @Override public void visit(Traversal t, Exp e, Exp parent) { if (e.is(JSExp.ARRAYLIT) && e.getChildCount() > MAX_ENTRIES) { convertArrayLiteral(t, e, parent); } } } // Break up string literals that are too large for XSB. public class StringConverter implements Traverser { public static final int MAX_LENGTH = 5000; SourceManager sm; public StringConverter(SourceManager src) { sm = src; } @Override public boolean shouldTraverse(Traversal t, Exp e, Exp parent) { return true; } /* * Convert a really long string "ABC...XYZ" into a concatenation: * var v0 = "ABC"; * var v1 = v0 + "..."; * var v2 = v1 + "XYZ"; */ protected void convertString(Traversal t, Exp e, Exp parent) { assert e.isString(); String full = e.getString(); int len = full.length(); if (len > MAX_LENGTH) { String part0 = full.substring(0, len - MAX_LENGTH); String part1 = full.substring(len - MAX_LENGTH, len); Exp node0 = JSExp.createString(sm, part0); Exp node1 = JSExp.createString(sm, part1); // Generate a new variable. Exp tmp = createNameNode(sm, t.getScope()); // Create a reference to use in place of the original string. Exp tmpUse = tmp.clone(); // Create a var initializer for the new variable. 
Exp tmpInit = new JSExp(JSExp.VAR, tmp); // Create the concatenation node. Exp concat = new JSExp(JSExp.ADD, node0, node1); // And set it as the initialization value. tmpInit.getFirstChild().addChildToBack(concat); Exp stmt = ExpUtil.getEnclosingStatement(e); stmt.getParent().addChildBefore(tmpInit, stmt); // Insert the temporary reference into the original statement. parent.replaceChild(e, tmpUse); // Recursively break up the initial part. convertString(t, node0, concat); } } @Override public void visit(Traversal t, Exp e, Exp parent) { if (e.isString()) { String s = e.getString(); convertString(t, e, parent); } } } // Break up statements of the form // // a.b = c.d(); // // into // // var tmp = c.d(); // a.b = tmp; // public class SplitSetsAndCalls implements Traverser { SourceManager sm; public SplitSetsAndCalls(SourceManager src) { sm = src; } @Override public boolean shouldTraverse(Traversal t, Exp e, Exp parent) { return true; } protected void splitSetCall(Traversal t, Exp rhs, Exp assn) { assert assn.isAssign(); Exp tmpName = createNameNode(sm, t.getScope()); Exp tmpRef = tmpName.clone(); assn.replaceChild(rhs, tmpRef); tmpName.addChildToBack(rhs); Exp tmpInit = new JSExp(JSExp.VAR, tmpName); Exp stmt = ExpUtil.getEnclosingStatement(assn); Exp stmtParent = stmt.getParent(); stmtParent.addChildBefore(tmpInit, stmt); } @Override public void visit(Traversal t, Exp e, Exp parent) { if (e.isAssign()) { Exp lhs = e.cloneAssignLHS(); Exp rhs = ExpUtil.getAssignRHS(e); if (lhs.isAccessor() && rhs.isInvoke()) { splitSetCall(t, rhs, e); } } } } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.rename; import com.intellij.codeInsight.CodeInsightUtilBase; import com.intellij.ide.actions.CopyReferenceAction; import com.intellij.lang.Language; import com.intellij.lang.LanguageNamesValidation; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.undo.BasicUndoableAction; import com.intellij.openapi.command.undo.UndoManager; import com.intellij.openapi.command.undo.UndoableAction; import com.intellij.openapi.command.undo.UnexpectedUndoException; import com.intellij.openapi.editor.Document; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.PomTargetPsiElement; import com.intellij.psi.*; import com.intellij.psi.meta.PsiMetaData; import com.intellij.psi.meta.PsiMetaOwner; import com.intellij.psi.meta.PsiWritableMetaData; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.refactoring.listeners.UndoRefactoringElementListener; import com.intellij.refactoring.util.*; import com.intellij.usageView.UsageInfo; import com.intellij.usageView.UsageInfoFactory; import com.intellij.util.IncorrectOperationException; import java.util.HashMap; 
import com.intellij.util.containers.MultiMap; import consulo.annotation.access.RequiredReadAction; import consulo.logging.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.*; public class RenameUtil { private static final Logger LOG = Logger.getInstance(RenameUtil.class); private RenameUtil() { } @Nonnull @RequiredReadAction public static UsageInfo[] findUsages(final PsiElement element, final String newName, boolean searchInStringsAndComments, boolean searchForTextOccurrences, Map<? extends PsiElement, String> allRenames) { final List<UsageInfo> result = Collections.synchronizedList(new ArrayList<UsageInfo>()); PsiManager manager = element.getManager(); GlobalSearchScope projectScope = GlobalSearchScope.projectScope(manager.getProject()); RenamePsiElementProcessor processor = RenamePsiElementProcessor.forElement(element); Collection<PsiReference> refs = processor.findReferences(element, searchInStringsAndComments); for (final PsiReference ref : refs) { if (ref == null) { LOG.error("null reference from processor " + processor); continue; } PsiElement referenceElement = ref.getElement(); result.add(new MoveRenameUsageInfo(referenceElement, ref, ref.getRangeInElement().getStartOffset(), ref.getRangeInElement().getEndOffset(), element, ref.resolve() == null)); } processor.findCollisions(element, newName, allRenames, result); final PsiElement searchForInComments = processor.getElementToSearchInStringsAndComments(element); if (searchInStringsAndComments && searchForInComments != null) { String stringToSearch = ElementDescriptionUtil.getElementDescription(searchForInComments, NonCodeSearchDescriptionLocation.STRINGS_AND_COMMENTS); if (stringToSearch.length() > 0) { final String stringToReplace = getStringToReplace(element, newName, false, processor); UsageInfoFactory factory = new NonCodeUsageInfoFactory(searchForInComments, stringToReplace); TextOccurrencesUtil.addUsagesInStringsAndComments(searchForInComments, stringToSearch, 
result, factory); } } if (searchForTextOccurrences && searchForInComments != null) { String stringToSearch = ElementDescriptionUtil.getElementDescription(searchForInComments, NonCodeSearchDescriptionLocation.NON_JAVA); if (stringToSearch.length() > 0) { final String stringToReplace = getStringToReplace(element, newName, true, processor); addTextOccurrence(searchForInComments, result, projectScope, stringToSearch, stringToReplace); } final Pair<String, String> additionalStringToSearch = processor.getTextOccurrenceSearchStrings(searchForInComments, newName); if (additionalStringToSearch != null && additionalStringToSearch.first.length() > 0) { addTextOccurrence(searchForInComments, result, projectScope, additionalStringToSearch.first, additionalStringToSearch.second); } } return result.toArray(new UsageInfo[result.size()]); } private static void addTextOccurrence(final PsiElement element, final List<UsageInfo> result, final GlobalSearchScope projectScope, final String stringToSearch, final String stringToReplace) { UsageInfoFactory factory = new UsageInfoFactory() { @Override public UsageInfo createUsageInfo(@Nonnull PsiElement usage, int startOffset, int endOffset) { TextRange textRange = usage.getTextRange(); int start = textRange == null ? 
0 : textRange.getStartOffset(); return NonCodeUsageInfo.create(usage.getContainingFile(), start + startOffset, start + endOffset, element, stringToReplace); } }; TextOccurrencesUtil.addTextOccurences(element, stringToSearch, projectScope, result, factory); } public static void buildPackagePrefixChangedMessage(final VirtualFile[] virtualFiles, StringBuffer message, final String qualifiedName) { if (virtualFiles.length > 0) { message.append(RefactoringBundle.message("package.occurs.in.package.prefixes.of.the.following.source.folders.n", qualifiedName)); for (final VirtualFile virtualFile : virtualFiles) { message.append(virtualFile.getPresentableUrl()).append("\n"); } message.append(RefactoringBundle.message("these.package.prefixes.will.be.changed")); } } private static String getStringToReplace(PsiElement element, String newName, boolean nonJava, final RenamePsiElementProcessor theProcessor) { if (element instanceof PsiMetaOwner) { final PsiMetaOwner psiMetaOwner = (PsiMetaOwner)element; final PsiMetaData metaData = psiMetaOwner.getMetaData(); if (metaData != null) { return metaData.getName(); } } if (theProcessor != null) { String result = theProcessor.getQualifiedNameAfterRename(element, newName, nonJava); if (result != null) { return result; } } if (element instanceof PsiNamedElement) { return newName; } else { LOG.error("Unknown element type"); return null; } } public static void checkRename(PsiElement element, String newName) throws IncorrectOperationException { if (element instanceof PsiCheckedRenameElement) { ((PsiCheckedRenameElement)element).checkSetName(newName); } } public static void doRename(final PsiElement element, String newName, UsageInfo[] usages, final Project project, @Nullable final RefactoringElementListener listener) throws IncorrectOperationException{ final RenamePsiElementProcessor processor = RenamePsiElementProcessor.forElement(element); final String fqn = element instanceof PsiFile ? 
((PsiFile)element).getVirtualFile().getPath() : CopyReferenceAction.elementToFqn(element); if (fqn != null) { UndoableAction action = new BasicUndoableAction() { @Override public void undo() throws UnexpectedUndoException { if (listener instanceof UndoRefactoringElementListener) { ((UndoRefactoringElementListener)listener).undoElementMovedOrRenamed(element, fqn); } } @Override public void redo() throws UnexpectedUndoException { } }; UndoManager.getInstance(project).undoableActionPerformed(action); } processor.renameElement(element, newName, usages, listener); } public static void showErrorMessage(final IncorrectOperationException e, final PsiElement element, final Project project) { // may happen if the file or package cannot be renamed. e.g. locked by another application if (ApplicationManager.getApplication().isUnitTestMode()) { throw new RuntimeException(e); //LOG.error(e); //return; } ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { final String helpID = RenamePsiElementProcessor.forElement(element).getHelpID(element); String message = e.getMessage(); if (StringUtil.isEmpty(message)) { message = RefactoringBundle.message("rename.not.supported"); } CommonRefactoringUtil.showErrorMessage(RefactoringBundle.message("rename.title"), message, helpID, project); } }); } public static void doRenameGenericNamedElement(@Nonnull PsiElement namedElement, String newName, UsageInfo[] usages, @Nullable RefactoringElementListener listener) throws IncorrectOperationException { PsiWritableMetaData writableMetaData = null; if (namedElement instanceof PsiMetaOwner) { final PsiMetaData metaData = ((PsiMetaOwner)namedElement).getMetaData(); if (metaData instanceof PsiWritableMetaData) { writableMetaData = (PsiWritableMetaData)metaData; } } if (writableMetaData == null && !(namedElement instanceof PsiNamedElement)) { LOG.error("Unknown element type:" + namedElement); } boolean hasBindables = false; for (UsageInfo usage : usages) { if 
(!(usage.getReference() instanceof BindablePsiReference)) { rename(usage, newName); } else { hasBindables = true; } } if (writableMetaData != null) { writableMetaData.setName(newName); } else { PsiElement namedElementAfterRename = ((PsiNamedElement)namedElement).setName(newName); if (namedElementAfterRename != null) namedElement = namedElementAfterRename; } if (hasBindables) { for (UsageInfo usage : usages) { final PsiReference ref = usage.getReference(); if (ref instanceof BindablePsiReference) { try { ref.bindToElement(namedElement); } catch (IncorrectOperationException e) {//fall back to old scheme ref.handleElementRename(newName); } } } } if (listener != null) { listener.elementRenamed(namedElement); } } public static void rename(UsageInfo info, String newName) throws IncorrectOperationException { if (info.getElement() == null) return; PsiReference ref = info.getReference(); if (ref == null) return; ref.handleElementRename(newName); } @Nullable public static List<UnresolvableCollisionUsageInfo> removeConflictUsages(Set<UsageInfo> usages) { final List<UnresolvableCollisionUsageInfo> result = new ArrayList<UnresolvableCollisionUsageInfo>(); for (Iterator<UsageInfo> iterator = usages.iterator(); iterator.hasNext();) { UsageInfo usageInfo = iterator.next(); if (usageInfo instanceof UnresolvableCollisionUsageInfo) { result.add((UnresolvableCollisionUsageInfo)usageInfo); iterator.remove(); } } return result.isEmpty() ? 
null : result; } public static void addConflictDescriptions(UsageInfo[] usages, MultiMap<PsiElement, String> conflicts) { for (UsageInfo usage : usages) { if (usage instanceof UnresolvableCollisionUsageInfo) { conflicts.putValue(usage.getElement(), ((UnresolvableCollisionUsageInfo)usage).getDescription()); } } } public static void renameNonCodeUsages(@Nonnull Project project, @Nonnull NonCodeUsageInfo[] usages) { PsiDocumentManager.getInstance(project).commitAllDocuments(); Map<Document, List<UsageOffset>> docsToOffsetsMap = new HashMap<Document, List<UsageOffset>>(); final PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(project); for (NonCodeUsageInfo usage : usages) { PsiElement element = usage.getElement(); if (element == null) continue; element = CodeInsightUtilBase.forcePsiPostprocessAndRestoreElement(element); if (element == null) continue; final ProperTextRange rangeInElement = usage.getRangeInElement(); if (rangeInElement == null) continue; final PsiFile containingFile = element.getContainingFile(); final Document document = psiDocumentManager.getDocument(containingFile); final Segment segment = usage.getSegment(); LOG.assertTrue(segment != null); int fileOffset = segment.getStartOffset(); List<UsageOffset> list = docsToOffsetsMap.get(document); if (list == null) { list = new ArrayList<UsageOffset>(); docsToOffsetsMap.put(document, list); } list.add(new UsageOffset(fileOffset, fileOffset + rangeInElement.getLength(), usage.newText)); } for (Document document : docsToOffsetsMap.keySet()) { List<UsageOffset> list = docsToOffsetsMap.get(document); LOG.assertTrue(list != null, document); UsageOffset[] offsets = list.toArray(new UsageOffset[list.size()]); Arrays.sort(offsets); for (int i = offsets.length - 1; i >= 0; i--) { UsageOffset usageOffset = offsets[i]; document.replaceString(usageOffset.startOffset, usageOffset.endOffset, usageOffset.newText); } PsiDocumentManager.getInstance(project).commitDocument(document); } 
PsiDocumentManager.getInstance(project).commitAllDocuments(); } public static boolean isValidName(final Project project, final PsiElement psiElement, final String newName) { if (newName == null || newName.length() == 0) { return false; } final Condition<String> inputValidator = RenameInputValidatorRegistry.getInputValidator(psiElement); if (inputValidator != null) { return inputValidator.value(newName); } if (psiElement instanceof PsiFile || psiElement instanceof PsiDirectory) { return newName.indexOf('\\') < 0 && newName.indexOf('/') < 0; } if (psiElement instanceof PomTargetPsiElement) { return !StringUtil.isEmptyOrSpaces(newName); } final PsiFile file = psiElement.getContainingFile(); final Language elementLanguage = psiElement.getLanguage(); final Language fileLanguage = file == null ? null : file.getLanguage(); Language language = fileLanguage == null ? elementLanguage : fileLanguage.isKindOf(elementLanguage) ? fileLanguage : elementLanguage; return LanguageNamesValidation.INSTANCE.forLanguage(language).isIdentifier(newName.trim(), project); } private static class UsageOffset implements Comparable<UsageOffset> { final int startOffset; final int endOffset; final String newText; public UsageOffset(int startOffset, int endOffset, String newText) { this.startOffset = startOffset; this.endOffset = endOffset; this.newText = newText; } @Override public int compareTo(final UsageOffset o) { return startOffset - o.startOffset; } } }
/* * Copyright 2014 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp.lint; import com.google.javascript.jscomp.CheckLevel; import com.google.javascript.jscomp.Compiler; import com.google.javascript.jscomp.CompilerOptions; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.CompilerPass; import com.google.javascript.jscomp.DiagnosticGroups; import com.google.javascript.jscomp.TypeICompilerTestCase; /** * Test case for {@link CheckNullableReturn}. 
 */
public final class CheckNullableReturnTest extends TypeICompilerTestCase {

  // Externs shared by all tests: a constructible type to return.
  private static final String EXTERNS = DEFAULT_EXTERNS + "/** @constructor */ function SomeType() {}";

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new CheckNullableReturn(compiler);
  }

  @Override
  protected CompilerOptions getOptions(CompilerOptions options) {
    super.getOptions(options);
    // The check under test is reported through the lint-checks group.
    options.setWarningLevel(DiagnosticGroups.LINT_CHECKS, CheckLevel.WARNING);
    return options;
  }

  @Override
  protected int getNumRepetitions() {
    return 1;
  }

  public void testSimpleWarning() {
    testError(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {",
        " return new SomeType();",
        "}"));
  }

  public void testNullableReturn() {
    testBodyOk("return null;");
    testBodyOk("if (a) { return null; } return {};");
    testBodyOk("switch(1) { case 12: return null; } return {};");
    testBodyOk("/** @return {number} */ function f() { return 42; }; return null;");
  }

  public void testNotNullableReturn() {
    // Empty function body. Ignore this case. The remainder of the functions in
    // this test have non-empty bodies.
    this.mode = TypeInferenceMode.OTI_ONLY;
    testBodyOk("");
    this.mode = TypeInferenceMode.BOTH;

    // Simple case.
    testBodyError("return {};");

    // This implementation of an abstract method should not be reported.
    testBodyOk("throw new Error('Not implemented');");

    // Nested function.
    testBodyError("/** @return {number} */ function f() { return 1; }; return {};");

    testBodyError("switch(1) { default: return {}; } return null;");
    testBodyError("switch(g) { case 1: return {}; default: return {}; } return null;");
  }

  public void testFinallyStatements() {
    testBodyOk("try { return null; } finally { return {}; }");
    testBodyOk("try { } finally { return null; }");
    testBodyOk("try { return {}; } finally { return null; }");
    testBodyOk("try { return null; } finally { return {}; }");
    this.mode = TypeInferenceMode.OTI_ONLY;
    testBodyError("try { } catch (e) { return null; } finally { return {}; }");
  }

  public void testKnownConditions() {
    // Conditions with statically known truthiness.
    testBodyOk("if (true) return {}; return null;");
    testBodyOk("if (true) return null; else return {};");

    testBodyOk("if (false) return {}; return null;");
    testBodyOk("if (false) return null; else return {};");

    testBodyError("if (1) return {}; return {x: 42};");
    testBodyOk("if (1) { return null; } else { return {}; }");

    testBodyOk("if (0) return {}; return null;");
    testBodyOk("if (0) { return null; } else { return {}; }");

    testBodyOk("if (3) return null; else return {};");
  }

  public void testKnownWhileLoop() {
    testBodyError("while (1) return {}");
    testBodyError("while (1) { if (x) { return {}; } else { return {}; }}");
    testBodyError("while (0) {} return {}");

    // Not known.
    this.mode = TypeInferenceMode.OTI_ONLY;
    testBodyError("while(x) { return {}; }");
  }

  public void testTwoBranches() {
    testError(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {",
        " if (foo) {",
        " return new SomeType();",
        " } else {",
        " return new SomeType();",
        " }",
        "}"));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testError(LINE_JOINER.join(
        "var obj = {",
        " /** @return {SomeType} */",
        " f() {",
        " if (foo) {",
        " return new SomeType();",
        " } else {",
        " return new SomeType();",
        " }",
        " }",
        "}"));
  }

  public void testTryCatch() {
    testError(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {",
        " try {",
        " return new SomeType();",
        " } catch (e) {",
        " return new SomeType();",
        " }",
        "}"));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testError(LINE_JOINER.join(
        "var obj = {",
        " /** @return {SomeType} */",
        " f() {",
        " try {",
        " return new SomeType();",
        " } catch (e) {",
        " return new SomeType();",
        " }",
        " }",
        "}"));
    testBodyOk(LINE_JOINER.join(
        "try {",
        " if (a) throw '';",
        "} catch (e) {",
        " return null;",
        "}",
        "return {}"));
    testBodyOk(LINE_JOINER.join(
        "try {",
        " return bar();",
        "} catch (e) {",
        "} finally { return baz(); }"));
  }

  public void testNoExplicitReturn() {
    this.mode = TypeInferenceMode.OTI_ONLY;
    testError(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {",
        " if (foo) {",
        " return new SomeType();",
        " }",
        "}"));
  }

  public void testNoWarningIfCanReturnNull() {
    testOk(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {",
        " if (foo) {",
        " return new SomeType();",
        " } else {",
        " return null;",
        " }",
        "}"));
  }

  public void testNoWarningOnEmptyFunction() {
    this.mode = TypeInferenceMode.OTI_ONLY;
    testOk(LINE_JOINER.join(
        "/** @return {SomeType} */",
        "function f() {}"));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testOk(LINE_JOINER.join(
        "var obj = {",
        " /** @return {SomeType} */\n",
        " f() {}",
        "}"));
  }

  public void testNoWarningOnXOrNull() {
    testOk(LINE_JOINER.join(
        "/**",
        " * @param {!Array.<!Object>} arr",
        " * @return {Object}",
        " */",
        "function f4(arr) {",
        " return arr[0] || null;",
        "}"));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testOk(LINE_JOINER.join(
        "var obj = {",
        " /**",
        " * @param {!Array.<!Object>} arr",
        " * @return {Object}",
        " */",
        " f4(arr) {",
        " return arr[0] || null;",
        " }",
        "}"));
  }

  public void testNonfunctionTypeDoesntCrash() {
    enableClosurePass();
    test(DEFAULT_EXTERNS, "goog.forwardDeclare('FunType'); /** @type {!FunType} */ (function() { return; })",
        (String) null, null, null);
  }

  // Wraps a body in a function whose declared return type is nullable.
  private static String createFunction(String body) {
    return "/** @return {?Object} */ function foo() {" + body + "}";
  }

  // Same, but as an ES6 shorthand method inside an object literal.
  private static String createShorthandFunctionInObjLit(String body) {
    return "var obj = {/** @return {?Object} */ foo() {" + body + "}}";
  }

  // "Ok" = compiles with no nullable-return warning.
  private void testOk(String js) {
    testSame(EXTERNS, js, null);
  }

  // "Error" = the nullable-return lint warning is reported.
  private void testError(String js) {
    testSame(EXTERNS, js, CheckNullableReturn.NULLABLE_RETURN_WITH_NAME);
  }

  // Checks the body both as a plain function and as a shorthand method.
  private void testBodyOk(String body) {
    testOk(createFunction(body));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testOk(createShorthandFunctionInObjLit(body));
  }

  private void testBodyError(String body) {
    testError(createFunction(body));
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testError(createShorthandFunctionInObjLit(body));
  }
}
package com.battlelancer.seriesguide.ui; import android.content.Intent; import android.database.Cursor; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.os.AsyncTaskCompat; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.format.DateUtils; import android.text.style.TextAppearanceSpan; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.Unbinder; import com.battlelancer.seriesguide.R; import com.battlelancer.seriesguide.SgApp; import com.battlelancer.seriesguide.api.Action; import com.battlelancer.seriesguide.backend.HexagonTools; import com.battlelancer.seriesguide.enums.EpisodeFlags; import com.battlelancer.seriesguide.extensions.ActionsHelper; import com.battlelancer.seriesguide.extensions.EpisodeActionsContract; import com.battlelancer.seriesguide.extensions.ExtensionManager; import com.battlelancer.seriesguide.loaders.EpisodeActionsLoader; import com.battlelancer.seriesguide.provider.SeriesGuideContract.Episodes; import com.battlelancer.seriesguide.provider.SeriesGuideContract.ListItemTypes; import com.battlelancer.seriesguide.provider.SeriesGuideContract.Seasons; import com.battlelancer.seriesguide.provider.SeriesGuideContract.Shows; import com.battlelancer.seriesguide.provider.SeriesGuideDatabase.Tables; import com.battlelancer.seriesguide.settings.DisplaySettings; import 
com.battlelancer.seriesguide.settings.TraktCredentials; import com.battlelancer.seriesguide.thetvdbapi.TvdbTools; import com.battlelancer.seriesguide.ui.dialogs.CheckInDialogFragment; import com.battlelancer.seriesguide.ui.dialogs.ManageListsDialogFragment; import com.battlelancer.seriesguide.ui.dialogs.RateDialogFragment; import com.battlelancer.seriesguide.util.EpisodeTools; import com.battlelancer.seriesguide.util.LanguageTools; import com.battlelancer.seriesguide.util.ServiceUtils; import com.battlelancer.seriesguide.util.ShareUtils; import com.battlelancer.seriesguide.util.TextTools; import com.battlelancer.seriesguide.util.TimeTools; import com.battlelancer.seriesguide.util.TraktRatingsTask; import com.battlelancer.seriesguide.util.TraktTools; import com.battlelancer.seriesguide.util.Utils; import com.squareup.picasso.Callback; import com.squareup.picasso.Picasso; import com.uwetrottmann.androidutils.CheatSheet; import java.util.Date; import java.util.List; import java.util.Locale; import org.greenrobot.eventbus.EventBus; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import timber.log.Timber; /** * Displays details about a single episode like summary, ratings and episode image if available. 
 */
public class EpisodeDetailsFragment extends Fragment implements EpisodeActionsContract {

    // Analytics tag used with Utils.trackAction throughout this fragment.
    private static final String TAG = "Episode Details";
    // Key for the episode id argument passed to the actions loader (see loadEpisodeActions()).
    private static final String KEY_EPISODE_TVDB_ID = "episodeTvdbId";

    // Handler used to debounce reloads of episode actions; callbacks are removed in onPause().
    private Handler mHandler = new Handler();
    // Background task fetching trakt ratings; cancelled in onDestroy().
    private TraktRatingsTask mTraktTask;

    // Episode state cached from the cursor in populateEpisodeData().
    protected int mEpisodeFlag;
    protected boolean mCollected;
    protected int mShowTvdbId;
    protected int mSeasonNumber;
    protected int mEpisodeNumber;
    private String mEpisodeTitle;
    private String mShowTitle;
    private int mShowRunTime;
    private long mEpisodeReleaseTime;

    // Views bound via ButterKnife in onCreateView(); released via unbinder in onDestroyView().
    @BindView(R.id.containerEpisode) View mEpisodeContainer;
    @BindView(R.id.containerRatings) View mRatingsContainer;
    @BindView(R.id.containerEpisodeActions) LinearLayout mActionsContainer;
    @BindView(R.id.containerEpisodeImage) View mImageContainer;
    @BindView(R.id.imageViewEpisode) ImageView mEpisodeImage;
    @BindView(R.id.textViewEpisodeTitle) TextView mTitle;
    @BindView(R.id.textViewEpisodeDescription) TextView mDescription;
    @BindView(R.id.textViewEpisodeReleaseTime) TextView mReleaseTime;
    @BindView(R.id.textViewEpisodeReleaseDate) TextView mReleaseDate;
    @BindView(R.id.textViewEpisodeLastEdit) TextView mLastEdit;
    @BindView(R.id.labelEpisodeGuestStars) View mLabelGuestStars;
    @BindView(R.id.textViewEpisodeGuestStars) TextView mGuestStars;
    @BindView(R.id.textViewEpisodeDirectors) TextView mDirectors;
    @BindView(R.id.textViewEpisodeWriters) TextView mWriters;
    @BindView(R.id.labelEpisodeDvd) View mLabelDvd;
    @BindView(R.id.textViewEpisodeDvd) TextView mDvd;
    @BindView(R.id.textViewRatingsValue) TextView mTextRating;
    @BindView(R.id.textViewRatingsVotes) TextView mTextRatingVotes;
    @BindView(R.id.textViewRatingsUser) TextView mTextUserRating;
    @BindView(R.id.dividerEpisodeButtons) View dividerEpisodeButtons;
    @BindView(R.id.buttonEpisodeCheckin) Button buttonCheckin;
    @BindView(R.id.buttonEpisodeWatched) Button buttonWatch;
    @BindView(R.id.buttonEpisodeCollected) Button buttonCollect;
    @BindView(R.id.buttonEpisodeSkip) Button buttonSkip;
    @BindView(R.id.buttonShowInfoIMDB) View mImdbButton;
    @BindView(R.id.buttonTVDB) View mTvdbButton;
    @BindView(R.id.buttonTrakt) View mTraktButton;
    @BindView(R.id.buttonWebSearch) View mWebSearchButton;
    @BindView(R.id.buttonShouts) Button mCommentsButton;

    private Unbinder unbinder;

    /**
     * Data which has to be passed when creating this fragment.
     */
    public interface InitBundle {
        /**
         * Integer extra.
         */
        String EPISODE_TVDBID = "episode_tvdbid";

        /**
         * Boolean extra.
         */
        String IS_IN_MULTIPANE_LAYOUT = "multipane";
    }

    /**
     * Creates a new instance with the required {@link InitBundle} arguments set.
     *
     * @param episodeId TVDb id of the episode to display.
     * @param isInMultiPaneLayout whether the host activity uses a multi-pane layout
     *         (affects the options menu icons, see onCreateOptionsMenu()).
     */
    public static EpisodeDetailsFragment newInstance(int episodeId, boolean isInMultiPaneLayout) {
        EpisodeDetailsFragment f = new EpisodeDetailsFragment();

        // Supply index input as an argument.
        Bundle args = new Bundle();
        args.putInt(InitBundle.EPISODE_TVDBID, episodeId);
        args.putBoolean(InitBundle.IS_IN_MULTIPANE_LAYOUT, isInMultiPaneLayout);
        f.setArguments(args);

        return f;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_episode, container, false);
        unbinder = ButterKnife.bind(this, v);

        // hide the whole episode container until the loader delivers data
        mEpisodeContainer.setVisibility(View.GONE);

        // comments button
        Utils.setVectorCompoundDrawable(getActivity().getTheme(), mCommentsButton,
                R.attr.drawableComments);

        // web search button unused, is available as extension
        mWebSearchButton.setVisibility(View.GONE);

        return v;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // kick off loading of the episode cursor
        getLoaderManager().initLoader(EpisodesActivity.EPISODE_LOADER_ID, null,
                mEpisodeDataLoaderCallbacks);

        setHasOptionsMenu(true);
    }

    @Override
    public void onResume() {
        super.onResume();

        // disable the flag buttons while an episode flag service task is still running
        BaseNavDrawerActivity.ServiceActiveEvent event = EventBus.getDefault()
                .getStickyEvent(BaseNavDrawerActivity.ServiceActiveEvent.class);
        setEpisodeButtonsEnabled(event == null);

        EventBus.getDefault().register(this);
        loadEpisodeActionsDelayed();
    }

    @Override
    public void onPause() {
        super.onPause();

        // stop any pending (debounced) actions reload; mHandler is assigned at construction,
        // so the null check is purely defensive
        if (mHandler != null) {
            mHandler.removeCallbacks(mEpisodeActionsRunnable);
        }
        EventBus.getDefault().unregister(this);
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();

        // Always cancel the request here, this is safe to call even if the image has been loaded.
        // This ensures that the anonymous callback we have does not prevent the fragment from
        // being garbage collected. It also prevents our callback from getting invoked even after
        // the fragment is destroyed.
        Picasso.with(getContext()).cancelRequest(mEpisodeImage);

        unbinder.unbind();
    }

    @Override
    public void onDestroy() {
        // cancel a still-running ratings fetch, the result is no longer needed
        if (mTraktTask != null) {
            mTraktTask.cancel(true);
            mTraktTask = null;
        }
        super.onDestroy();
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);

        boolean isLightTheme = SeriesGuidePreferences.THEME == R.style.Theme_SeriesGuide_Light;
        // multi-pane layout has non-transparent action bar, adjust icon color
        boolean isInMultipane = getArguments().getBoolean(InitBundle.IS_IN_MULTIPANE_LAYOUT);
        inflater.inflate(isLightTheme && !isInMultipane ?
                R.menu.episodedetails_menu_light : R.menu.episodedetails_menu, menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemId = item.getItemId();
        if (itemId == R.id.menu_share) {
            shareEpisode();
            return true;
        } else if (itemId == R.id.menu_manage_lists) {
            ManageListsDialogFragment.showListsDialog(getEpisodeTvdbId(), ListItemTypes.EPISODE,
                    getFragmentManager());
            Utils.trackAction(getActivity(), TAG, "Manage lists");
            return true;
        } else if (itemId == R.id.menu_action_episode_calendar) {
            // suggest a calendar event for the episode release, using cached cursor values
            ShareUtils.suggestCalendarEvent(getActivity(), mShowTitle,
                    TextTools.getNextEpisodeString(getActivity(), mSeasonNumber, mEpisodeNumber,
                            mEpisodeTitle), mEpisodeReleaseTime, mShowRunTime);
            Utils.trackAction(getActivity(), TAG, "Add to calendar");
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Returns the episode TVDb id this fragment was created for (from arguments).
     */
    private int getEpisodeTvdbId() {
        return getArguments().getInt(InitBundle.EPISODE_TVDBID);
    }

    /**
     * If episode was watched, flags as unwatched. Otherwise, flags as watched.
     */
    private void onToggleWatched() {
        changeEpisodeFlag(EpisodeTools.isWatched(mEpisodeFlag) ?
                EpisodeFlags.UNWATCHED : EpisodeFlags.WATCHED);
    }

    /**
     * If episode was skipped, flags as unwatched. Otherwise, flags as skipped.
     */
    private void onToggleSkipped() {
        changeEpisodeFlag(EpisodeTools.isSkipped(mEpisodeFlag) ?
                EpisodeFlags.UNWATCHED : EpisodeFlags.SKIPPED);
    }

    /**
     * Stores the new flag locally and submits the change via {@link EpisodeTools}.
     * The local field is updated eagerly; the loader presumably re-delivers the
     * persisted state afterwards — TODO confirm against EpisodeTools behavior.
     */
    private void changeEpisodeFlag(int episodeFlag) {
        mEpisodeFlag = episodeFlag;
        EpisodeTools.episodeWatched(SgApp.from(getActivity()), mShowTvdbId, getEpisodeTvdbId(),
                mSeasonNumber, mEpisodeNumber, episodeFlag);
    }

    /**
     * Toggles the collected state and submits the change via {@link EpisodeTools}.
     */
    private void onToggleCollected() {
        mCollected = !mCollected;
        EpisodeTools.episodeCollected(SgApp.from(getActivity()), mShowTvdbId, getEpisodeTvdbId(),
                mSeasonNumber, mEpisodeNumber, mCollected);
    }

    /**
     * Reloads episode actions when an extension published a new action for this episode.
     */
    @Override
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEventMainThread(ExtensionManager.EpisodeActionReceivedEvent event) {
        if (getEpisodeTvdbId() == event.episodeTvdbId) {
            loadEpisodeActionsDelayed();
        }
    }

    // a flag-change service task started: lock the flag buttons
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEventEpisodeTask(BaseNavDrawerActivity.ServiceActiveEvent event) {
        setEpisodeButtonsEnabled(false);
    }

    // the flag-change service task finished: unlock the flag buttons
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEventEpisodeTask(BaseNavDrawerActivity.ServiceCompletedEvent event) {
        setEpisodeButtonsEnabled(true);
    }

    /**
     * Enables or disables all four episode flag buttons (watch, collect, skip, check-in).
     */
    private void setEpisodeButtonsEnabled(boolean enabled) {
        buttonWatch.setEnabled(enabled);
        buttonCollect.setEnabled(enabled);
        buttonSkip.setEnabled(enabled);
        buttonCheckin.setEnabled(enabled);
    }

    // Loads the episode row (joined with its show) and hands it to populateEpisodeData().
    private LoaderManager.LoaderCallbacks<Cursor> mEpisodeDataLoaderCallbacks
            = new LoaderManager.LoaderCallbacks<Cursor>() {
        @Override
        public Loader<Cursor> onCreateLoader(int id, Bundle args) {
            return new CursorLoader(getActivity(), Episodes.buildEpisodeWithShowUri(String
                    .valueOf(getEpisodeTvdbId())), DetailsQuery.PROJECTION, null, null, null);
        }

        @Override
        public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
            // guard: the fragment may already be detached when the loader delivers
            if (!isAdded()) {
                return;
            }
            populateEpisodeData(data);
        }

        @Override
        public void onLoaderReset(Loader<Cursor> loader) {
            // do nothing (we are never holding onto the cursor)
        }
    };

    /**
     * Binds all episode data from the given cursor to the views and wires up the
     * click listeners. Hides the whole container if the cursor is empty.
     *
     * @param cursor episode-with-show cursor using {@link DetailsQuery#PROJECTION};
     *         may be null or empty.
     */
    private void populateEpisodeData(Cursor cursor) {
        if (cursor == null || !cursor.moveToFirst()) {
            // no data to display
            if (mEpisodeContainer != null) {
                mEpisodeContainer.setVisibility(View.GONE);
            }
            return;
        }

        mShowTvdbId = cursor.getInt(DetailsQuery.SHOW_ID);
        mSeasonNumber = cursor.getInt(DetailsQuery.SEASON);
        mEpisodeNumber = cursor.getInt(DetailsQuery.NUMBER);
        mShowRunTime = cursor.getInt(DetailsQuery.SHOW_RUNTIME);
        mEpisodeReleaseTime = cursor.getLong(DetailsQuery.FIRST_RELEASE_MS);

        // title and description
        mEpisodeFlag = cursor.getInt(DetailsQuery.WATCHED);
        mEpisodeTitle = cursor.getString(DetailsQuery.TITLE);
        // spoiler protection: hide title/description/image of unwatched episodes if enabled
        boolean hideDetails = EpisodeTools.isUnwatched(mEpisodeFlag)
                && DisplaySettings.preventSpoilers(getContext());
        if (hideDetails) {
            // just show the episode number "1x02"
            mTitle.setText(TextTools.getEpisodeNumber(getContext(), mSeasonNumber,
                    mEpisodeNumber));
        } else {
            mTitle.setText(mEpisodeTitle);
        }
        String overview = cursor.getString(DetailsQuery.OVERVIEW);
        if (TextUtils.isEmpty(overview)) {
            // no description available, show no translation available message
            mDescription.setText(getString(R.string.no_translation,
                    LanguageTools.getShowLanguageStringFor(getContext(),
                            cursor.getString(DetailsQuery.SHOW_LANGUAGE)),
                    getString(R.string.tvdb)));
        } else {
            if (hideDetails) {
                mDescription.setText(R.string.no_spoilers);
            } else {
                mDescription.setText(overview);
            }
        }

        // show title
        mShowTitle = cursor.getString(DetailsQuery.SHOW_TITLE);

        // release date, also build release time and day
        // NOTE(review): -1 presumably marks an unknown release time — confirm against the
        // FIRSTAIREDMS column convention
        SpannableStringBuilder timeAndNumbersText = new SpannableStringBuilder();
        if (mEpisodeReleaseTime != -1) {
            Date actualRelease = TimeTools.applyUserOffset(getContext(), mEpisodeReleaseTime);
            mReleaseDate.setText(TimeTools.formatToLocalDateAndDay(getContext(), actualRelease));
            String dateTime;
            if (DisplaySettings.isDisplayExactDate(getContext())) {
                // "31. October 2010"
                dateTime = TimeTools.formatToLocalDate(getContext(), actualRelease);
            } else {
                // "in 15 mins"
                dateTime = TimeTools.formatToLocalRelativeTime(getContext(), actualRelease);
            }
            // append day: "in 15 mins (Fri)"
            timeAndNumbersText.append(getString(R.string.release_date_and_day, dateTime,
                    TimeTools.formatToLocalDay(actualRelease)).toUpperCase(Locale.getDefault()));
            timeAndNumbersText.append(" ");
        } else {
            mReleaseDate.setText(R.string.unknown);
        }
        // absolute number (e.g. relevant for Anime): "ABSOLUTE 142"
        int numberStartIndex = timeAndNumbersText.length();
        int absoluteNumber = cursor.getInt(DetailsQuery.NUMBER_ABSOLUTE);
        if (absoluteNumber > 0) {
            timeAndNumbersText
                    .append(getString(R.string.episode_number_absolute))
                    .append(" ")
                    .append(String.valueOf(absoluteNumber));
            // de-emphasize number
            timeAndNumbersText.setSpan(new TextAppearanceSpan(getActivity(),
                    R.style.TextAppearance_Caption_Dim), numberStartIndex,
                    timeAndNumbersText.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE
            );
        }
        mReleaseTime.setText(timeAndNumbersText);

        // guest stars
        Utils.setLabelValueOrHide(mLabelGuestStars, mGuestStars,
                TextTools.splitAndKitTVDBStrings(cursor.getString(DetailsQuery.GUESTSTARS))
        );
        // DVD episode number
        Utils.setLabelValueOrHide(mLabelDvd, mDvd, cursor.getDouble(DetailsQuery.NUMBER_DVD));
        // directors
        Utils.setValueOrPlaceholder(mDirectors, TextTools.splitAndKitTVDBStrings(cursor
                .getString(DetailsQuery.DIRECTORS)));
        // writers
        Utils.setValueOrPlaceholder(mWriters, TextTools.splitAndKitTVDBStrings(cursor
                .getString(DetailsQuery.WRITERS)));

        // last TVDb edit date (stored in seconds, DateUtils expects millis)
        long lastEditSeconds = cursor.getLong(DetailsQuery.LAST_EDITED);
        if (lastEditSeconds > 0) {
            mLastEdit.setText(DateUtils.formatDateTime(getActivity(), lastEditSeconds * 1000,
                    DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_TIME));
        } else {
            mLastEdit.setText(R.string.unknown);
        }

        // ratings
        mRatingsContainer.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                rateEpisode();
            }
        });
        CheatSheet.setup(mRatingsContainer, R.string.action_rate);
        // trakt rating
        mTextRating.setText(
                TraktTools.buildRatingString(cursor.getDouble(DetailsQuery.RATING_GLOBAL)));
        mTextRatingVotes.setText(TraktTools.buildRatingVotesString(getActivity(),
                cursor.getInt(DetailsQuery.RATING_VOTES)));
        // user rating
        mTextUserRating.setText(TraktTools.buildUserRatingString(getActivity(),
                cursor.getInt(DetailsQuery.RATING_USER)));
        loadTraktRatings();

        // episode image: tapping opens the fullscreen viewer
        final String imagePath = cursor.getString(DetailsQuery.IMAGE);
        mImageContainer.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getActivity(), FullscreenImageActivity.class);
                intent.putExtra(FullscreenImageActivity.EXTRA_IMAGE,
                        TvdbTools.buildScreenshotUrl(imagePath));
                Utils.startActivityWithAnimation(getActivity(), intent, v);
            }
        });
        loadImage(imagePath, hideDetails);

        // check in button
        final int episodeTvdbId = cursor.getInt(DetailsQuery._ID);
        buttonCheckin.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // display a check-in dialog
                CheckInDialogFragment f = CheckInDialogFragment.newInstance(getActivity(),
                        episodeTvdbId);
                if (f != null && isResumed()) {
                    f.show(getFragmentManager(), "checkin-dialog");
                    Utils.trackAction(getActivity(), TAG, "Check-In");
                }
            }
        });
        CheatSheet.setup(buttonCheckin);
        // hide check-in if not connected to trakt or hexagon is enabled
        boolean isConnectedToTrakt = TraktCredentials.get(getActivity()).hasCredentials();
        boolean displayCheckIn = isConnectedToTrakt && !HexagonTools.isSignedIn(getActivity());
        buttonCheckin.setVisibility(displayCheckIn ? View.VISIBLE : View.GONE);
        dividerEpisodeButtons.setVisibility(displayCheckIn ? View.VISIBLE : View.GONE);

        // watched button
        boolean isWatched = EpisodeTools.isWatched(mEpisodeFlag);
        Utils.setCompoundDrawablesRelativeWithIntrinsicBounds(buttonWatch, 0, isWatched ?
                        Utils.resolveAttributeToResourceId(getActivity().getTheme(),
                                R.attr.drawableWatched)
                        : Utils.resolveAttributeToResourceId(getActivity().getTheme(),
                                R.attr.drawableWatch),
                0, 0);
        buttonWatch.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onToggleWatched();
                Utils.trackAction(getActivity(), TAG, "Toggle watched");
            }
        });
        buttonWatch.setText(isWatched ? R.string.action_unwatched : R.string.action_watched);
        CheatSheet.setup(buttonWatch,
                isWatched ? R.string.action_unwatched : R.string.action_watched);

        // collected button
        mCollected = cursor.getInt(DetailsQuery.COLLECTED) == 1;
        Utils.setCompoundDrawablesRelativeWithIntrinsicBounds(buttonCollect, 0, mCollected
                        ? R.drawable.ic_collected
                        : Utils.resolveAttributeToResourceId(getActivity().getTheme(),
                                R.attr.drawableCollect),
                0, 0);
        buttonCollect.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onToggleCollected();
                Utils.trackAction(getActivity(), TAG, "Toggle collected");
            }
        });
        buttonCollect.setText(mCollected ? R.string.action_collection_remove
                : R.string.action_collection_add);
        CheatSheet.setup(buttonCollect, mCollected ? R.string.action_collection_remove
                : R.string.action_collection_add);

        // skip button
        boolean isSkipped = EpisodeTools.isSkipped(mEpisodeFlag);
        if (isWatched) {
            // if watched do not allow skipping
            buttonSkip.setVisibility(View.INVISIBLE);
        } else {
            buttonSkip.setVisibility(View.VISIBLE);
            Utils.setCompoundDrawablesRelativeWithIntrinsicBounds(buttonSkip, 0, isSkipped
                            ? R.drawable.ic_skipped
                            : Utils.resolveAttributeToResourceId(getActivity().getTheme(),
                                    R.attr.drawableSkip),
                    0, 0);
            buttonSkip.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    onToggleSkipped();
                    Utils.trackAction(getActivity(), TAG, "Toggle skipped");
                }
            });
            buttonSkip.setText(isSkipped ? R.string.action_dont_skip : R.string.action_skip);
            CheatSheet.setup(buttonSkip,
                    isSkipped ? R.string.action_dont_skip : R.string.action_skip);
        }

        // service buttons
        ServiceUtils.setUpTraktEpisodeButton(mTraktButton, getEpisodeTvdbId(), TAG);
        // IMDb
        String imdbId = cursor.getString(DetailsQuery.IMDBID);
        if (TextUtils.isEmpty(imdbId)) {
            // fall back to show IMDb id
            imdbId = cursor.getString(DetailsQuery.SHOW_IMDBID);
        }
        ServiceUtils.setUpImdbButton(imdbId, mImdbButton, TAG);
        // TVDb
        final int seasonTvdbId = cursor.getInt(DetailsQuery.SEASON_ID);
        ServiceUtils.setUpTvdbButton(mShowTvdbId, seasonTvdbId, getEpisodeTvdbId(), mTvdbButton,
                TAG);
        // trakt comments
        mCommentsButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(getActivity(), TraktCommentsActivity.class);
                intent.putExtras(TraktCommentsActivity.createInitBundleEpisode(mEpisodeTitle,
                        getEpisodeTvdbId()
                ));
                Utils.startActivityWithAnimation(getActivity(), intent, v);
                Utils.trackAction(v.getContext(), TAG, "Comments");
            }
        });

        // all views are bound, reveal the container hidden in onCreateView()
        mEpisodeContainer.setVisibility(View.VISIBLE);
    }

    /**
     * Starts a {@link TraktRatingsTask} for this episode unless one is still running.
     */
    private void loadTraktRatings() {
        if (mTraktTask == null || mTraktTask.getStatus() == AsyncTask.Status.FINISHED) {
            mTraktTask = new TraktRatingsTask(SgApp.from(getActivity()), mShowTvdbId,
                    getEpisodeTvdbId(), mSeasonNumber, mEpisodeNumber);
            AsyncTaskCompat.executeParallel(mTraktTask);
        }
    }

    /**
     * Shows the trakt rate dialog for this episode.
     */
    private void rateEpisode() {
        RateDialogFragment.displayRateDialog(getActivity(), getFragmentManager(),
                getEpisodeTvdbId());
        Utils.trackAction(getActivity(), TAG, "Rate (trakt)");
    }

    /**
     * Launches the share chooser; no-op until show and episode title are loaded.
     */
    private void shareEpisode() {
        if (mEpisodeTitle == null || mShowTitle == null) {
            return;
        }
        ShareUtils.shareEpisode(getActivity(), getEpisodeTvdbId(), mSeasonNumber, mEpisodeNumber,
                mShowTitle, mEpisodeTitle);
        Utils.trackAction(getActivity(), TAG, "Share");
    }

    /**
     * Loads the episode screenshot via Picasso, or a placeholder if details are hidden
     * (spoiler protection). Hides the image container when there is no image path.
     */
    private void loadImage(String imagePath, boolean hideDetails) {
        // immediately hide container if there is no image
        if (TextUtils.isEmpty(imagePath)) {
            mImageContainer.setVisibility(View.GONE);
            return;
        }

        if (hideDetails) {
            // show image placeholder
            mEpisodeImage.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
            mEpisodeImage.setImageResource(R.drawable.ic_image_missing);
        } else {
            // try loading image
            mImageContainer.setVisibility(View.VISIBLE);
            ServiceUtils.loadWithPicasso(getActivity(), TvdbTools.buildScreenshotUrl(imagePath))
                    .error(R.drawable.ic_image_missing)
                    .into(mEpisodeImage,
                            new Callback() {
                                @Override
                                public void onSuccess() {
                                    mEpisodeImage.setScaleType(ImageView.ScaleType.CENTER_CROP);
                                }

                                @Override
                                public void onError() {
                                    mEpisodeImage.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
                                }
                            }
                    );
        }
    }

    // Loads extension-provided actions for this episode and populates the actions container.
    private LoaderManager.LoaderCallbacks<List<Action>> mEpisodeActionsLoaderCallbacks =
            new LoaderManager.LoaderCallbacks<List<Action>>() {
                @Override
                public Loader<List<Action>> onCreateLoader(int id, Bundle args) {
                    int episodeTvdbId = args.getInt(KEY_EPISODE_TVDB_ID);
                    return new EpisodeActionsLoader(getActivity(), episodeTvdbId);
                }

                @Override
                public void onLoadFinished(Loader<List<Action>> loader, List<Action> data) {
                    // guard: the fragment may already be detached when the loader delivers
                    if (!isAdded()) {
                        return;
                    }
                    if (data == null) {
                        Timber.e("onLoadFinished: did not receive valid actions for %s",
                                getEpisodeTvdbId());
                    } else {
                        Timber.d("onLoadFinished: received %s actions for %s", data.size(),
                                getEpisodeTvdbId());
                    }
                    ActionsHelper.populateActions(getActivity().getLayoutInflater(),
                            getActivity().getTheme(), mActionsContainer, data, TAG);
                }

                @Override
                public void onLoaderReset(Loader<List<Action>> loader) {
                    // do nothing, we are not holding onto the actions list
                }
            };

    /**
     * Restarts the actions loader immediately. Prefer {@link #loadEpisodeActionsDelayed()}
     * to coalesce rapid successive requests.
     */
    public void loadEpisodeActions() {
        Bundle args = new Bundle();
        args.putInt(KEY_EPISODE_TVDB_ID, getEpisodeTvdbId());
        getLoaderManager().restartLoader(EpisodesActivity.ACTIONS_LOADER_ID, args,
                mEpisodeActionsLoaderCallbacks);
    }

    Runnable mEpisodeActionsRunnable = new Runnable() {
        @Override
        public void run() {
            loadEpisodeActions();
        }
    };

    /**
     * Debounced actions reload: cancels any pending reload and schedules a new one after
     * {@link EpisodeActionsContract#ACTION_LOADER_DELAY_MILLIS}.
     */
    public void loadEpisodeActionsDelayed() {
        mHandler.removeCallbacks(mEpisodeActionsRunnable);
        mHandler.postDelayed(mEpisodeActionsRunnable,
                EpisodeActionsContract.ACTION_LOADER_DELAY_MILLIS);
    }

    // Projection and column indices for the episode-with-show query; the int constants
    // below must match the positions in PROJECTION exactly.
    interface DetailsQuery {

        String[] PROJECTION = new String[] {
                Tables.EPISODES + "." + Episodes._ID,
                Episodes.NUMBER,
                Episodes.ABSOLUTE_NUMBER,
                Episodes.DVDNUMBER,
                Seasons.REF_SEASON_ID,
                Episodes.SEASON,
                Episodes.IMDBID,
                Episodes.TITLE,
                Episodes.OVERVIEW,
                Episodes.FIRSTAIREDMS,
                Episodes.DIRECTORS,
                Episodes.GUESTSTARS,
                Episodes.WRITERS,
                Episodes.IMAGE,
                Tables.EPISODES + "." + Episodes.RATING_GLOBAL,
                Episodes.RATING_VOTES,
                Episodes.RATING_USER,
                Episodes.WATCHED,
                Episodes.COLLECTED,
                Episodes.LAST_EDITED,
                Shows.REF_SHOW_ID,
                Shows.IMDBID,
                Shows.TITLE,
                Shows.RUNTIME,
                Shows.LANGUAGE
        };

        int _ID = 0;
        int NUMBER = 1;
        int NUMBER_ABSOLUTE = 2;
        int NUMBER_DVD = 3;
        int SEASON_ID = 4;
        int SEASON = 5;
        int IMDBID = 6;
        int TITLE = 7;
        int OVERVIEW = 8;
        int FIRST_RELEASE_MS = 9;
        int DIRECTORS = 10;
        int GUESTSTARS = 11;
        int WRITERS = 12;
        int IMAGE = 13;
        int RATING_GLOBAL = 14;
        int RATING_VOTES = 15;
        int RATING_USER = 16;
        int WATCHED = 17;
        int COLLECTED = 18;
        int LAST_EDITED = 19;
        int SHOW_ID = 20;
        int SHOW_IMDBID = 21;
        int SHOW_TITLE = 22;
        int SHOW_RUNTIME = 23;
        int SHOW_LANGUAGE = 24;
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.jps.incremental.java; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileFilters; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.ExceptionUtil; import com.intellij.util.SystemProperties; import com.intellij.util.concurrency.SequentialTaskExecutor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.JBIterable; import com.intellij.util.execution.ParametersListUtil; import com.intellij.util.io.PersistentEnumeratorBase; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jps.ModuleChunk; import org.jetbrains.jps.PathUtils; import org.jetbrains.jps.ProjectPaths; import org.jetbrains.jps.api.GlobalOptions; import org.jetbrains.jps.builders.BuildRootIndex; import org.jetbrains.jps.builders.DirtyFilesHolder; import org.jetbrains.jps.builders.FileProcessor; import org.jetbrains.jps.builders.impl.DirtyFilesHolderBase; import org.jetbrains.jps.builders.java.JavaBuilderExtension; import org.jetbrains.jps.builders.java.JavaBuilderUtil; import org.jetbrains.jps.builders.java.JavaCompilingTool; import 
org.jetbrains.jps.builders.java.JavaSourceRootDescriptor; import org.jetbrains.jps.builders.logging.ProjectBuilderLogger; import org.jetbrains.jps.builders.storage.BuildDataCorruptedException; import org.jetbrains.jps.cmdline.ProjectDescriptor; import org.jetbrains.jps.incremental.*; import org.jetbrains.jps.incremental.messages.BuildMessage; import org.jetbrains.jps.incremental.messages.CompilerMessage; import org.jetbrains.jps.incremental.messages.ProgressMessage; import org.jetbrains.jps.javac.*; import org.jetbrains.jps.model.JpsDummyElement; import org.jetbrains.jps.model.JpsProject; import org.jetbrains.jps.model.java.JavaModuleIndex; import org.jetbrains.jps.model.java.JpsJavaExtensionService; import org.jetbrains.jps.model.java.JpsJavaSdkType; import org.jetbrains.jps.model.java.LanguageLevel; import org.jetbrains.jps.model.java.compiler.*; import org.jetbrains.jps.model.library.sdk.JpsSdk; import org.jetbrains.jps.model.module.JpsModule; import org.jetbrains.jps.model.module.JpsModuleType; import org.jetbrains.jps.model.serialization.JpsModelSerializationDataService; import org.jetbrains.jps.model.serialization.PathMacroUtil; import org.jetbrains.jps.service.JpsServiceManager; import org.jetbrains.jps.service.SharedThreadPool; import javax.tools.*; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.ServerSocket; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import java.util.concurrent.Future; import java.util.stream.Collectors; /** * @author Eugene Zhuravlev * @since 21.09.2011 */ public class JavaBuilder extends ModuleLevelBuilder { private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.java.JavaBuilder"); private static final String JAVA_EXTENSION = "java"; public static final String BUILDER_NAME = "java"; public static final Key<Boolean> IS_ENABLED = 
Key.create("_java_compiler_enabled_"); public static final FileFilter JAVA_SOURCES_FILTER = FileFilters.withExtension(JAVA_EXTENSION); private static final Key<Integer> JAVA_COMPILER_VERSION_KEY = GlobalContextKey.create("_java_compiler_version_"); private static final Key<Boolean> PREFER_TARGET_JDK_COMPILER = GlobalContextKey.create("_prefer_target_jdk_javac_"); private static final Key<JavaCompilingTool> COMPILING_TOOL = Key.create("_java_compiling_tool_"); private static final Key<ConcurrentMap<String, Collection<String>>> COMPILER_USAGE_STATISTICS = Key.create("_java_compiler_usage_stats_"); private static final List<String> COMPILABLE_EXTENSIONS = Collections.singletonList(JAVA_EXTENSION); private static final Set<String> FILTERED_OPTIONS = ContainerUtil.newHashSet( "-target"); private static final Set<String> FILTERED_SINGLE_OPTIONS = ContainerUtil.newHashSet( "-g", "-deprecation", "-nowarn", "-verbose", "-proc:none", "-proc:only", "-proceedOnError"); private static final List<ClassPostProcessor> ourClassProcessors = new ArrayList<>(); private static final Set<JpsModuleType<?>> ourCompilableModuleTypes = new HashSet<>(); private static final @Nullable File ourDefaultRtJar; static { for (JavaBuilderExtension extension : JpsServiceManager.getInstance().getExtensions(JavaBuilderExtension.class)) { ourCompilableModuleTypes.addAll(extension.getCompilableModuleTypes()); } File rtJar = null; StringTokenizer tokenizer = new StringTokenizer(System.getProperty("sun.boot.class.path", ""), File.pathSeparator, false); while (tokenizer.hasMoreTokens()) { File file = new File(tokenizer.nextToken()); if ("rt.jar".equals(file.getName())) { rtJar = file; break; } } ourDefaultRtJar = rtJar; } public static void registerClassPostProcessor(ClassPostProcessor processor) { ourClassProcessors.add(processor); } private final Executor myTaskRunner; public JavaBuilder(Executor tasksExecutor) { super(BuilderCategory.TRANSLATOR); myTaskRunner = new SequentialTaskExecutor("JavaBuilder 
pool", tasksExecutor); //add here class processors in the sequence they should be executed } @Override @NotNull public String getPresentableName() { return BUILDER_NAME; } @Override public void buildStarted(CompileContext context) { final String compilerId = getUsedCompilerId(context); if (LOG.isDebugEnabled()) { LOG.debug("Java compiler ID: " + compilerId); } JavaCompilingTool compilingTool = JavaBuilderUtil.findCompilingTool(compilerId); COMPILING_TOOL.set(context, compilingTool); COMPILER_USAGE_STATISTICS.set(context, new ConcurrentHashMap<>()); } @Override public void chunkBuildStarted(final CompileContext context, final ModuleChunk chunk) { // before the first compilation round starts: find and mark dirty all classes that depend on removed or moved classes so // that all such files are compiled in the first round. try { JavaBuilderUtil.markDirtyDependenciesForInitialRound(context, new DirtyFilesHolderBase<JavaSourceRootDescriptor, ModuleBuildTarget>(context) { @Override public void processDirtyFiles(@NotNull FileProcessor<JavaSourceRootDescriptor, ModuleBuildTarget> processor) throws IOException { FSOperations.processFilesToRecompile(context, chunk, processor); } }, chunk); } catch (IOException e) { throw new RuntimeException(e); } } public void buildFinished(CompileContext context) { final ConcurrentMap<String, Collection<String>> stats = COMPILER_USAGE_STATISTICS.get(context); if (stats.size() == 1) { final Map.Entry<String, Collection<String>> entry = stats.entrySet().iterator().next(); final String compilerName = entry.getKey(); context.processMessage(new CompilerMessage("", BuildMessage.Kind.JPS_INFO, compilerName + " was used to compile java sources")); LOG.info(compilerName + " was used to compile " + entry.getValue()); } else { for (Map.Entry<String, Collection<String>> entry : stats.entrySet()) { final String compilerName = entry.getKey(); final Collection<String> moduleNames = entry.getValue(); context.processMessage(new CompilerMessage("", 
BuildMessage.Kind.JPS_INFO, moduleNames.size() == 1 ? compilerName + " was used to compile [" + moduleNames.iterator().next() + "]" : compilerName + " was used to compile " + moduleNames.size() + " modules" )); LOG.info(compilerName + " was used to compile " + moduleNames); } } } @Override public List<String> getCompilableFileExtensions() { return COMPILABLE_EXTENSIONS; } @Override public ExitCode build(@NotNull CompileContext context, @NotNull ModuleChunk chunk, @NotNull DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, @NotNull OutputConsumer outputConsumer) throws ProjectBuildException, IOException { JavaCompilingTool compilingTool = COMPILING_TOOL.get(context); if (!IS_ENABLED.get(context, Boolean.TRUE) || compilingTool == null) { return ExitCode.NOTHING_DONE; } return doBuild(context, chunk, dirtyFilesHolder, outputConsumer, compilingTool); } public ExitCode doBuild(@NotNull CompileContext context, @NotNull ModuleChunk chunk, @NotNull DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, @NotNull OutputConsumer outputConsumer, @NotNull JavaCompilingTool compilingTool) throws ProjectBuildException, IOException { try { final Set<File> filesToCompile = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); dirtyFilesHolder.processDirtyFiles((target, file, descriptor) -> { if (JAVA_SOURCES_FILTER.accept(file) && ourCompilableModuleTypes.contains(target.getModule().getModuleType())) { filesToCompile.add(file); } return true; }); int javaModulesCount = 0; if ((!filesToCompile.isEmpty() || dirtyFilesHolder.hasRemovedFiles()) && JpsJavaSdkType.parseVersion(getLanguageLevel(ContainerUtil.getFirstItem(chunk.getModules()))) >= 9) { // at the moment, there is no incremental compilation for module-info files, so they should be rebuilt on every change JavaModuleIndex index = getJavaModuleIndex(context); for (ModuleBuildTarget target : chunk.getTargets()) { File moduleInfoFile = index.getModuleInfoFile(target.getModule(), 
target.isTests()); if (moduleInfoFile != null) { filesToCompile.add(moduleInfoFile); javaModulesCount++; } } } if (JavaBuilderUtil.isCompileJavaIncrementally(context)) { ProjectBuilderLogger logger = context.getLoggingManager().getProjectBuilderLogger(); if (logger.isEnabled() && !filesToCompile.isEmpty()) { logger.logCompiledFiles(filesToCompile, BUILDER_NAME, "Compiling files:"); } } if (javaModulesCount > 1) { String prefix = "Cannot compile a module cycle with multiple module-info.java files: "; String message = chunk.getModules().stream().map(JpsModule::getName).collect(Collectors.joining(", ", prefix, "")); context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, message)); return ExitCode.ABORT; } return compile(context, chunk, dirtyFilesHolder, filesToCompile, outputConsumer, compilingTool, javaModulesCount > 0); } catch (BuildDataCorruptedException | PersistentEnumeratorBase.CorruptedException | ProjectBuildException e) { throw e; } catch (Exception e) { LOG.info(e); String message = e.getMessage(); if (message == null) message = "Internal error: \n" + ExceptionUtil.getThrowableText(e); context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, message)); throw new StopBuildException(); } } private ExitCode compile(CompileContext context, ModuleChunk chunk, DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, Collection<File> files, OutputConsumer outputConsumer, JavaCompilingTool compilingTool, boolean hasModules) throws Exception { ExitCode exitCode = ExitCode.NOTHING_DONE; final boolean hasSourcesToCompile = !files.isEmpty(); if (!hasSourcesToCompile && !dirtyFilesHolder.hasRemovedFiles()) { return exitCode; } final ProjectDescriptor pd = context.getProjectDescriptor(); JavaBuilderUtil.ensureModuleHasJdk(chunk.representativeTarget().getModule(), context, BUILDER_NAME); final Collection<File> classpath = ProjectPaths.getCompilationClasspath(chunk, false); final Collection<File> 
platformCp = ProjectPaths.getPlatformCompilationClasspath(chunk, false); // begin compilation round final OutputFilesSink outputSink = new OutputFilesSink(context, outputConsumer, JavaBuilderUtil.getDependenciesRegistrar(context), chunk.getPresentableShortName()); Collection<File> filesWithErrors = null; try { if (hasSourcesToCompile) { exitCode = ExitCode.OK; final Set<File> srcPath = new HashSet<>(); final BuildRootIndex index = pd.getBuildRootIndex(); for (ModuleBuildTarget target : chunk.getTargets()) { for (JavaSourceRootDescriptor rd : index.getTempTargetRoots(target, context)) { srcPath.add(rd.root); } } final DiagnosticSink diagnosticSink = new DiagnosticSink(context); final String chunkName = chunk.getName(); context.processMessage(new ProgressMessage("Parsing java... [" + chunk.getPresentableShortName() + "]")); final int filesCount = files.size(); boolean compiledOk = true; if (filesCount > 0) { LOG.info("Compiling " + filesCount + " java files; module: " + chunkName + (chunk.containsTests() ? 
" (tests)" : "")); if (LOG.isDebugEnabled()) { for (File file : files) { LOG.debug("Compiling " + file.getPath()); } LOG.debug(" classpath for " + chunkName + ":"); for (File file : classpath) { LOG.debug(" " + file.getAbsolutePath()); } LOG.debug(" platform classpath for " + chunkName + ":"); for (File file : platformCp) { LOG.debug(" " + file.getAbsolutePath()); } } try { compiledOk = compileJava(context, chunk, files, classpath, platformCp, srcPath, diagnosticSink, outputSink, compilingTool, hasModules); } finally { // heuristic: incorrect paths data recovery, so that the next make should not contain non-existing sources in 'recompile' list filesWithErrors = diagnosticSink.getFilesWithErrors(); for (File file : filesWithErrors) { if (!file.exists()) { FSOperations.markDeleted(context, file); } } } } context.checkCanceled(); if (!compiledOk && diagnosticSink.getErrorCount() == 0) { diagnosticSink.report(new PlainMessageDiagnostic(Diagnostic.Kind.ERROR, "Compilation failed: internal java compiler error")); } if (!Utils.PROCEED_ON_ERROR_KEY.get(context, Boolean.FALSE) && diagnosticSink.getErrorCount() > 0) { if (!compiledOk) { diagnosticSink.report(new JpsInfoDiagnostic("Errors occurred while compiling module '" + chunkName + "'")); } throw new StopBuildException( "Compilation failed: errors: " + diagnosticSink.getErrorCount() + "; warnings: " + diagnosticSink.getWarningCount() ); } } } finally { JavaBuilderUtil.registerFilesToCompile(context, files); if (filesWithErrors != null) { JavaBuilderUtil.registerFilesWithErrors(context, filesWithErrors); } JavaBuilderUtil.registerSuccessfullyCompiled(context, outputSink.getSuccessfullyCompiled()); } return exitCode; } private boolean compileJava(CompileContext context, ModuleChunk chunk, Collection<File> files, Collection<File> originalClassPath, Collection<File> originalPlatformCp, Collection<File> sourcePath, DiagnosticOutputConsumer diagnosticSink, OutputFileConsumer outputSink, JavaCompilingTool compilingTool, boolean 
hasModules) throws Exception { final TasksCounter counter = new TasksCounter(); COUNTER_KEY.set(context, counter); final JpsJavaExtensionService javaExt = JpsJavaExtensionService.getInstance(); final JpsJavaCompilerConfiguration compilerConfig = javaExt.getCompilerConfiguration(context.getProjectDescriptor().getProject()); assert compilerConfig != null; final Set<JpsModule> modules = chunk.getModules(); ProcessorConfigProfile profile = null; if (modules.size() == 1) { profile = compilerConfig.getAnnotationProcessingProfile(modules.iterator().next()); } else { String message = validateCycle(chunk, javaExt, compilerConfig, modules); if (message != null) { diagnosticSink.report(new PlainMessageDiagnostic(Diagnostic.Kind.ERROR, message)); return true; } } final Map<File, Set<File>> outs = buildOutputDirectoriesMap(context, chunk); try { final int targetLanguageLevel = JpsJavaSdkType.parseVersion(getLanguageLevel(chunk.getModules().iterator().next())); final boolean shouldForkJavac = shouldForkCompilerProcess(context, chunk, targetLanguageLevel); // when forking external javac, compilers from SDK 1.6 and higher are supported Pair<String, Integer> forkSdk = null; if (shouldForkJavac) { forkSdk = getForkedJavacSdk(chunk, targetLanguageLevel); if (forkSdk == null) { String text = "Cannot start javac process for " + chunk.getName() + ": unknown JDK home path.\nPlease check project configuration."; diagnosticSink.report(new PlainMessageDiagnostic(Diagnostic.Kind.ERROR, text)); return true; } } final int compilerSdkVersion = forkSdk == null? getCompilerSdkVersion(context) : forkSdk.getSecond(); final List<String> options = getCompilationOptions(compilerSdkVersion, context, chunk, profile, compilingTool); if (LOG.isDebugEnabled()) { String mode = shouldForkJavac ? 
"fork" : "in-process"; LOG.debug("Compiling chunk [" + chunk.getName() + "] with options: \"" + StringUtil.join(options, " ") + "\", mode=" + mode); } Collection<File> platformCp = calcEffectivePlatformCp(originalPlatformCp, options, compilingTool); if (platformCp == null) { String text = "Compact compilation profile was requested, but target platform for module \"" + chunk.getName() + "\"" + " differs from javac's platform (" + System.getProperty("java.version") + ")\n" + "Compilation profiles are not supported for such configuration"; context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, text)); return true; } Collection<File> classPath = originalClassPath; Collection<File> modulePath = Collections.emptyList(); if (hasModules) { // in Java 9, named modules are not allowed to read classes from the classpath // moreover, the compiler requires all transitive dependencies to be on the module path modulePath = ProjectPaths.getCompilationModulePath(chunk, false); classPath = Collections.emptyList(); } if (!platformCp.isEmpty()) { final int chunkSdkVersion; if (hasModules) { modulePath = JBIterable.from(platformCp).append(modulePath).toList(); platformCp = Collections.emptyList(); } else if ((chunkSdkVersion = getChunkSdkVersion(chunk)) >= 9) { // if chunk's SDK is 9 or higher, there is no way to specify full platform classpath // because platform classes are stored in jimage binary files with unknown format. 
// Because of this we are clearing platform classpath so that javac will resolve against its own boot classpath // and prepending additional jars from the JDK configuration to compilation classpath classPath = JBIterable.from(platformCp).append(classPath).toList(); platformCp = Collections.emptyList(); } else if (shouldUseReleaseOption(context, compilerSdkVersion, chunkSdkVersion, targetLanguageLevel)) { final Collection<File> joined = new ArrayList<>(classPath.size() + 1); for (File file : platformCp) { // platform runtime classes will be handled by -release option // include only additional jars from sdk distribution, e.g. tools.jar if (!FileUtil.toSystemIndependentName(file.getAbsolutePath()).contains("/jre/")) { joined.add(file); } } joined.addAll(classPath); classPath = joined; platformCp = Collections.emptyList(); } } final ClassProcessingConsumer classesConsumer = new ClassProcessingConsumer(context, outputSink); final boolean rc; if (!shouldForkJavac) { updateCompilerUsageStatistics(context, compilingTool.getDescription(), chunk); rc = JavacMain.compile( options, files, classPath, platformCp, modulePath, sourcePath, outs, diagnosticSink, classesConsumer, context.getCancelStatus(), compilingTool ); } else { updateCompilerUsageStatistics(context, "javac " + forkSdk.getSecond(), chunk); final List<String> vmOptions = getCompilationVMOptions(context, compilingTool); final ExternalJavacManager server = ensureJavacServerStarted(context); rc = server.forkJavac( forkSdk.getFirst(), getExternalJavacHeapSize(), vmOptions, options, platformCp, classPath, modulePath, sourcePath, files, outs, diagnosticSink, classesConsumer, compilingTool, context.getCancelStatus() ); } return rc; } finally { counter.await(); } } private static void updateCompilerUsageStatistics(CompileContext context, String compilerName, ModuleChunk chunk) { final ConcurrentMap<String, Collection<String>> map = COMPILER_USAGE_STATISTICS.get(context); Collection<String> names = map.get(compilerName); 
if (names == null) { names = Collections.synchronizedSet(new HashSet<String>()); final Collection<String> prev = map.putIfAbsent(compilerName, names); if (prev != null) { names = prev; } } for (JpsModule module : chunk.getModules()) { names.add(module.getName()); } } private static int getExternalJavacHeapSize() { //final JpsProject project = context.getProjectDescriptor().getProject(); //final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(project); //final JpsJavaCompilerOptions options = config.getCurrentCompilerOptions(); //return options.MAXIMUM_HEAP_SIZE; final int maxMbytes = (int)(Runtime.getRuntime().maxMemory() / 1048576L); if (maxMbytes < 0) { return -1; // in case of int overflow, return -1 to let VM choose the heap size } return Math.max(maxMbytes * 75 / 100, 256); // minimum 256 Mb, maximum 75% from JPS max heap size } @Nullable public static String validateCycle(ModuleChunk chunk, JpsJavaExtensionService javaExt, JpsJavaCompilerConfiguration compilerConfig, Set<JpsModule> modules) { Pair<String, LanguageLevel> pair = null; for (JpsModule module : modules) { final LanguageLevel moduleLevel = javaExt.getLanguageLevel(module); if (pair == null) { pair = Pair.create(module.getName(), moduleLevel); // first value } else if (!Comparing.equal(pair.getSecond(), moduleLevel)) { return "Modules " + pair.getFirst() + " and " + module.getName() + " must have the same language level because of cyclic dependencies between them"; } } // check that all chunk modules are excluded from annotation processing for (JpsModule module : modules) { final ProcessorConfigProfile prof = compilerConfig.getAnnotationProcessingProfile(module); if (prof.isEnabled()) { return "Annotation processing is not supported for module cycles. 
Please ensure that all modules from cycle [" + chunk.getName() + "] are excluded from annotation processing"; } } return null; } private static boolean shouldUseReleaseOption(CompileContext context, int compilerVersion, int chunkSdkVersion, int targetLanguageLevel) { // -release option makes sense for javac only and is supported in java9+ and higher if (compilerVersion >= 9 && chunkSdkVersion > 0 && targetLanguageLevel > 0 && isJavac(COMPILING_TOOL.get(context))) { if (chunkSdkVersion < 9) { // target sdk is set explicitly and differs from compiler SDK, so for consistency we should link against it return false; } // chunkSdkVersion >= 9, so we have no rt.jar anymore and '-release' is the only cross-compilation option available return true; } return false; } private static boolean shouldForkCompilerProcess(CompileContext context, ModuleChunk chunk, int chunkLanguageLevel) { if (!isJavac(COMPILING_TOOL.get(context))) { return false; // applicable to javac only } final int compilerSdkVersion = getCompilerSdkVersion(context); if (preferTargetJdkCompiler(context)) { final Pair<JpsSdk<JpsDummyElement>, Integer> sdkVersionPair = getAssociatedSdk(chunk); if (sdkVersionPair != null) { final Integer chunkSdkVersion = sdkVersionPair.second; if (chunkSdkVersion != compilerSdkVersion && chunkSdkVersion >= 6 /*min. 
supported compiler version*/) { // there is a special case because of difference in type inference behavior between javac8 and javac6-javac7 // so if corresponding JDK is associated with the module chunk, prefer compiler from this JDK over the newer compiler version return true; } } } if (compilerSdkVersion < 9 || chunkLanguageLevel <= 0) { // javac up to version 9 supports all previous releases // or // was not able to determine jdk version, so assuming in-process compiler return false; } // compilerSdkVersion is 9+ here, so applying JEP 182 "Retiring javac 'one plus three back'" policy return Math.abs(compilerSdkVersion - chunkLanguageLevel) > 3; } private static boolean isJavac(final JavaCompilingTool compilingTool) { return compilingTool != null && (compilingTool.getId() == JavaCompilers.JAVAC_ID || compilingTool.getId() == JavaCompilers.JAVAC_API_ID); } private static boolean preferTargetJdkCompiler(CompileContext context) { Boolean val = PREFER_TARGET_JDK_COMPILER.get(context); if (val == null) { final JpsProject project = context.getProjectDescriptor().getProject(); final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getCompilerConfiguration(project); // default val = config != null? 
config.getCompilerOptions(JavaCompilers.JAVAC_ID).PREFER_TARGET_JDK_COMPILER : Boolean.TRUE;
    PREFER_TARGET_JDK_COMPILER.set(context, val);
  }
  return val;
}

// If platformCp of the build process is the same as the target platform, do not specify platformCp explicitly
// this will allow javac to resolve against ct.sym file, which is required for the "compilation profiles" feature
@Nullable
private static Collection<File> calcEffectivePlatformCp(Collection<File> platformCp, List<String> options, JavaCompilingTool compilingTool) {
  if (ourDefaultRtJar == null || !isJavac(compilingTool)) {
    return platformCp;
  }
  boolean profileFeatureRequested = false;
  for (String option : options) {
    if ("-profile".equalsIgnoreCase(option)) {
      profileFeatureRequested = true;
      break;
    }
  }
  if (!profileFeatureRequested) {
    return platformCp;
  }
  boolean isTargetPlatformSameAsBuildRuntime = false;
  for (File file : platformCp) {
    if (FileUtil.filesEqual(file, ourDefaultRtJar)) {
      isTargetPlatformSameAsBuildRuntime = true;
      break;
    }
  }
  if (!isTargetPlatformSameAsBuildRuntime) {
    // compact profile was requested, but we have to use alternative platform classpath to meet project settings
    // consider this a compile error and let user re-configure the project
    return null;
  }
  // returning empty list will force default behaviour for platform classpath calculation
  // javac will resolve against its own bootclasspath and use ct.sym file when available
  return Collections.emptyList();
}

/** Runs a task on the shared pool; the per-context TasksCounter tracks it so compileJava() can await completion. */
private void submitAsyncTask(final CompileContext context, final Runnable taskRunnable) {
  final TasksCounter counter = COUNTER_KEY.get(context);
  assert counter != null;
  counter.incTaskCount();
  myTaskRunner.execute(() -> {
    try {
      taskRunnable.run();
    }
    catch (Throwable e) {
      context.processMessage(new CompilerMessage(BUILDER_NAME, e));
    }
    finally {
      // always decrement, even on failure, so await() cannot hang
      counter.decTaskCounter();
    }
  });
}

/** Lazily starts the manager for forked javac processes and caches it in the context; synchronized so it starts only once. */
private static synchronized ExternalJavacManager ensureJavacServerStarted(@NotNull CompileContext context) throws Exception {
  ExternalJavacManager server = ExternalJavacManager.KEY.get(context);
  if (server != null) {
    return server;
  }
  final int listenPort = findFreePort();
  server = new ExternalJavacManager(Utils.getSystemRoot()) {
    @Override
    protected ExternalJavacProcessHandler createProcessHandler(@NotNull Process process, @NotNull String commandLine) {
      return new ExternalJavacProcessHandler(process, commandLine) {
        @Override
        @NotNull
        protected Future<?> executeOnPooledThread(@NotNull Runnable task) {
          // reuse the build process' shared thread pool instead of spawning dedicated threads
          return SharedThreadPool.getInstance().executeOnPooledThread(task);
        }
      };
    }
  };
  server.start(listenPort);
  ExternalJavacManager.KEY.set(context, server);
  return server;
}

/** Picks a free TCP port for the external javac server by briefly binding a server socket on port 0. */
private static int findFreePort() {
  try {
    final ServerSocket serverSocket = new ServerSocket(0);
    try {
      return serverSocket.getLocalPort();
    }
    finally {
      //workaround for linux : calling close() immediately after opening socket
      //may result that socket is not closed
      synchronized (serverSocket) {
        try {
          serverSocket.wait(1);
        }
        catch (Throwable ignored) {
        }
      }
      serverSocket.close();
    }
  }
  catch (IOException e) {
    e.printStackTrace(System.err);
    // fall back to the manager's well-known default port
    return ExternalJavacManager.DEFAULT_SERVER_PORT;
  }
}

// per-build caches of parsed user compiler options (populated by loadCommonJavacOptions)
private static final Key<List<String>> JAVAC_OPTIONS = Key.create("_javac_options_");
private static final Key<List<String>> JAVAC_VM_OPTIONS = Key.create("_javac_vm_options_");
private static final Key<String> USER_DEFINED_BYTECODE_TARGET = Key.create("_user_defined_bytecode_target_");

/** VM options for a forked javac process, parsed once per build and cached. */
private static List<String> getCompilationVMOptions(CompileContext context, JavaCompilingTool compilingTool) {
  List<String> cached = JAVAC_VM_OPTIONS.get(context);
  if (cached == null) {
    loadCommonJavacOptions(context, compilingTool);
    cached = JAVAC_VM_OPTIONS.get(context);
  }
  return cached;
}

/** Full javac option list for the chunk: cached common options (with $MODULE_DIR$ expanded) plus chunk-specific options. */
private static List<String> getCompilationOptions(int compilerSdkVersion, CompileContext context, ModuleChunk chunk,
                                                  @Nullable ProcessorConfigProfile profile, @NotNull JavaCompilingTool compilingTool) {
  List<String> cached = JAVAC_OPTIONS.get(context);
  if (cached == null) {
    loadCommonJavacOptions(context,
compilingTool);
    cached = JAVAC_OPTIONS.get(context);
    assert cached != null : context;
  }

  List<String> options = new ArrayList<>();
  JpsModule module = chunk.representativeTarget().getModule();
  File baseDirectory = JpsModelSerializationDataService.getBaseDirectory(module);
  if (baseDirectory != null) {
    //this is a temporary workaround to allow passing per-module compiler options for Eclipse compiler in form
    // -properties $MODULE_DIR$/.settings/org.eclipse.jdt.core.prefs
    String stringToReplace = "$" + PathMacroUtil.MODULE_DIR_MACRO_NAME + "$";
    String moduleDirPath = FileUtil.toCanonicalPath(baseDirectory.getAbsolutePath());
    for (String s : cached) {
      options.add(StringUtil.replace(s, stringToReplace, moduleDirPath));
    }
  }
  else {
    options.addAll(cached);
  }
  addCompilationOptions(compilerSdkVersion, options, context, chunk, profile);
  return options;
}

/** Appends chunk-specific compiler options (encoding, cross-compilation, annotation processing) to {@code options}. */
public static void addCompilationOptions(List<String> options, CompileContext context, ModuleChunk chunk, @Nullable ProcessorConfigProfile profile) {
  addCompilationOptions(getCompilerSdkVersion(context), options, context, chunk, profile);
}

/** Same as {@link #addCompilationOptions(List, CompileContext, ModuleChunk, ProcessorConfigProfile)}, for a known compiler SDK version. */
public static void addCompilationOptions(int compilerSdkVersion, List<String> options, CompileContext context, ModuleChunk chunk, @Nullable ProcessorConfigProfile profile) {
  if (!isEncodingSet(options)) {
    final CompilerEncodingConfiguration config = context.getProjectDescriptor().getEncodingConfiguration();
    final String encoding = config.getPreferredModuleChunkEncoding(chunk);
    if (config.getAllModuleChunkEncodings(chunk).size() > 1) {
      // modules of the chunk disagree on encoding - warn the user which one wins
      final StringBuilder msgBuilder = new StringBuilder();
      msgBuilder.append("Multiple encodings set for module chunk ").append(chunk.getName());
      if (encoding != null) {
        msgBuilder.append("\n\"").append(encoding).append("\" will be used by compiler");
      }
      context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.INFO, msgBuilder.toString()));
    }
    if (!StringUtil.isEmpty(encoding)) {
      options.add("-encoding");
      options.add(encoding);
    }
  }

  addCrossCompilationOptions(compilerSdkVersion, options, context, chunk);

  if (addAnnotationProcessingOptions(options, profile)) {
    final File srcOutput = ProjectPaths.getAnnotationProcessorGeneratedSourcesOutputDir(
      chunk.getModules().iterator().next(), chunk.containsTests(), profile
    );
    if (srcOutput != null) {
      FileUtil.createDirectory(srcOutput);
      options.add("-s");
      options.add(srcOutput.getPath());
    }
  }
}

/**
 * Appends annotation-processing options ('-proc:none', '-processorpath', '-processor', '-A...') for the given profile.
 *
 * @param options option list to append to
 * @param profile annotation-processing profile of the chunk, may be {@code null}
 * @return true if annotation processing is enabled and corresponding options were added, false if profile is null or disabled
 */
public static boolean addAnnotationProcessingOptions(List<String> options, @Nullable AnnotationProcessingConfiguration profile) {
  if (profile == null || !profile.isEnabled()) {
    options.add("-proc:none");
    return false;
  }

  // configuring annotation processing
  if (!profile.isObtainProcessorsFromClasspath()) {
    final String processorsPath = profile.getProcessorPath();
    options.add("-processorpath");
    options.add(FileUtil.toSystemDependentName(processorsPath.trim()));
  }

  final Set<String> processors = profile.getProcessors();
  if (!processors.isEmpty()) {
    options.add("-processor");
    options.add(StringUtil.join(processors, ","));
  }

  for (Map.Entry<String, String> optionEntry : profile.getProcessorOptions().entrySet()) {
    options.add("-A" + optionEntry.getKey() + "=" + optionEntry.getValue());
  }
  return true;
}

/** Id of the java compiler selected in the project settings; defaults to javac when no configuration is present. */
@NotNull
public static String getUsedCompilerId(CompileContext context) {
  final JpsProject project = context.getProjectDescriptor().getProject();
  final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getCompilerConfiguration(project);
  return config == null ?
JavaCompilers.JAVAC_ID : config.getJavaCompilerId();
}

/** Appends cross-compilation options for the chunk: either '--release', or the older -source/-target pair. */
private static void addCrossCompilationOptions(int compilerSdkVersion, List<String> options, CompileContext context, ModuleChunk chunk) {
  final JpsJavaCompilerConfiguration compilerConfiguration = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(
    context.getProjectDescriptor().getProject()
  );

  final String langLevel = getLanguageLevel(chunk.getModules().iterator().next());
  final int chunkSdkVersion = getChunkSdkVersion(chunk);
  final int targetLanguageLevel = JpsJavaSdkType.parseVersion(langLevel);
  if (shouldUseReleaseOption(context, compilerSdkVersion, chunkSdkVersion, targetLanguageLevel)) {
    if (compilerSdkVersion != targetLanguageLevel) {
      // Only specify '--release' when cross-compilation is indeed really required.
      // Otherwise '--release' may not be compatible with other compilation options, e.g. exporting a package from system module
      options.add("--release");
      options.add(String.valueOf(targetLanguageLevel));
    }
    return;
  }

  // using older -source, -target and -bootclasspath options
  if (!StringUtil.isEmpty(langLevel)) {
    options.add("-source");
    options.add(langLevel);
  }

  String bytecodeTarget = null;
  for (JpsModule module : chunk.getModules()) {
    final String moduleTarget = compilerConfiguration.getByteCodeTargetLevel(module.getName());
    if (moduleTarget == null) {
      continue;
    }
    if (bytecodeTarget == null) {
      bytecodeTarget = moduleTarget;
    }
    else {
      if (moduleTarget.compareTo(bytecodeTarget) < 0) {
        bytecodeTarget = moduleTarget; // use the lower possible target among modules that form the chunk
      }
    }
  }

  if (bytecodeTarget == null) {
    if (!StringUtil.isEmpty(langLevel)) {
      // according to IDEA rule: if not specified explicitly, set target to be the same as source language level
      bytecodeTarget = langLevel;
    }
    else {
      // last resort and backward compatibility:
      // check if user explicitly defined bytecode target in additional compiler options
      bytecodeTarget = USER_DEFINED_BYTECODE_TARGET.get(context);
    }
  }

  if (bytecodeTarget != null) {
    options.add("-target");
    if (chunkSdkVersion > 0 && compilerSdkVersion > chunkSdkVersion) {
      // if compiler is newer than module JDK
      final int userSpecifiedTargetVersion = JpsJavaSdkType.parseVersion(bytecodeTarget);
      if (userSpecifiedTargetVersion > 0 && userSpecifiedTargetVersion <= compilerSdkVersion) {
        // if user-specified bytecode version can be determined and is supported by compiler
        if (userSpecifiedTargetVersion > chunkSdkVersion) {
          // and user-specified bytecode target level is higher than the highest one supported by the target JDK,
          // force compiler to use highest-available bytecode target version that is supported by the chunk JDK.
          bytecodeTarget = "1." + chunkSdkVersion;
        }
      }
      // otherwise let compiler display compilation error about incorrectly set bytecode target version
    }
    options.add(bytecodeTarget);
  }
  else {
    if (chunkSdkVersion > 0 && compilerSdkVersion > chunkSdkVersion) {
      // force lower bytecode target level to match the version of sdk assigned to this chunk
      options.add("-target");
      options.add("1." + chunkSdkVersion);
    }
  }
}

/** The '-source' compliance option value for the module's configured language level, or {@code null} if not set. */
private static String getLanguageLevel(JpsModule module) {
  final LanguageLevel level = JpsJavaExtensionService.getInstance().getLanguageLevel(module);
  return level != null ?
level.getComplianceOption() : null; } private static boolean isEncodingSet(List<String> options) { for (String option : options) { if ("-encoding".equals(option)) { return true; } } return false; } private static int getCompilerSdkVersion(CompileContext context) { final Integer cached = JAVA_COMPILER_VERSION_KEY.get(context); if (cached != null) { return cached; } int javaVersion = JpsJavaSdkType.parseVersion(SystemProperties.getJavaVersion()); JAVA_COMPILER_VERSION_KEY.set(context, javaVersion); return javaVersion; } private static int getChunkSdkVersion(ModuleChunk chunk) { int chunkSdkVersion = -1; for (JpsModule module : chunk.getModules()) { final JpsSdk<JpsDummyElement> sdk = module.getSdk(JpsJavaSdkType.INSTANCE); if (sdk != null) { final int moduleSdkVersion = JpsJavaSdkType.parseVersion(sdk.getVersionString()); if (moduleSdkVersion != 0 /*could determine the version*/&& (chunkSdkVersion < 0 || chunkSdkVersion > moduleSdkVersion)) { chunkSdkVersion = moduleSdkVersion; } } } return chunkSdkVersion; } @Nullable private static Pair<String, Integer> getForkedJavacSdk(ModuleChunk chunk, int targetLanguageLevel) { final Pair<JpsSdk<JpsDummyElement>, Integer> sdkVersionPair = getAssociatedSdk(chunk); if (sdkVersionPair != null) { final int sdkVersion = sdkVersionPair.second; if (sdkVersion >= 6 && (sdkVersion < 9 || Math.abs(sdkVersion - targetLanguageLevel) <= 3)) { // current javac compiler does support required language level return Pair.create(sdkVersionPair.first.getHomePath(), sdkVersion); } } final String fallbackJdkHome = System.getProperty(GlobalOptions.FALLBACK_JDK_HOME, null); if (fallbackJdkHome == null) { LOG.info("Fallback JDK is not specified. (See " + GlobalOptions.FALLBACK_JDK_HOME + " option)"); return null; } final String fallbackJdkVersion = System.getProperty(GlobalOptions.FALLBACK_JDK_VERSION, null); if (fallbackJdkVersion == null) { LOG.info("Fallback JDK version is not specified. 
(See " + GlobalOptions.FALLBACK_JDK_VERSION + " option)"); return null; } final int fallbackVersion = JpsJavaSdkType.parseVersion(fallbackJdkVersion); if (fallbackVersion < 6) { LOG.info("Version string for fallback JDK is '" + fallbackJdkVersion + "' (recognized as version '" + fallbackJdkVersion + "')." + " At least version 6 is required."); return null; } return Pair.create(fallbackJdkHome, fallbackVersion); } @Nullable private static Pair<JpsSdk<JpsDummyElement>, Integer> getAssociatedSdk(ModuleChunk chunk) { // assuming all modules in the chunk have the same associated JDK; // this constraint should be validated on build start final JpsSdk<JpsDummyElement> sdk = chunk.representativeTarget().getModule().getSdk(JpsJavaSdkType.INSTANCE); return sdk != null? Pair.create(sdk, JpsJavaSdkType.parseVersion(sdk.getVersionString())) : null; } private static void loadCommonJavacOptions(@NotNull CompileContext context, @NotNull JavaCompilingTool compilingTool) { final List<String> options = new ArrayList<>(); final List<String> vmOptions = new ArrayList<>(); final JpsProject project = context.getProjectDescriptor().getProject(); final JpsJavaCompilerConfiguration compilerConfig = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(project); final JpsJavaCompilerOptions compilerOptions = compilerConfig.getCurrentCompilerOptions(); if (compilerOptions.DEBUGGING_INFO) { options.add("-g"); } if (compilerOptions.DEPRECATION) { options.add("-deprecation"); } if (compilerOptions.GENERATE_NO_WARNINGS) { options.add("-nowarn"); } if (compilerOptions instanceof EclipseCompilerOptions) { final EclipseCompilerOptions eclipseOptions = (EclipseCompilerOptions)compilerOptions; if (eclipseOptions.PROCEED_ON_ERROR) { options.add("-proceedOnError"); } } final String customArgs = compilerOptions.ADDITIONAL_OPTIONS_STRING; if (customArgs != null) { boolean skip = false; boolean targetOptionFound = false; for (final String userOption : ParametersListUtil.parse(customArgs)) 
{ if (FILTERED_OPTIONS.contains(userOption)) { skip = true; targetOptionFound = "-target".equals(userOption); continue; } if (skip) { skip = false; if (targetOptionFound) { targetOptionFound = false; USER_DEFINED_BYTECODE_TARGET.set(context, userOption); } } else { if (!FILTERED_SINGLE_OPTIONS.contains(userOption)) { if (userOption.startsWith("-J-")) { vmOptions.add(userOption.substring("-J".length())); } else { options.add(userOption); } } } } } for (ExternalJavacOptionsProvider extension : JpsServiceManager.getInstance().getExtensions(ExternalJavacOptionsProvider.class)) { vmOptions.addAll(extension.getOptions(compilingTool)); } if (JavaCompilers.ECLIPSE_ID.equals(compilingTool.getId())) { for (String option : options) { if (option.startsWith("-proceedOnError")) { Utils.PROCEED_ON_ERROR_KEY.set(context, Boolean.TRUE); break; } } } JAVAC_OPTIONS.set(context, options); JAVAC_VM_OPTIONS.set(context, vmOptions); } @Override public void chunkBuildFinished(CompileContext context, ModuleChunk chunk) { JavaBuilderUtil.cleanupChunkResources(context); } private static Map<File, Set<File>> buildOutputDirectoriesMap(CompileContext context, ModuleChunk chunk) { final Map<File, Set<File>> map = new THashMap<>(FileUtil.FILE_HASHING_STRATEGY); for (ModuleBuildTarget target : chunk.getTargets()) { final File outputDir = target.getOutputDir(); if (outputDir == null) { continue; } final Set<File> roots = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); for (JavaSourceRootDescriptor descriptor : context.getProjectDescriptor().getBuildRootIndex().getTargetRoots(target, context)) { roots.add(descriptor.root); } map.put(outputDir, roots); } return map; } private static JavaModuleIndex getJavaModuleIndex(CompileContext context) { JpsProject project = context.getProjectDescriptor().getProject(); File storageRoot = context.getProjectDescriptor().dataManager.getDataPaths().getDataStorageRoot(); return JpsJavaExtensionService.getInstance().getJavaModuleIndex(project, storageRoot); } private 
static class DiagnosticSink implements DiagnosticOutputConsumer {
  private final CompileContext myContext;
  // counters are written from compiler callbacks and read after the run; volatile for cross-thread visibility.
  // NOTE(review): '++' on a volatile is not atomic - this assumes diagnostics arrive from a single thread; confirm.
  private volatile int myErrorCount;
  private volatile int myWarningCount;
  // source files that produced at least one error; inspected afterwards for stale-path recovery
  private final Set<File> myFilesWithErrors = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);

  private DiagnosticSink(CompileContext context) {
    myContext = context;
  }

  @Override
  public void javaFileLoaded(File file) {
  }

  @Override
  public void registerImports(final String className, final Collection<String> imports, final Collection<String> staticImports) {
    // intentionally disabled; kept commented out for reference
    //submitAsyncTask(myContext, new Runnable() {
    //  public void run() {
    //    final Callbacks.Backend callback = DELTA_MAPPINGS_CALLBACK_KEY.get(myContext);
    //    if (callback != null) {
    //      callback.registerImports(className, imports, staticImports);
    //    }
    //  }
    //});
  }

  /** Routes plugin-specific output blobs to the listener registered for the given plugin id. */
  @Override
  public void customOutputData(String pluginId, String dataName, byte[] data) {
    for (CustomOutputDataListener listener : JpsServiceManager.getInstance().getExtensions(CustomOutputDataListener.class)) {
      if (pluginId.equals(listener.getId())) {
        listener.processData(dataName, data);
        return;
      }
    }
  }

  /** Handles raw output lines of a (forked) compiler process: forwards stdout/stderr prefixes, converts the rest to build messages. */
  @Override
  public void outputLineAvailable(String line) {
    if (!StringUtil.isEmpty(line)) {
      if (line.startsWith(ExternalJavacManager.STDOUT_LINE_PREFIX)) {
        //noinspection UseOfSystemOutOrSystemErr
        System.out.println(line);
      }
      else if (line.startsWith(ExternalJavacManager.STDERR_LINE_PREFIX)) {
        //noinspection UseOfSystemOutOrSystemErr
        System.err.println(line);
      }
      else if (line.contains("java.lang.OutOfMemoryError")) {
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, "OutOfMemoryError: insufficient memory"));
        myErrorCount++;
      }
      else {
        final BuildMessage.Kind kind = getKindByMessageText(line);
        if (kind == BuildMessage.Kind.ERROR) {
          myErrorCount++;
        }
        else if (kind == BuildMessage.Kind.WARNING) {
          myWarningCount++;
        }
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, kind, line));
      }
    }
  }

  /** Crude keyword-based classification of a free-form compiler output line. */
  private static BuildMessage.Kind getKindByMessageText(String line) {
    final String lowercasedLine = line.toLowerCase(Locale.US);
    if (lowercasedLine.contains("error") || lowercasedLine.contains("requires target release")) {
      return BuildMessage.Kind.ERROR;
    }
    return BuildMessage.Kind.INFO;
  }

  /** Converts a javax.tools diagnostic into a CompilerMessage, updating the error/warning counters. */
  @Override
  public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
    final CompilerMessage.Kind kind;
    switch (diagnostic.getKind()) {
      case ERROR:
        kind = BuildMessage.Kind.ERROR;
        myErrorCount++;
        break;
      case MANDATORY_WARNING:
      case WARNING:
        kind = BuildMessage.Kind.WARNING;
        myWarningCount++;
        break;
      case NOTE:
        kind = BuildMessage.Kind.INFO;
        break;
      case OTHER:
        kind = diagnostic instanceof JpsInfoDiagnostic ? BuildMessage.Kind.JPS_INFO : BuildMessage.Kind.OTHER;
        break;
      default:
        kind = BuildMessage.Kind.OTHER;
    }
    File sourceFile = null;
    try {
      // for eclipse compiler just an attempt to call getSource() may lead to an NPE,
      // so calling this method under try/catch to avoid induced compiler errors
      final JavaFileObject source = diagnostic.getSource();
      sourceFile = source != null ?
PathUtils.convertToFile(source.toUri()) : null;
    }
    catch (Exception e) {
      LOG.info(e);
    }
    final String srcPath;
    if (sourceFile != null) {
      if (kind == BuildMessage.Kind.ERROR) {
        myFilesWithErrors.add(sourceFile);
      }
      srcPath = FileUtil.toSystemIndependentName(sourceFile.getPath());
    }
    else {
      srcPath = null;
    }
    String message = diagnostic.getMessage(Locale.US);
    if (Utils.IS_TEST_MODE) {
      LOG.info(message);
    }
    final CompilerMessage compilerMsg = new CompilerMessage(
      BUILDER_NAME, kind, message, srcPath, diagnostic.getStartPosition(), diagnostic.getEndPosition(),
      diagnostic.getPosition(), diagnostic.getLineNumber(), diagnostic.getColumnNumber()
    );
    if (LOG.isDebugEnabled()) {
      LOG.debug(compilerMsg.toString());
    }
    myContext.processMessage(compilerMsg);
  }

  public int getErrorCount() {
    return myErrorCount;
  }

  public int getWarningCount() {
    return myWarningCount;
  }

  @NotNull
  public Collection<File> getFilesWithErrors() {
    return myFilesWithErrors;
  }
}

/** Saves compiler-produced output files and schedules the registered class post-processors for each of them. */
private class ClassProcessingConsumer implements OutputFileConsumer {
  private final CompileContext myContext;
  private final OutputFileConsumer myDelegateOutputFileSink;

  private ClassProcessingConsumer(CompileContext context, OutputFileConsumer sink) {
    myContext = context;
    myDelegateOutputFileSink = sink != null ? sink : new OutputFileConsumer() {
      @Override
      public void save(@NotNull OutputFileObject fileObject) {
        throw new RuntimeException("Output sink for compiler was not specified");
      }
    };
  }

  @Override
  public void save(@NotNull final OutputFileObject fileObject) {
    // generated files must be saved synchronously, because some compilers (e.g. eclipse)
    // may want to read them for further compilation
    try {
      final BinaryContent content = fileObject.getContent();
      final File file = fileObject.getFile();
      if (content != null) {
        content.saveToFile(file);
      }
      else {
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.WARNING, "Missing content for file " + file.getPath()));
      }
    }
    catch (IOException e) {
      // NOTE(review): only e.getMessage() is reported; the stack trace is dropped - consider logging 'e' as well
      myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, e.getMessage()));
    }

    // post-processing may run asynchronously; compileJava() awaits completion via the per-context TasksCounter
    submitAsyncTask(myContext, () -> {
      try {
        for (ClassPostProcessor processor : ourClassProcessors) {
          processor.process(myContext, fileObject);
        }
      }
      finally {
        myDelegateOutputFileSink.save(fileObject);
      }
    });
  }
}

private static final Key<TasksCounter> COUNTER_KEY = Key.create("_async_task_counter_");

/** Counts outstanding asynchronous tasks; await() blocks until the count drops to zero. */
private static final class TasksCounter {
  private int myCounter;

  private synchronized void incTaskCount() {
    myCounter++;
  }

  private synchronized void decTaskCounter() {
    myCounter = Math.max(0, myCounter - 1);
    if (myCounter == 0) {
      notifyAll();
    }
  }

  public synchronized void await() {
    while (myCounter > 0) {
      try {
        wait();
      }
      catch (InterruptedException ignored) {
      }
    }
  }
}
}
/**
 * Copyright (C) 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.inject.servlet;

import static org.easymock.EasyMock.createControl;
import static org.easymock.EasyMock.expect;

import java.io.IOException;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import junit.framework.TestCase;

import org.easymock.IMocksControl;

import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Scopes;
import com.google.inject.name.Named;
import com.google.inject.name.Names;

/** Tests to make sure that servlets with a context path are handled right. */
public class ContextPathTest extends TestCase {

  @Inject @Named("foo")
  private TestServlet fooServlet;

  @Inject @Named("bar")
  private TestServlet barServlet;

  private IMocksControl globalControl;
  private Injector injector;
  private ServletContext servletContext;
  private FilterConfig filterConfig;
  private GuiceFilter guiceFilter;

  /**
   * Builds an injector serving two distinct TestServlet singletons on
   * /foo/* and /bar/*, then initializes a GuiceFilter against a mocked
   * ServletContext/FilterConfig pair. The mocks on {@code globalControl}
   * are shared by every test and verified once in {@link #tearDown()}.
   */
  @Override
  public final void setUp() throws Exception {
    injector = Guice.createInjector(new ServletModule() {
      @Override
      protected void configureServlets() {
        bind(TestServlet.class).annotatedWith(Names.named("foo"))
            .to(TestServlet.class).in(Scopes.SINGLETON);
        bind(TestServlet.class).annotatedWith(Names.named("bar"))
            .to(TestServlet.class).in(Scopes.SINGLETON);
        serve("/foo/*").with(Key.get(TestServlet.class, Names.named("foo")));
        serve("/bar/*").with(Key.get(TestServlet.class, Names.named("bar")));
        // TODO: add a filter(..) call and validate it is correct
      }
    });
    injector.injectMembers(this);

    assertNotNull(fooServlet);
    assertNotNull(barServlet);
    assertNotSame(fooServlet, barServlet);

    globalControl = createControl();
    servletContext = globalControl.createMock(ServletContext.class);
    filterConfig = globalControl.createMock(FilterConfig.class);

    expect(servletContext.getAttribute(GuiceServletContextListener.INJECTOR_NAME))
        .andReturn(injector).anyTimes();
    expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    globalControl.replay();

    guiceFilter = new GuiceFilter();
    guiceFilter.init(filterConfig);
  }

  @Override
  public final void tearDown() {
    assertNotNull(fooServlet);
    assertNotNull(barServlet);
    fooServlet = null;
    barServlet = null;

    guiceFilter.destroy();
    guiceFilter = null;

    injector = null;
    filterConfig = null;
    servletContext = null;

    globalControl.verify();
    // Drop the mock control too, for consistency with the other fields.
    globalControl = null;
  }

  public void testSimple() throws Exception {
    // This is exactly the /bar scenario that runTest() encapsulates; delegate
    // to it instead of duplicating the mock set-up inline.
    runTest("/bar/foo", "/bar/foo", "", false, false, true);
  }

  //
  // Each of the following "runTest" calls takes three path parameters:
  //
  //   the value of getRequestURI()
  //   the value of getServletPath()
  //   the value of getContextPath()
  //
  // These values have been captured using a filter in Apache Tomcat 6.0.32
  // and are used as real-world values that a servlet container would send
  // into the GuiceFilter.
  //
  // The remaining three booleans are:
  //
  //   true if the request gets passed down the filter chain
  //   true if the request hits the "foo" servlet
  //   true if the request hits the "bar" servlet
  //
  // After adjusting the request URI for the web app deployment location, all
  // calls should always produce the same result.
  //

  // ROOT web app, using Tomcat default servlet
  public void testRootDefault() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and /bar/*).
    runTest("/", "/", "", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/bar/", "/bar/", "", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/foo/xxx", "/foo/xxx", "", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/xxx", "/xxx", "", true, false, false);
  }

  // ROOT web app, using explicit backing servlet mounted at /*
  public void testRootExplicit() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and /bar/*).
    runTest("/", "", "", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/bar/", "", "", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/foo/xxx", "", "", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/xxx", "", "", true, false, false);
  }

  // ROOT web app, using two backing servlets, mounted at /bar/* and /foo/*
  public void testRootSpecific() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and /bar/*).
    runTest("/", "/", "", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/bar/", "/bar", "", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/foo/xxx", "/foo", "", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/xxx", "/xxx", "", true, false, false);
  }

  // Web app located at /webtest, using Tomcat default servlet
  public void testWebtestDefault() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and /bar/*).
    runTest("/webtest/", "/", "/webtest", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/webtest/bar/", "/bar/", "/webtest", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/webtest/foo/xxx", "/foo/xxx", "/webtest", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/webtest/xxx", "/xxx", "/webtest", true, false, false);
  }

  // Web app located at /webtest, using explicit backing servlet mounted at /*
  public void testWebtestExplicit() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and /bar/*).
    runTest("/webtest/", "", "/webtest", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/webtest/bar/", "", "/webtest", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/webtest/foo/xxx", "", "/webtest", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/webtest/xxx", "", "/webtest", true, false, false);
  }

  // Web app located at /webtest, using two backing servlets, mounted at /bar/*
  // and /foo/*
  public void testWebtestSpecific() throws Exception {
    // fetching /. Should go up the filter chain (only mappings on /foo/* and
    // /bar/*).
    runTest("/webtest/", "/", "/webtest", true, false, false);
    // fetching /bar/. Should hit the bar servlet.
    runTest("/webtest/bar/", "/bar", "/webtest", false, false, true);
    // fetching /foo/xxx. Should hit the foo servlet.
    runTest("/webtest/foo/xxx", "/foo", "/webtest", false, true, false);
    // fetching /xxx. Should go up the chain.
    runTest("/webtest/xxx", "/xxx", "/webtest", true, false, false);
  }

  /**
   * Drives a single simulated request through the GuiceFilter and asserts
   * where it ended up: passed down the chain, the "foo" servlet, or the
   * "bar" servlet.
   */
  private void runTest(final String requestURI, final String servletPath,
      final String contextPath, final boolean filterResult,
      final boolean fooResult, final boolean barResult) throws Exception {
    IMocksControl testControl = createControl();

    barServlet.clear();
    fooServlet.clear();

    TestFilterChain testFilterChain = new TestFilterChain();
    HttpServletRequest req = testControl.createMock(HttpServletRequest.class);
    HttpServletResponse res = testControl.createMock(HttpServletResponse.class);

    expect(req.getMethod()).andReturn("GET").anyTimes();
    expect(req.getRequestURI()).andReturn(requestURI).anyTimes();
    expect(req.getServletPath()).andReturn(servletPath).anyTimes();
    expect(req.getContextPath()).andReturn(contextPath).anyTimes();
    testControl.replay();

    guiceFilter.doFilter(req, res, testFilterChain);

    assertEquals(filterResult, testFilterChain.isTriggered());
    assertEquals(fooResult, fooServlet.isTriggered());
    assertEquals(barResult, barServlet.isTriggered());
    testControl.verify();
  }

  /** Records whether a GET ever reached this servlet. */
  public static class TestServlet extends HttpServlet {
    private boolean triggered = false;

    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp) {
      triggered = true;
    }

    public boolean isTriggered() {
      return triggered;
    }

    public void clear() {
      triggered = false;
    }
  }

  /** Records whether a request fell through the filter to the chain. */
  public static class TestFilterChain implements FilterChain {
    private boolean triggered = false;

    public void doFilter(ServletRequest request, ServletResponse response)
        throws IOException, ServletException {
      triggered = true;
    }

    public boolean isTriggered() {
      return triggered;
    }

    public void clear() {
      triggered = false;
    }
  }
}
/**
 * Copyright MaDgIK Group 2010 - 2015.
 */
package madgik.exareme.worker.art.executionPlan;

import madgik.exareme.common.art.entity.EntityName;
import madgik.exareme.worker.art.executionPlan.entity.*;
import madgik.exareme.worker.art.executionPlan.parser.expression.*;

/**
 * Synchronizing decorator for {@link ExecutionPlanImpl}: every call is
 * performed inside a {@code synchronized} block on the wrapped plan instance,
 * so concurrent callers never observe the plan mid-mutation. All methods
 * delegate one-to-one without any extra logic.
 *
 * @author herald
 */
public class ExecutionPlanImplSync implements EditableExecutionPlan {
    private static final long serialVersionUID = 1L;

    // The wrapped plan; it also serves as the single lock object.
    private final EditableExecutionPlan delegate;

    /** Wraps a fresh, empty plan. */
    public ExecutionPlanImplSync() {
        delegate = new ExecutionPlanImpl();
    }

    /** Wraps a plan built from the given parsed expression. */
    public ExecutionPlanImplSync(PlanExpression expression) throws SemanticError {
        delegate = new ExecutionPlanImpl(expression);
    }

    @Override
    public ObjectType getType(String name) throws SemanticError {
        synchronized (delegate) {
            return delegate.getType(name);
        }
    }

    @Override
    public EntityName addContainer(Container c) throws SemanticError {
        synchronized (delegate) {
            return delegate.addContainer(c);
        }
    }

    @Override
    public EntityName removeContainer(String containerName) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeContainer(containerName);
        }
    }

    @Override
    public OperatorEntity addOperator(Operator operator) throws SemanticError {
        synchronized (delegate) {
            return delegate.addOperator(operator);
        }
    }

    @Override
    public OperatorEntity removeOperator(String operatorName) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeOperator(operatorName);
        }
    }

    @Override
    public StateEntity addState(State state) throws SemanticError {
        synchronized (delegate) {
            return delegate.addState(state);
        }
    }

    @Override
    public StateEntity removeState(String stateName) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeState(stateName);
        }
    }

    @Override
    public StateLinkEntity addStateLink(StateLink statelink) throws SemanticError {
        synchronized (delegate) {
            return delegate.addStateLink(statelink);
        }
    }

    @Override
    public StateLinkEntity removeStateLink(String operatorName, String stateName)
            throws SemanticError {
        synchronized (delegate) {
            return delegate.removeStateLink(operatorName, stateName);
        }
    }

    @Override
    public SwitchEntity addSwitch(Switch s) throws SemanticError {
        synchronized (delegate) {
            return delegate.addSwitch(s);
        }
    }

    @Override
    public SwitchEntity removeSwitch(Switch s) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeSwitch(s);
        }
    }

    @Override
    public SwitchLinkEntity addSwitchConnect(SwitchLink switchConnect) throws SemanticError {
        synchronized (delegate) {
            return delegate.addSwitchConnect(switchConnect);
        }
    }

    @Override
    public SwitchLinkEntity removeSwitchConnect(SwitchLink switchConnect) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeSwitchConnect(switchConnect);
        }
    }

    @Override
    public void setDataTransferOperatorsCount(int dataTransferOperatorsCount) {
        synchronized (delegate) {
            delegate.setDataTransferOperatorsCount(dataTransferOperatorsCount);
        }
    }

    @Override
    public int getDataTransferOperatorsCount() {
        synchronized (delegate) {
            return delegate.getDataTransferOperatorsCount();
        }
    }

    @Override
    public StartEntity createStartEntity(Start start) throws SemanticError {
        synchronized (delegate) {
            return delegate.createStartEntity(start);
        }
    }

    @Override
    public StopEntity createStopEntity(Stop stop) throws SemanticError {
        synchronized (delegate) {
            return delegate.createStopEntity(stop);
        }
    }

    @Override
    public DestroyEntity createDestroyEntity(Destroy destroy) throws SemanticError {
        synchronized (delegate) {
            return delegate.createDestroyEntity(destroy);
        }
    }

    @Override
    public int getContainerCount() {
        synchronized (delegate) {
            return delegate.getContainerCount();
        }
    }

    @Override
    public int getOperatorCount() {
        synchronized (delegate) {
            return delegate.getOperatorCount();
        }
    }

    @Override
    public OperatorEntity getOperator(String operatorName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getOperator(operatorName);
        }
    }

    @Override
    public StateEntity getState(String stateName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getState(stateName);
        }
    }

    @Override
    public StateLinkEntity getStateLink(String operatorName, String stateName)
            throws SemanticError {
        synchronized (delegate) {
            return delegate.getStateLink(operatorName, stateName);
        }
    }

    @Override
    public Iterable<OperatorEntity> getFromLinks(OperatorEntity to) throws SemanticError {
        synchronized (delegate) {
            return delegate.getFromLinks(to);
        }
    }

    @Override
    public Iterable<OperatorEntity> getToLinks(OperatorEntity from) throws SemanticError {
        synchronized (delegate) {
            return delegate.getToLinks(from);
        }
    }

    @Override
    public Iterable<StateEntity> getConnectedStates(String operatorName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getConnectedStates(operatorName);
        }
    }

    @Override
    public Iterable<OperatorEntity> getConnectedOperators(String stateName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getConnectedOperators(stateName);
        }
    }

    @Override
    public Iterable<OperatorEntity> iterateOperators() {
        synchronized (delegate) {
            return delegate.iterateOperators();
        }
    }

    @Override
    public Iterable<StateEntity> iterateStates() {
        synchronized (delegate) {
            return delegate.iterateStates();
        }
    }

    @Override
    public Iterable<StateLinkEntity> iterateStateLinks() {
        synchronized (delegate) {
            return delegate.iterateStateLinks();
        }
    }

    @Override
    public boolean isDefined(String name) throws SemanticError {
        synchronized (delegate) {
            return delegate.isDefined(name);
        }
    }

    @Override
    public EntityName getContainer(String containerName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getContainer(containerName);
        }
    }

    @Override
    public Iterable<String> iterateContainers() {
        synchronized (delegate) {
            return delegate.iterateContainers();
        }
    }

    @Override
    public PragmaEntity addPragma(Pragma p) throws SemanticError {
        synchronized (delegate) {
            return delegate.addPragma(p);
        }
    }

    @Override
    public int getPragmaCount() {
        synchronized (delegate) {
            return delegate.getPragmaCount();
        }
    }

    @Override
    public Iterable<PragmaEntity> iteratePragmas() {
        synchronized (delegate) {
            return delegate.iteratePragmas();
        }
    }

    @Override
    public PragmaEntity getPragma(String pragmaName) throws SemanticError {
        synchronized (delegate) {
            return delegate.getPragma(pragmaName);
        }
    }

    @Override
    public PragmaEntity removePragma(String pragmaName) throws SemanticError {
        synchronized (delegate) {
            return delegate.removePragma(pragmaName);
        }
    }

    @Override
    public int getStateCount() {
        synchronized (delegate) {
            return delegate.getStateCount();
        }
    }

    @Override
    public int getStateLinkCount() {
        synchronized (delegate) {
            return delegate.getStateLinkCount();
        }
    }

    @Override
    public int getOperatorLinkCount() {
        synchronized (delegate) {
            return delegate.getOperatorLinkCount();
        }
    }

    @Override
    public OperatorLinkEntity getOperatorLink(String from, String to) throws SemanticError {
        synchronized (delegate) {
            return delegate.getOperatorLink(from, to);
        }
    }

    @Override
    public Iterable<OperatorLinkEntity> iterateOperatorLinks() {
        synchronized (delegate) {
            return delegate.iterateOperatorLinks();
        }
    }

    @Override
    public OperatorLinkEntity addOperatorLink(OperatorLink opLink) throws SemanticError {
        synchronized (delegate) {
            return delegate.addOperatorLink(opLink);
        }
    }

    @Override
    public OperatorLinkEntity removeOperatorLink(String from, String to) throws SemanticError {
        synchronized (delegate) {
            return delegate.removeOperatorLink(from, to);
        }
    }
}
/*L * Copyright Georgetown University, Washington University. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cab2b/LICENSE.txt for details. */ package edu.wustl.cab2b.client.ui.searchDataWizard; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.GradientPaint; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.geom.Point2D; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.Vector; import javax.swing.BorderFactory; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.JTextArea; import javax.swing.border.EmptyBorder; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableCellRenderer; import org.jdesktop.swingx.JXPanel; import org.jdesktop.swingx.JXTitledPanel; import org.jdesktop.swingx.painter.gradient.BasicGradientPainter; import edu.common.dynamicextensions.domaininterface.AttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.wustl.cab2b.client.ui.controls.Cab2bButton; import edu.wustl.cab2b.client.ui.controls.Cab2bHyperlink; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.controls.Cab2bTable; import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel; import edu.wustl.cab2b.client.ui.controls.RiverLayout; import edu.wustl.cab2b.client.ui.main.AbstractTypePanel; import edu.wustl.cab2b.client.ui.main.ParseXMLFile; import edu.wustl.cab2b.client.ui.main.SwingUIManager; import edu.wustl.cab2b.client.ui.mainframe.Cab2bContentPanel; import edu.wustl.cab2b.client.ui.mainframe.NewWelcomePanel; import edu.wustl.cab2b.client.ui.pagination.JPageElement; import 
edu.wustl.cab2b.client.ui.pagination.JPagination; import edu.wustl.cab2b.client.ui.pagination.NumericPager; import edu.wustl.cab2b.client.ui.pagination.PageElement; import edu.wustl.cab2b.client.ui.pagination.PageElementImpl; import edu.wustl.cab2b.client.ui.query.ClientQueryBuilder; import edu.wustl.cab2b.client.ui.query.IClientQueryBuilderInterface; import edu.wustl.cab2b.client.ui.searchDataWizard.addLimit.AddLimitPanel; import edu.wustl.cab2b.client.ui.util.CDEDetails; import edu.wustl.cab2b.client.ui.util.CommonUtils; import edu.wustl.cab2b.client.ui.util.WindowUtilities; import edu.wustl.cab2b.common.exception.CheckedException; import edu.wustl.cab2b.common.queryengine.Cab2bQueryObjectFactory; import edu.wustl.cab2b.common.util.AttributeInterfaceComparator; import edu.wustl.cab2b.common.util.Constants; import edu.wustl.cab2b.common.util.Utility; import edu.wustl.common.querysuite.queryobject.ICondition; import edu.wustl.common.querysuite.queryobject.IExpression; import edu.wustl.common.querysuite.queryobject.IRule; /** * The class that contains commonalities required for displaying results from * the 'AddLimit' and 'choose category' section from the main search dialog. * * @author mahesh_iyer/chetan_bh/gautam_shetty/Deepak_Shingan */ public class SearchResultPanel extends Cab2bPanel implements ActionListener { private static final long serialVersionUID = 1L; /** The pagination component to paginate the results of the search */ private Cab2bPanel resultPanel; private Cab2bButton addLimitButton; private Cab2bButton editLimitButton; private Cab2bHyperlink attributeDetailsLink; private Cab2bPanel constraintButtonPanel; private EntityInterface entityForSelectedLink; /** * Saved reference to the content searchPanel that needs to be refreshed for * appropriate events. */ protected ContentPanel contentPanel; /** * Constructor * * @param addLimitPanel * Reference to the parent content searchPanel that needs * refreshing. 
* * @param result * The collectiond of entities. */ public SearchResultPanel(ContentPanel contentPanel, Set<EntityInterface> result) { this.contentPanel = contentPanel; initGUI(result); } /** * Method initializes the searchPanel by appropriately laying out child * components. * * @param result * The collectiond of entities. */ private void initGUI(Set<EntityInterface> resultSet) { if (contentPanel instanceof AddLimitPanel) { ((AddLimitPanel) contentPanel).setSearchResultPanel(this); } Vector<PageElement> pageElementCollection = new Vector<PageElement>(); if (resultSet != null) { List<EntityInterface> resultList = new ArrayList<EntityInterface>(resultSet); //Collections.sort(resultList, new EntityInterfaceComparator()); for (EntityInterface entity : resultList) { // Create an instance of the PageElement. Initialize with the // appropriate data PageElement pageElement = new PageElementImpl(); String className = Utility.getDisplayName(entity); pageElement.setDisplayName(className); String description = entity.getDescription(); if (description == null || description.equals("")) { description = "*No description available. "; } pageElement.setDescription(description); pageElement.setUserObject(entity); pageElementCollection.add(pageElement); } NumericPager numericPager = new NumericPager(pageElementCollection, getPageSize()); /* Initalize the pagination component. 
*/ JPagination resultsPage = new JPagination(pageElementCollection, numericPager, this, true); resultsPage.setSelectableEnabled(false); resultsPage.setGroupActionEnabled(false); resultsPage.addPageElementActionListener(this); resultPanel = new Cab2bPanel(); resultPanel.add("hfill vfill ", resultsPage); JXTitledPanel titledSearchResultsPanel = displaySearchSummary(resultList.size()); titledSearchResultsPanel.setContentContainer(resultPanel); add("hfill vfill", titledSearchResultsPanel); } } /** * Sets result panel * * @param resulPanel */ public void setResultPanel(Cab2bPanel resulPanel) { resultPanel.removeAll(); resultPanel.add("hfill vfill ", resulPanel); } public EntityInterface getEntityForSelectedLink() { return entityForSelectedLink; } /** * Removing result panel */ public void removeResultPanel() { if (resultPanel != null) { resultPanel.removeAll(); } } /** * Initiliasing/Adding Add Limit buttons * * @param panelsToAdd * @param entity */ public void initializeAddLimitButton(final JXPanel[] panelsToAdd, final EntityInterface entity) { addLimitButton = new Cab2bButton("Add Limit"); addLimitButton.setPreferredSize(new Dimension(95, 22)); addLimitButton.addActionListener(new AddLimitButtonListner(panelsToAdd, entity)); } /** * Initiliasing/Adding EditLimit buttons * * @param panelsToAdd * @param expression */ private void initializeEditLimitButtons(final JXPanel[] panelsToAdd, final IExpression expression) { editLimitButton = new Cab2bButton("Edit Limit"); editLimitButton.addActionListener(new EditLimitButtonListner(panelsToAdd, expression)); editLimitButton.setPreferredSize(new Dimension(95, 22)); } /** * This method ctreates and returns a hyperlink which will display certain * details of all the attributes of the given entity. 
* * @param entity * @return */ private void initializeAttributeDetailLink(final EntityInterface entity) { attributeDetailsLink = new Cab2bHyperlink(); attributeDetailsLink.setText("CDE Details"); attributeDetailsLink.addActionListener(new AttributeDetailsLinkListener(entity)); } /** * * @param cab2bButton * @return */ private Cab2bPanel getConstraintButtonPanel(Cab2bButton cab2bButton, EntityInterface entity) { constraintButtonPanel = new Cab2bPanel(new RiverLayout(5, 0)); constraintButtonPanel.add(cab2bButton); constraintButtonPanel.add("tab", new JLabel(" | ")); if (attributeDetailsLink == null) { initializeAttributeDetailLink(entity); } constraintButtonPanel.add("tab", attributeDetailsLink); constraintButtonPanel.setOpaque(false); return constraintButtonPanel; } /** * Method to create AddLimitUI * * @param entity */ protected JXPanel[] createAddLimitPanels(final EntityInterface entity) { final JXPanel[] componentPanel = getAttributeComponentPanels(entity); final JXPanel[] finalPanelToadd = initializePanelsForAddConstraints(componentPanel); initializeAddLimitButton(componentPanel, entity); finalPanelToadd[0].add(getConstraintButtonPanel(addLimitButton, entity)); GradientPaint gp1 = new GradientPaint(new Point2D.Double(.09d, 0), Color.LIGHT_GRAY, new Point2D.Double( .95d, 0), Color.WHITE); finalPanelToadd[0].setBackgroundPainter(new BasicGradientPainter(gp1)); finalPanelToadd[0].setBorder(BorderFactory.createLineBorder(Color.BLACK)); return finalPanelToadd; } /** * Get panels array to be displayed in add limit searchPanel * * @param entity * @return */ public JXPanel[] createEditLimitPanels(final IExpression expression) { /* This is the EntityInterface instance. 
*/ final EntityInterface entity = expression.getQueryEntity().getDynamicExtensionsEntity(); final JXPanel[] componentPanel = getAttributeComponentPanels(entity); final JXPanel[] finalPanelToadd = initializePanelsForAddConstraints(componentPanel); initializeEditLimitButtons(componentPanel, expression); finalPanelToadd[0].add(getConstraintButtonPanel(editLimitButton, entity)); return finalPanelToadd; } /** * The action listener for the individual page elements. * * @param actionEvent * The event that contains details of the click on the individual * page elements. */ public void actionPerformed(ActionEvent actionEvent) { Cab2bHyperlink<JPageElement> link = (Cab2bHyperlink<JPageElement>) (actionEvent.getSource()); JPageElement jPageElement = link.getUserObject(); jPageElement.resetLabel(); JPagination pagination = jPageElement.getPagination(); JPageElement selectedPageElement = pagination.getSelectedJPageElement(); if (selectedPageElement != null) { selectedPageElement.resetHyperLink(); } pagination.setSelectedJPageElement(jPageElement); PageElement pageElement = jPageElement.getPageElement(); entityForSelectedLink = (EntityInterface) pageElement.getUserObject(); updateAddLimitPage(contentPanel, entityForSelectedLink); } /** * Method to update Add limit page for selected entity * @param contentPanel */ public void updateAddLimitPage(ContentPanel contentPanel, EntityInterface entity) { if (contentPanel instanceof Cab2bContentPanel) { SearchPanel searchPanel = (SearchPanel) this.getParent(); (searchPanel.getAttributeSelectCDCPanel()).setEntityInterface(entity); (searchPanel.getAttributeSelectCDCPanel()).generatePanel(); (searchPanel.getAttributeSelectCDCPanel()).setTestDAG(searchPanel.getTestDAG()); } initializeAttributeDetailLink(entity); final JXPanel[] panelsToAdd = createAddLimitPanels(entity); if (getAttributeComponentPanels(entity) != null) { // pass the appropriate class name for display performAction(panelsToAdd, Utility.getDisplayName(entity)); } updateUI(); } 
/**
 * Builds one criterion panel per attribute of the given entity; these panels form the
 * "add limit" page where the user enters conditions.
 *
 * @param entity the dynamic-extensions entity whose attributes are rendered
 * @return one {@link AbstractTypePanel} per attribute (sorted by
 *         {@code AttributeInterfaceComparator}), or {@code null} if the entity has no
 *         attribute collection or panel generation failed
 */
private JXPanel[] getAttributeComponentPanels(final EntityInterface entity) {
    final Collection<AttributeInterface> attributeCollection = entity.getAttributeCollection();
    AbstractTypePanel[] componentPanels = null;
    if (attributeCollection != null) {
        try {
            List<AttributeInterface> attributeList = new ArrayList<AttributeInterface>(attributeCollection);
            Collections.sort(attributeList, new AttributeInterfaceComparator());
            componentPanels = new AbstractTypePanel[attributeList.size()];
            ParseXMLFile parseFile = ParseXMLFile.getInstance();
            // Widest label dimension is computed once so all rows align.
            Dimension maxLabelDimension = CommonUtils.getMaximumLabelDimension(attributeList);
            int i = 0;
            for (AttributeInterface attribute : attributeList) {
                componentPanels[i++] = (AbstractTypePanel) SwingUIManager.generateUIPanel(parseFile, attribute, maxLabelDimension);
                // i was already incremented above, so i - 1 is the panel just created.
                componentPanels[i - 1].createPanelWithOperator(attribute);
            }
        } catch (CheckedException checkedException) {
            CommonUtils.handleException(checkedException, this, true, true, false, false);
        }
    }
    // May be null (see above) or partially filled if generateUIPanel threw mid-loop.
    return componentPanels;
}

/**
 * Wraps the per-attribute criterion panels in a scroll pane and returns the two panels
 * that make up the add-constraints area.
 *
 * @param componentPanel panels produced by {@link #getAttributeComponentPanels}
 * @return array of two panels: index 0 is an (initially empty) button strip,
 *         index 1 holds the scrollable list of criterion panels
 */
public JXPanel[] initializePanelsForAddConstraints(JXPanel[] componentPanel) {
    Cab2bPanel cab2bPanel = new Cab2bPanel(new RiverLayout(5, 5));
    for (int j = 0; j < componentPanel.length; j++) {
        // "br" is a RiverLayout constraint: place each panel on a new row.
        cab2bPanel.add("br", componentPanel[j]);
    }
    JScrollPane pane = new JScrollPane(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
            JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
    pane.getViewport().setBackground(Color.WHITE);
    pane.getViewport().add(cab2bPanel);
    pane.getViewport().setBorder(null);
    pane.setBorder(null);
    JXPanel[] finalPanelsToAdd = new Cab2bPanel[2];
    // FlowLayout(2, 0, 3): right-aligned, 0 px horizontal / 3 px vertical gaps.
    FlowLayout flowLayout = new FlowLayout(2, 0, 3);
    finalPanelsToAdd[0] = new Cab2bPanel(flowLayout);
    finalPanelsToAdd[1] = new Cab2bPanel();
    finalPanelsToAdd[1].add("hfill vfill ", pane);
    return finalPanelsToAdd;
}

/**
 * Handles the 'Add Limit' button click: collects every valid condition entered in the
 * criterion panels and adds them as one rule to the current query.
 *
 * @param componentPanel criterion panels (each an {@link AbstractTypePanel}) to read conditions from
 * @param entity entity the conditions belong to
 */
public void performAddLimitAction(JXPanel[] componentPanel, EntityInterface entity) {
    List<AttributeInterface> attributes = new ArrayList<AttributeInterface>();
    List<String> conditions = new ArrayList<String>();
    List<List<String>> values = new ArrayList<List<String>>();
    for (int j = 0; j < componentPanel.length; j++) {
        AbstractTypePanel panel = (AbstractTypePanel) componentPanel[j];
        // isConditionValid: negative = invalid (abort whole action), 0 = valid condition
        // entered, positive presumably means "no condition entered" — TODO confirm.
        int conditionStatus = panel.isConditionValid(contentPanel);
        if (conditionStatus < 0) {
            return;
        } else if (conditionStatus == 0) {
            attributes.add(panel.getAttributeEntity());
            conditions.add(panel.getConditionItem());
            values.add(panel.getValues());
        }
    }
    if (attributes.isEmpty()) {
        JOptionPane.showMessageDialog(contentPanel, "Please add condition(s) before proceeding",
                "Add Limit Warning", JOptionPane.WARNING_MESSAGE);
    } else {
        // NOTE(review): relies on the exact Swing ancestor chain
        // contentPanel -> parent -> MainSearchPanel; breaks if the hierarchy changes.
        MainSearchPanel mainSearchPanel = (MainSearchPanel) ((JXPanel) contentPanel).getParent().getParent();
        if (mainSearchPanel.getQueryObject() == null) {
            // Lazily create the query on the first added rule.
            IClientQueryBuilderInterface query = new ClientQueryBuilder();
            mainSearchPanel.setQueryObject(query);
            mainSearchPanel.getCenterPanel().getAddLimitPanel().setQueryObject(query);
        }
        int expressionId = mainSearchPanel.getQueryObject().addRule(attributes, conditions, values,
                attributes.get(0).getEntity());
        mainSearchPanel.getCenterPanel().getAddLimitPanel().refreshBottomCenterPanel(expressionId);
    }
}

/**
 * Returns the attribute with the given name from the collection, or {@code null} if absent.
 * Names are compared after trimming surrounding whitespace.
 *
 * @param attributeCollection attributes to search
 * @param attributeName name to look for
 * @return the matching attribute, or {@code null} when no name matches
 */
private AttributeInterface getAttribute(Collection<AttributeInterface> attributeCollection, String attributeName) {
    AttributeInterface requriedAttribute = null;
    for (AttributeInterface attribute : attributeCollection) {
        if (attributeName.trim().equals(attribute.getName().trim())) {
            requriedAttribute = attribute;
            break;
        }
    }
    return requriedAttribute;
}

/**
 * Handles the 'Edit Limit' action: rebuilds the condition list of the expression's first
 * rule from the values currently entered in the criterion panels.
 *
 * @param componentPanel criterion panels to read edited conditions from
 * @param expression expression whose first operand (an {@link IRule}) is replaced
 */
public void performEditLimitAction(JXPanel[] componentPanel, IExpression expression) {
    List<ICondition> conditionList = new ArrayList<ICondition>();
    for (int j = 0; j < componentPanel.length; j++) {
        AbstractTypePanel panel = (AbstractTypePanel) componentPanel[j];
        String conditionString = panel.getConditionItem();
        ArrayList<String> values = panel.getValues();
        // "Between" requires exactly two bounds; reject a half-filled range.
        if (0 == conditionString.compareToIgnoreCase("Between") && (values.size() == 1)) {
            JOptionPane.showInternalMessageDialog((this.contentPanel).getParent().getParent().getParent(),
                    "Please enter both the values for between operator.", "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        // Null-check operators need no values; anything else needs at least one value.
        if ((conditionString.equals("Is Null")) || conditionString.equals("Is Not Null") || (values.size() != 0)) {
            ICondition condition = Cab2bQueryObjectFactory.createCondition();
            final AttributeInterface attribute = panel.getAttributeEntity();
            condition.setAttribute(attribute);
            condition.setRelationalOperator(edu.wustl.cab2b.client.ui.query.Utility.getRelationalOperator(conditionString));
            for (int i = 0; i < values.size(); i++) {
                condition.addValue(values.get(i));
            }
            conditionList.add(condition);
        }
    }
    if (conditionList.isEmpty()) {
        MainSearchPanel mainSearchPanel = (MainSearchPanel) ((JXPanel) contentPanel).getParent().getParent();
        JOptionPane.showInternalMessageDialog(
                mainSearchPanel.getParent(),
                "This rule cannot be added as it is not associated with the added rules.", "Error",
                JOptionPane.ERROR_MESSAGE);
    } else {
        // Replace all existing conditions of the rule with the freshly edited set.
        IRule rule = (IRule) expression.getOperand(0);
        rule.removeAllConditions();
        for (int i = 0; i < conditionList.size(); i++) {
            rule.addCondition(conditionList.get(i));
        }
    }
}

/**
 * Generates the titled search-summary panel shown above the results.
 *
 * @param numberOfResults number of results obtained
 * @return titled panel whose caption reports the result count (or "No result found.")
 */
public JXTitledPanel displaySearchSummary(int numberOfResults) {
    String message = (numberOfResults == 0) ? "No result found."
            : "Search Results :- Total results ( " + numberOfResults + " )";
    JXTitledPanel titledSearchResultsPanel = new Cab2bTitledPanel(message);
    // Horizontal gradient from light blue to white for the title bar.
    GradientPaint gp = new GradientPaint(new Point2D.Double(.05d, 0), new Color(185, 211, 238),
            new Point2D.Double(.95d, 0), Color.WHITE);
    titledSearchResultsPanel.setTitlePainter(new BasicGradientPainter(gp));
    titledSearchResultsPanel.setBorder(new EmptyBorder(0, 0, 0, 0));
    titledSearchResultsPanel.setTitleFont(new Font("SansSerif", Font.BOLD, 11));
    titledSearchResultsPanel.setTitleForeground(Color.BLACK);
    return titledSearchResultsPanel;
}

/**
 * Refreshes the add-limit page with the generated criterion panels and, when invoked from
 * the category-selection card, wires up the search/result panels and advances the wizard.
 *
 * @param attributeComponentPanel dynamically generated criterion panels, one per attribute
 * @param className display name of the class/category being constrained
 */
private void performAction(JXPanel[] attributeComponentPanel, String className) {
    Container container = ((JXPanel) (contentPanel)).getParent();
    if (container instanceof SearchCenterPanel) {
        SearchCenterPanel searchCenterPanel = (SearchCenterPanel) container;
        /*
         * Use the parent reference to in turn get a reference to the
         * navigation searchPanel, and cause it to move to the next card.
         */
        MainSearchPanel mainSearchPanel = (MainSearchPanel) (searchCenterPanel.getParent());
        mainSearchPanel.getNavigationPanel().enableButtons();
        /*
         * Get the searchPanel corresponding to the currently selcted card
         * and refresh it.
         */
        AddLimitPanel addLimitPanel = searchCenterPanel.getAddLimitPanel();
        addLimitPanel.refresh(attributeComponentPanel, className);
        // set search-result searchPanel in AddLimit searchPanel and move to
        // next tab
        if (searchCenterPanel.getSelectedCardIndex() == 0) {
            ChooseCategoryPanel chooseCategoryPanel = searchCenterPanel.getChooseCategoryPanel();
            addLimitPanel.addSearchPanel(chooseCategoryPanel.getSearchPanel());
            SearchResultPanel searchResultPanel = chooseCategoryPanel.getSearchResultPanel();
            if (searchResultPanel != null) {
                addLimitPanel.addResultsPanel(searchResultPanel);
                searchCenterPanel.setAddLimitPanel(addLimitPanel);
            }
            mainSearchPanel.getNavigationPanel().showCard(true);
        }
    }
}

/**
 * Returns the number of page elements displayed per pagination page (fixed at 3).
 *
 * @return int Value represents the number of elements/page.
 */
public int getPageSize() {
    return 3;
};

/**
 * @return the addLimitButtonTop
 */
public Cab2bButton getAddLimitButton() {
    return addLimitButton;
}

/**
 * Action Listener class for Add Limit buttons.
 * Delegates the click to {@link #performAddLimitAction} for the captured panels/entity.
 *
 * @author Deepak_Shingan
 */
class AddLimitButtonListner implements ActionListener {
    // Criterion panels captured at construction time.
    private JXPanel[] panelsToAdd;

    // Entity the new limit applies to.
    private EntityInterface entity;

    public AddLimitButtonListner(final JXPanel[] panelsToAdd, final EntityInterface entity) {
        this.panelsToAdd = panelsToAdd;
        this.entity = entity;
    }

    public void actionPerformed(ActionEvent event) {
        performAddLimitAction(panelsToAdd, entity);
        // 242 px divider position — presumably tuned to the fixed wizard layout; TODO confirm.
        AddLimitPanel.m_innerPane.setDividerLocation(242);
    }
}

/**
 * Action Listener class for Edit Limit buttons.
 * Delegates the click to {@link #performEditLimitAction} for the captured panels/expression.
 *
 * @author Deepak_Shingan
 */
class EditLimitButtonListner implements ActionListener {
    // Criterion panels captured at construction time.
    private JXPanel[] panelsToAdd;

    // Expression whose rule is being edited.
    private IExpression expression;

    public EditLimitButtonListner(final JXPanel[] panelsToAdd, final IExpression expression) {
        this.panelsToAdd = panelsToAdd;
        this.expression = expression;
    }

    public void actionPerformed(ActionEvent event) {
        performEditLimitAction(this.panelsToAdd, this.expression);
    }
}

/**
 * Listener that opens a modal dialog showing the CDE (common data element) details table
 * for the captured entity.
 */
class AttributeDetailsLinkListener implements ActionListener {
    private EntityInterface entity;

    public AttributeDetailsLinkListener(EntityInterface entity) {
        this.entity = entity;
    }

    public void actionPerformed(ActionEvent event) {
        Cab2bTable cab2bTable = new Cab2bTable(new CDETableModel(entity));
        cab2bTable.setBorder(null);
        cab2bTable.setRowHeightEnabled(true);
        cab2bTable.setShowGrid(false);
        // Fixed column widths for the 4-column CDE table (last column holds long text).
        cab2bTable.getColumnModel().getColumn(0).setPreferredWidth(50);
        cab2bTable.getColumnModel().getColumn(1).setPreferredWidth(10);
        cab2bTable.getColumnModel().getColumn(2).setPreferredWidth(30);
        cab2bTable.getColumnModel().getColumn(3).setPreferredWidth(320);
        cab2bTable.setRowSelectionAllowed(false);
        for (int j = 0; j < 4; j++) {
            // Wrapping renderer so long descriptions grow the row height.
            cab2bTable.getColumnModel().getColumn(j).setCellRenderer(new MyCellRenderer());
        }
        cab2bTable.getTableHeader().setFont(new Font("Arial", Font.BOLD, 14));
        JScrollPane jScrollPane = new JScrollPane(cab2bTable, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
                JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        jScrollPane.setBorder(null);
        /*
         * WindowUtilities.showInDialog(NewWelcomePanel.mainFrame,
         * jScrollPane, "CDE Details", Constants.WIZARD_SIZE2_DIMENSION,
         * true, false);
         */
        WindowUtilities.showInDialog(NewWelcomePanel.getMainFrame(), jScrollPane, "CDE Details",
                Constants.WIZARD_SIZE2_DIMENSION, true, false);
    }
}

/**
 * @return the constraintButtonPanel
 */
public Cab2bPanel getConstraintButtonPanel() {
    return constraintButtonPanel;
}

/**
 * Table cell renderer backed by a wrapping {@link JTextArea}; grows the row height to fit
 * the wrapped text of the widest cell.
 */
class MyCellRenderer extends JTextArea implements TableCellRenderer {
    public MyCellRenderer() {
        setLineWrap(true);
        setWrapStyleWord(true);
    }

    public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
            boolean hasFocus, int row, int column) {
        if (value != null) {
            setText(value.toString());
        }
        // Constrain to the column width so getPreferredSize reflects wrapped height.
        setSize(table.getColumnModel().getColumn(column).getWidth(), getPreferredSize().height);
        // Side effect: rendering resizes the table row to fit this cell's text.
        if (table.getRowHeight(row) != getPreferredSize().height) {
            table.setRowHeight(row, getPreferredSize().height);
        }
        return this;
    }
}

/**
 * Read-only table model that adapts {@link CDEDetails} of an entity for display in the
 * CDE details dialog.
 */
private class CDETableModel extends AbstractTableModel {
    // Source of all row/column data; built once per entity.
    private CDEDetails cdeDetails;

    private CDETableModel(EntityInterface entity) {
        super();
        this.cdeDetails = new CDEDetails(entity);
    }

    public int getRowCount() {
        return cdeDetails.getRowCount();
    }

    public int getColumnCount() {
        return cdeDetails.getColumnCount();
    }

    public Object getValueAt(int row, int column) {
        return cdeDetails.getValueAt(row, column);
    }

    public String getColumnName(int column) {
        return cdeDetails.getColumnName(column);
    }
}
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredAspect;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.TopLevelArtifactContext;
import com.google.devtools.build.lib.analysis.TopLevelArtifactHelper;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.configuredtargets.InputFileConfiguredTarget;
import com.google.devtools.build.lib.analysis.configuredtargets.OutputFileConfiguredTarget;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.runtime.BlazeRuntime;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.skyframe.AspectKeyCreator.AspectKey;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.util.io.OutErr;
import java.util.ArrayList;
import java.util.Collection;

/**
 * Handles --show_result and --experimental_show_artifacts.
 *
 * <p>All output goes to the request's {@link OutErr} error stream so it does not interleave
 * with build stdout.
 */
class BuildResultPrinter {
  // Command environment the printer reads workspace paths and the runtime from.
  private final CommandEnvironment env;

  BuildResultPrinter(CommandEnvironment env) {
    this.env = env;
  }

  /**
   * Shows the result of the build. Information includes the list of up-to-date and failed targets
   * and list of output artifacts for successful targets
   *
   * <p>This corresponds to the --show_result flag.
   *
   * @param request the build request (provides options and output streams)
   * @param result outcome of the build; supplies successful target/aspect sets
   * @param configuredTargets all requested top-level targets
   * @param configuredTargetsToSkip targets that were skipped rather than built
   * @param aspects top-level aspects keyed by their {@link AspectKey}
   */
  void showBuildResult(
      BuildRequest request,
      BuildResult result,
      Collection<ConfiguredTarget> configuredTargets,
      Collection<ConfiguredTarget> configuredTargetsToSkip,
      ImmutableMap<AspectKey, ConfiguredAspect> aspects) {
    // NOTE: be careful what you print!  We don't want to create a consistency
    // problem where the summary message and the exit code disagree.  The logic
    // here is already complex.

    BlazeRuntime runtime = env.getRuntime();
    String productName = runtime.getProductName();
    PathPrettyPrinter prettyPrinter =
        OutputDirectoryLinksUtils.getPathPrettyPrinter(
            runtime.getRuleClassProvider().getSymlinkDefinitions(),
            request.getBuildOptions().getSymlinkPrefix(productName),
            productName,
            env.getWorkspace(),
            request.getBuildOptions().printWorkspaceInOutputPathsIfNeeded
                ? env.getWorkingDirectory()
                : env.getWorkspace());
    OutErr outErr = request.getOutErr();

    // Produce output as if validation aspect didn't exist; instead we'll consult validation aspect
    // if we end up printing targets below. Note that in the presence of other aspects, we may print
    // success messages for them but the overall build will still fail if validation aspects (or
    // targets) failed.
    Collection<AspectKey> aspectsToPrint = aspects.keySet();
    if (request.useValidationAspect()) {
      aspectsToPrint =
          aspectsToPrint.stream()
              .filter(
                  k -> !BuildRequest.VALIDATION_ASPECT_NAME.equals(k.getAspectClass().getName()))
              .collect(ImmutableList.toImmutableList());
    }
    final boolean success;
    if (aspectsToPrint.isEmpty()) {
      // Suppress summary if --show_result value is exceeded:
      Collection<ConfiguredTarget> targetsToPrint = filterTargetsToPrint(configuredTargets);
      if (targetsToPrint.size() > request.getBuildOptions().maxResultTargets) {
        return;
      }

      // Filter the targets we care about into three buckets. Targets are only considered successful
      // if they and their validation aspects succeeded. Note we determined above that all aspects
      // are validation aspects, so just use the full keySet() here.
      // Boolean::logicalAnd merges duplicates: a target is validated only if ALL of its
      // validation aspects succeeded.
      ImmutableMap<ConfiguredTargetKey, Boolean> validated =
          aspects.keySet().stream()
              .collect(
                  ImmutableMap.toImmutableMap(
                      AspectKey::getBaseConfiguredTargetKey,
                      k -> result.getSuccessfulAspects().contains(k),
                      Boolean::logicalAnd));

      Collection<ConfiguredTarget> succeeded = new ArrayList<>();
      Collection<ConfiguredTarget> failed = new ArrayList<>();
      Collection<ConfiguredTarget> skipped = new ArrayList<>();
      Collection<ConfiguredTarget> successfulTargets = result.getSuccessfulTargets();
      for (ConfiguredTarget target : targetsToPrint) {
        if (configuredTargetsToSkip.contains(target)) {
          skipped.add(target);
        } else if (successfulTargets.contains(target)
            // Targets with no validation aspect default to "validated" (TRUE).
            && validated.getOrDefault(
                ConfiguredTargetKey.builder().setConfiguredTarget(target).build(), Boolean.TRUE)) {
          succeeded.add(target);
        } else {
          failed.add(target);
        }
      }

      for (ConfiguredTarget target : skipped) {
        outErr.printErr("Target " + target.getLabel() + " was skipped\n");
      }

      TopLevelArtifactContext context = request.getTopLevelArtifactContext();
      for (ConfiguredTarget target : succeeded) {
        Label label = target.getLabel();
        // For up-to-date targets report generated artifacts, but only
        // if they have associated action and not middleman artifacts.
        boolean headerFlag = true;
        for (Artifact artifact :
            TopLevelArtifactHelper.getAllArtifactsToBuild(target, context)
                .getImportantArtifacts()
                .toList()) {
          if (shouldPrint(artifact)) {
            // Print the "up-to-date" header lazily, only when there is something to list.
            if (headerFlag) {
              outErr.printErr("Target " + label + " up-to-date:\n");
              headerFlag = false;
            }
            outErr.printErrLn(formatArtifactForShowResults(prettyPrinter, artifact));
          }
        }
        if (headerFlag) {
          outErr.printErr("Target " + label + " up-to-date (nothing to build)\n");
        }
      }

      for (ConfiguredTarget target : failed) {
        outErr.printErr("Target " + target.getLabel() + " failed to build\n");

        // For failed compilation, it is still useful to examine temp artifacts,
        // (ie, preprocessed and assembler files).
        OutputGroupInfo topLevelProvider = OutputGroupInfo.get(target);
        if (topLevelProvider != null) {
          for (Artifact temp :
              topLevelProvider.getOutputGroup(OutputGroupInfo.TEMP_FILES).toList()) {
            if (temp.getPath().exists()) {
              outErr.printErrLn(
                  "  See temp at " + prettyPrinter.getPrettyPath(temp.getPath().asFragment()));
            }
          }
        }
      }
      success = failed.isEmpty();
    } else {
      // Suppress summary if --show_result value is exceeded:
      if (aspectsToPrint.size() > request.getBuildOptions().maxResultTargets) {
        return;
      }
      // Filter the targets we care about into two buckets:
      Collection<AspectKey> succeeded = new ArrayList<>();
      Collection<AspectKey> failed = new ArrayList<>();
      ImmutableSet<AspectKey> successfulAspects = result.getSuccessfulAspects();
      for (AspectKey aspect : aspectsToPrint) {
        (successfulAspects.contains(aspect) ? succeeded : failed).add(aspect);
      }
      TopLevelArtifactContext context = request.getTopLevelArtifactContext();
      for (AspectKey aspect : succeeded) {
        Label label = aspect.getLabel();
        ConfiguredAspect configuredAspect = aspects.get(aspect);
        String aspectName = aspect.getAspectClass().getName();
        boolean headerFlag = true;
        NestedSet<Artifact> importantArtifacts =
            TopLevelArtifactHelper.getAllArtifactsToBuild(configuredAspect, context)
                .getImportantArtifacts();
        for (Artifact importantArtifact : importantArtifacts.toList()) {
          // NOTE(review): unlike the target branch above, the header prints before the
          // shouldPrint() check — the header can appear even if no artifact line follows.
          if (headerFlag) {
            outErr.printErr("Aspect " + aspectName + " of " + label + " up-to-date:\n");
            headerFlag = false;
          }
          if (shouldPrint(importantArtifact)) {
            outErr.printErrLn(formatArtifactForShowResults(prettyPrinter, importantArtifact));
          }
        }
        if (headerFlag) {
          outErr.printErr(
              "Aspect " + aspectName + " of " + label + " up-to-date (nothing to build)\n");
        }
      }
      for (AspectKey aspect : failed) {
        Label label = aspect.getLabel();
        String aspectName = aspect.getAspectClass().getName();
        outErr.printErr("Aspect " + aspectName + " of " + label + " failed to build\n");
      }
      success = failed.isEmpty();
    }
    if (!success && !request.getOptions(ExecutionOptions.class).verboseFailures) {
      outErr.printErr("Use --verbose_failures to see the command lines of failed build steps.\n");
    }
  }

  /** Returns whether the artifact is worth listing: built by an action and not a middleman. */
  private boolean shouldPrint(Artifact artifact) {
    return !artifact.isSourceArtifact() && !artifact.isMiddlemanArtifact();
  }

  /** Formats one artifact line for --show_result, indented under its target's header. */
  private String formatArtifactForShowResults(PathPrettyPrinter prettyPrinter, Artifact artifact) {
    return "  " + prettyPrinter.getPrettyPath(artifact.getPath().asFragment());
  }

  /**
   * Prints a flat list of all artifacts built by the passed top-level targets.
   *
   * <p>This corresponds to the --experimental_show_artifacts flag.
   *
   * @param request build request supplying the artifact context and output streams
   * @param configuredTargets top-level targets whose important artifacts are listed
   * @param aspects top-level aspects whose important artifacts are listed
   */
  void showArtifacts(
      BuildRequest request,
      Collection<ConfiguredTarget> configuredTargets,
      Collection<ConfiguredAspect> aspects) {

    TopLevelArtifactContext context = request.getTopLevelArtifactContext();
    Collection<ConfiguredTarget> targetsToPrint = filterTargetsToPrint(configuredTargets);

    NestedSetBuilder<Artifact> artifactsBuilder = NestedSetBuilder.stableOrder();
    targetsToPrint.forEach(
        t ->
            artifactsBuilder.addTransitive(
                TopLevelArtifactHelper.getAllArtifactsToBuild(t, context).getImportantArtifacts()));

    aspects.forEach(
        a ->
            artifactsBuilder.addTransitive(
                TopLevelArtifactHelper.getAllArtifactsToBuild(a, context).getImportantArtifacts()));

    OutErr outErr = request.getOutErr();
    outErr.printErrLn("Build artifacts:");

    NestedSet<Artifact> artifacts = artifactsBuilder.build();
    for (Artifact artifact : artifacts.toList()) {
      if (!artifact.isSourceArtifact()) {
        // ">>>" prefix is the machine-readable marker consumers of this flag parse.
        outErr.printErrLn(">>>" + artifact.getPath());
      }
    }
  }

  /**
   * Returns a list of configured targets that should participate in printing.
   *
   * <p>Hidden rules and other inserted targets are ignored.
   */
  private Collection<ConfiguredTarget> filterTargetsToPrint(
      Collection<ConfiguredTarget> configuredTargets) {
    ImmutableList.Builder<ConfiguredTarget> result = ImmutableList.builder();
    for (ConfiguredTarget configuredTarget : configuredTargets) {
      // TODO(bazel-team): this is quite ugly. Add a marker provider for this check.
      if (configuredTarget instanceof InputFileConfiguredTarget) {
        // Suppress display of source files (because we do no work to build them).
        continue;
      }
      if (configuredTarget instanceof RuleConfiguredTarget) {
        RuleConfiguredTarget ruleCt = (RuleConfiguredTarget) configuredTarget;
        if (ruleCt.getRuleClassString().contains("$")) {
          // Suppress display of hidden rules
          continue;
        }
      }
      if (configuredTarget instanceof OutputFileConfiguredTarget) {
        // Suppress display of generated files (because they appear underneath
        // their generating rule), EXCEPT those ones which are not part of the
        // filesToBuild of their generating rule (e.g. .par, _deploy.jar
        // files), OR when a user explicitly requests an output file but not
        // its rule.
        TransitiveInfoCollection generatingRule =
            ((OutputFileConfiguredTarget) configuredTarget).getGeneratingRule();
        if (generatingRule
                .getProvider(FileProvider.class)
                .getFilesToBuild()
                .toSet()
                .containsAll(
                    configuredTarget.getProvider(FileProvider.class).getFilesToBuild().toList())
            && configuredTargets.contains(generatingRule)) {
          continue;
        }
      }

      result.add(configuredTarget);
    }
    return result.build();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.ml.nn;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Map;
import java.util.Spliterator;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.Matrix;
import org.apache.ignite.ml.math.MatrixStorage;
import org.apache.ignite.ml.math.Vector;
import org.apache.ignite.ml.math.exceptions.CardinalityException;
import org.apache.ignite.ml.math.functions.IgniteBiConsumer;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
import org.apache.ignite.ml.math.functions.IgniteFunction;
import org.apache.ignite.ml.math.functions.IgniteTriFunction;
import org.apache.ignite.ml.math.functions.IntIntToDoubleFunction;
import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;

/**
 * Convenient way to create matrix of replicated columns or rows from vector.
 * This class should be considered as utility class: not all matrix methods are implemented here, only those which
 * were necessary for MLPs.
 *
 * <p>The replication is virtual: only the single backing vector is stored, so element
 * writes through the vector are visible in every replicated row/column. Unsupported
 * operations throw {@link UnsupportedOperationException}; some view/setter methods
 * simply return {@code null} (they were never needed for MLPs).
 */
class ReplicatedVectorMatrix implements Matrix {
    /**
     * Vector to replicate.
     */
    private Vector vector;

    /**
     * Flag determining is vector replicated as column or row.
     */
    private boolean asCol;

    /**
     * Count of vector replications.
     */
    private int replicationCnt;

    /**
     * Construct ReplicatedVectorMatrix.
     *
     * @param vector Vector to replicate.
     * @param replicationCnt Count of replications.
     * @param asCol Should vector be replicated as a column or as a row.
     */
    ReplicatedVectorMatrix(Vector vector, int replicationCnt, boolean asCol) {
        this.vector = vector;
        this.asCol = asCol;
        this.replicationCnt = replicationCnt;
    }

    /**
     * Constructor for externalization.
     */
    public ReplicatedVectorMatrix() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean isSequentialAccess() {
        return vector.isSequentialAccess();
    }

    /** {@inheritDoc} */
    @Override public boolean isRandomAccess() {
        return vector.isRandomAccess();
    }

    /** {@inheritDoc} */
    @Override public boolean isDense() {
        return vector.isDense();
    }

    /** {@inheritDoc} */
    @Override public boolean isArrayBased() {
        return vector.isArrayBased();
    }

    /** {@inheritDoc} */
    @Override public boolean isDistributed() {
        return vector.isDistributed();
    }

    /** {@inheritDoc} */
    @Override public double maxValue() {
        // Replication does not change the set of distinct values, so delegate to the vector.
        return vector.maxValue();
    }

    /** {@inheritDoc} */
    @Override public double minValue() {
        return vector.minValue();
    }

    /** {@inheritDoc} */
    @Override public Element maxElement() {
        // Reported coordinates are those of the first replica (row 0 or column 0).
        return new Element() {
            @Override public double get() {
                return vector.maxElement().get();
            }

            @Override public int row() {
                return asCol ? vector.maxElement().index() : 0;
            }

            @Override public int column() {
                return asCol ? 0 : vector.maxElement().index();
            }

            // Intentionally a no-op: elements of a replicated view are not writable this way.
            @Override public void set(double val) {

            }
        };
    }

    /** {@inheritDoc} */
    @Override public Element minElement() {
        return new Element() {
            @Override public double get() {
                return vector.minElement().get();
            }

            @Override public int row() {
                return asCol ? vector.minElement().index() : 0;
            }

            @Override public int column() {
                return asCol ? 0 : vector.minElement().index();
            }

            // Intentionally a no-op (see maxElement()).
            @Override public void set(double val) {

            }
        };
    }

    /** {@inheritDoc} */
    @Override public Element getElement(int row, int col) {
        // Only the index along the replicated vector matters; the other coordinate is folded to 0.
        Vector.Element el = asCol ? vector.getElement(row) : vector.getElement(col);

        int r = asCol ? el.index() : 0;
        int c = asCol ? 0 : el.index();

        return new Element() {
            @Override public double get() {
                return el.get();
            }

            @Override public int row() {
                return r;
            }

            @Override public int column() {
                return c;
            }

            // Intentionally a no-op (see maxElement()).
            @Override public void set(double val) {

            }
        };
    }

    /** {@inheritDoc} */
    @Override public Matrix swapRows(int row1, int row2) {
        // Swapping rows of a row-replicated matrix is a no-op (all rows are identical).
        // NOTE(review): in the column case swap() mutates the shared backing vector in place.
        return asCol ? new ReplicatedVectorMatrix(swap(row1, row2), replicationCnt, asCol) : this;
    }

    /** Swaps two entries of the backing vector in place and returns it. */
    private Vector swap(int idx1, int idx2) {
        double val = vector.getX(idx1);

        vector.setX(idx1, vector.getX(idx2));
        vector.setX(idx2, val);

        return vector;
    }

    /** {@inheritDoc} */
    @Override public Matrix swapColumns(int col1, int col2) {
        // Mirror of swapRows(): identical columns make the column-replicated case a no-op.
        return asCol ? this : new ReplicatedVectorMatrix(swap(col1, col2), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(double val) {
        return new ReplicatedVectorMatrix(vector.assign(val), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(double[][] vals) {
        // Arbitrary per-cell data cannot stay replicated; fall back to a dense matrix.
        return new DenseLocalOnHeapMatrix(vals);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(Matrix mtx) {
        return mtx.copy();
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(IntIntToDoubleFunction fun) {
        // The function is sampled only along the replicated axis (other index fixed to 0),
        // so the result stays representable as a replicated vector.
        Vector vec = asCol ? this.vector.assign(idx -> fun.apply(idx, 0)) : this.vector.assign(idx -> fun.apply(0, idx));

        return new ReplicatedVectorMatrix(vec, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix map(IgniteDoubleFunction<Double> fun) {
        Vector vec = vector.map(fun);

        return new ReplicatedVectorMatrix(vec, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix map(Matrix mtx, IgniteBiFunction<Double, Double, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public int nonZeroElements() {
        // Each non-zero vector entry appears once per replica.
        return vector.nonZeroElements() * (asCol ? columnSize() : rowSize());
    }

    /** {@inheritDoc} */
    @Override public Spliterator<Double> allSpliterator() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Spliterator<Double> nonZeroSpliterator() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix assignColumn(int col, Vector vec) {
        // Materializes a dense matrix: fill every row/column with the replicated vector,
        // then overwrite the requested column.
        int rows = asCol ? vector.size() : replicationCnt;
        int cols = asCol ? replicationCnt : vector.size();
        int times = asCol ? cols : rows;

        Matrix res = new DenseLocalOnHeapMatrix(rows, cols);

        IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
        IgniteBiConsumer<Integer, Vector> assigner = res::assignColumn;

        assign(replicantAssigner, assigner, vector, vec, times, col);

        return res;
    }

    /** {@inheritDoc} */
    @Override public Matrix assignRow(int row, Vector vec) {
        // Same materialization as assignColumn(), but the final overwrite targets a row.
        int rows = asCol ? vector.size() : replicationCnt;
        int cols = asCol ? replicationCnt : vector.size();
        int times = asCol ? cols : rows;

        Matrix res = new DenseLocalOnHeapMatrix(rows, cols);

        IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
        IgniteBiConsumer<Integer, Vector> assigner = res::assignRow;

        assign(replicantAssigner, assigner, vector, vec, times, row);

        return res;
    }

    /**
     * Fills a materialized matrix: writes {@code replicant} into all {@code times} rows/columns
     * via {@code replicantAssigner}, then overwrites slot {@code idx} with {@code vector} via
     * {@code assigner}.
     */
    private void assign(IgniteBiConsumer<Integer, Vector> replicantAssigner,
        IgniteBiConsumer<Integer, Vector> assigner, Vector replicant, Vector vector, int times, int idx) {
        for (int i = 0; i < times; i++)
            replicantAssigner.accept(i, replicant);
        assigner.accept(idx, vector);
    }

    /** {@inheritDoc} */
    @Override public Vector foldRows(IgniteFunction<Vector, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Vector foldColumns(IgniteFunction<Vector, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public <T> T foldMap(IgniteBiFunction<T, Double, T> foldFun, IgniteDoubleFunction<Double> mapFun,
        T zeroVal) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean density(double threshold) {
        return false;
    }

    /** {@inheritDoc} */
    @Override public int columnSize() {
        return asCol ? replicationCnt : vector.size();
    }

    /** {@inheritDoc} */
    @Override public int rowSize() {
        return asCol ? vector.size() : replicationCnt;
    }

    /** {@inheritDoc} */
    @Override public Matrix divide(double x) {
        return new ReplicatedVectorMatrix(vector.divide(x), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public double get(int row, int col) {
        return asCol ? vector.get(row) : vector.get(col);
    }

    /** {@inheritDoc} */
    @Override public double getX(int row, int col) {
        return asCol ? vector.getX(row) : vector.getX(col);
    }

    /** {@inheritDoc} */
    @Override public MatrixStorage getStorage() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix copy() {
        Vector cp = vector.copy();

        return new ReplicatedVectorMatrix(cp, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix like(int rows, int cols) {
        // NOTE(review): the rows/cols arguments are ignored — the result keeps this matrix's
        // dimensions. Looks intentional for MLP usage, but confirm before relying on it.
        Vector lk = vector.like(vector.size());

        return new ReplicatedVectorMatrix(lk, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Vector likeVector(int crd) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix minus(Matrix mtx) {
        throw new UnsupportedOperationException();
    }

    /**
     * Specialized optimized version of minus for ReplicatedVectorMatrix.
     *
     * @param mtx Matrix to be subtracted.
     * @return new ReplicatedVectorMatrix resulting from subtraction.
     */
    public Matrix minus(ReplicatedVectorMatrix mtx) {
        // Only defined when both operands replicate along the same axis; then the
        // subtraction reduces to vector subtraction.
        if (isColumnReplicated() == mtx.isColumnReplicated()) {
            checkCardinality(mtx.rowSize(), mtx.columnSize());

            Vector minus = vector.minus(mtx.replicant());

            return new ReplicatedVectorMatrix(minus, replicationCnt, asCol);
        }

        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix plus(double x) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix plus(Matrix mtx) {
        throw new UnsupportedOperationException();
    }

    /**
     * Specialized optimized version of plus for ReplicatedVectorMatrix.
     *
     * @param mtx Matrix to be added.
     * @return new ReplicatedVectorMatrix resulting from addition.
     */
    public Matrix plus(ReplicatedVectorMatrix mtx) {
        if (isColumnReplicated() == mtx.isColumnReplicated()) {
            checkCardinality(mtx.rowSize(), mtx.columnSize());

            Vector plus = vector.plus(mtx.replicant());

            return new ReplicatedVectorMatrix(plus, replicationCnt, asCol);
        }

        throw new UnsupportedOperationException();
    }

    /**
     * Checks that dimensions of this matrix are equal to given dimensions.
     *
     * @param rows Rows.
     * @param cols Columns.
     */
    private void checkCardinality(int rows, int cols) {
        if (rows != rowSize())
            throw new CardinalityException(rowSize(), rows);

        if (cols != columnSize())
            throw new CardinalityException(columnSize(), cols);
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid guid() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix set(int row, int col, double val) {
        // Writes through to the backing vector, so every replica observes the change.
        vector.set(asCol ? row : col, val);

        return this;
    }

    /** {@inheritDoc} */
    @Override public Matrix setRow(int row, double[] data) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector getRow(int row) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix setColumn(int col, double[] data) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector getCol(int col) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix setX(int row, int col, double val) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix times(double x) {
        return new ReplicatedVectorMatrix(vector.times(x), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix times(Matrix mtx) {
        // Row-replicated case only: each row of the product equals vector · mtx, so the
        // result is itself row-replicated.
        if (!asCol) {
            Vector row = vector.like(mtx.columnSize());

            for (int i = 0; i < mtx.columnSize(); i++)
                row.setX(i, vector.dot(mtx.getCol(i)));

            return new ReplicatedVectorMatrix(row, replicationCnt, false);
        }
        else
            throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Vector times(Vector vec) {
        Vector res = vec.like(vec.size());

        if (asCol) {
            // Column case: every column equals the vector, so (M v)_i = vector_i * sum(v).
            for (int i = 0; i < rowSize(); i++)
                res.setX(i, vec.sum() * vector.getX(i));
        }
        else {
            // Row case: every row equals the vector, so every entry is vector · v.
            double val = vector.dot(vec);

            for (int i = 0; i < rowSize(); i++)
                res.setX(i, val);
        }

        return res;
    }

    /** {@inheritDoc} */
    @Override public double maxAbsRowSumNorm() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public double sum() {
        return vector.sum() * replicationCnt;
    }

    /** {@inheritDoc} */
    @Override public Matrix transpose() {
        // Transposing just flips the replication axis; the backing vector is shared, not copied.
        return new ReplicatedVectorMatrix(vector, replicationCnt, !asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix viewPart(int[] off, int[] size) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix viewPart(int rowOff, int rows, int colOff, int cols) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector viewRow(int row) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector viewColumn(int col) {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector viewDiagonal() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public void compute(int row, int col, IgniteTriFunction<Integer, Integer, Double, Double> f) {
        // This operation cannot be performed because computing function depends on both indexes and therefore
        // result of compute will be in general case not ReplicatedVectorMatrix.
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> getMetaStorage() {
        return null;
    }

    /**
     * Returns true if matrix constructed by replicating vector as column and false otherwise.
     */
    public boolean isColumnReplicated() {
        return asCol;
    }

    /**
     * Returns replicated vector.
     */
    public Vector replicant() {
        return vector;
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.projectRoots;

import com.intellij.execution.CantRunException;
import com.intellij.execution.CommandLineWrapperUtil;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.Platform;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.GeneralCommandLine.ParentEnvironmentType;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.configurations.SimpleJavaParameters;
import com.intellij.execution.target.TargetEnvironmentConfiguration;
import com.intellij.execution.target.TargetEnvironmentRequest;
import com.intellij.execution.target.TargetedCommandLineBuilder;
import com.intellij.execution.target.java.JavaLanguageRuntimeConfiguration;
import com.intellij.execution.target.local.LocalTargetEnvironment;
import com.intellij.execution.target.local.LocalTargetEnvironmentFactory;
import com.intellij.execution.target.local.LocalTargetEnvironmentRequest;
import com.intellij.execution.target.value.TargetValue;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.io.JarUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PathUtil;
import com.intellij.util.PathsList;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.execution.ParametersListUtil;
import com.intellij.util.lang.JavaVersion;
import com.intellij.util.lang.UrlClassLoader;
import gnu.trove.THashMap;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.StandardCharsets;
import java.nio.charset.UnsupportedCharsetException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

/**
 * Utilities for locating/validating JDK installations on disk and for building the JVM
 * command line for a {@link SimpleJavaParameters} set, either on the local machine or on a
 * remote execution target (via {@link TargetEnvironmentRequest} / {@link TargetValue}).
 */
public final class JdkUtil {
  /** User-data key under which the content of generated argfiles / wrapper files is stored for display/diagnostics. */
  public static final Key<Map<String, String>> COMMAND_LINE_CONTENT = Key.create("command.line.content");

  /**
   * The VM property is needed to workaround incorrect escaped URLs handling in WebSphere,
   * see <a href="https://youtrack.jetbrains.com/issue/IDEA-126859#comment=27-778948">IDEA-126859</a> for additional details
   */
  public static final String PROPERTY_DO_NOT_ESCAPE_CLASSPATH_URL = "idea.do.not.escape.classpath.url";

  // Loaded reflectively; lives in the IDE's rt jar, may be absent from the classpath.
  private static final String WRAPPER_CLASS = "com.intellij.rt.execution.CommandLineWrapper";
  private static final String JAVAAGENT = "-javaagent";
  private static final Logger LOG = Logger.getInstance(JdkUtil.class);

  private JdkUtil() { }

  /**
   * Returns the specified attribute of the JDK (examines 'rt.jar'), or {@code null} if cannot determine the value.
   */
  public static @Nullable String getJdkMainAttribute(@NotNull Sdk jdk, @NotNull Attributes.Name attribute) {
    if (attribute == Attributes.Name.IMPLEMENTATION_VERSION) {
      // optimization: JDK version string is cached
      String versionString = jdk.getVersionString();
      if (versionString != null) {
        // version is quoted inside the cached string, e.g.: java version "1.8.0_252"
        int start = versionString.indexOf('"'), end = versionString.lastIndexOf('"');
        if (start >= 0 && end > start) {
          return versionString.substring(start + 1, end);
        }
      }
    }

    String homePath = jdk.getHomePath();
    if (homePath != null) {
      // probe the well-known jar locations of Oracle/OpenJDK, modular, Apple and IBM layouts
      File signatureJar = FileUtil.findFirstThatExist(
        homePath + "/jre/lib/rt.jar",
        homePath + "/lib/rt.jar",
        homePath + "/lib/jrt-fs.jar",
        homePath + "/jre/lib/vm.jar",
        homePath + "/../Classes/classes.jar");
      if (signatureJar != null) {
        return JarUtil.getJarAttribute(signatureJar, attribute);
      }
    }

    return null;
  }

  /**
   * Suggests a display name for a JDK from its version string, e.g. "1.8" or "11" or "15-ea";
   * returns {@code null} when the version string cannot be parsed.
   */
  public static @Nullable String suggestJdkName(@Nullable String versionString) {
    JavaVersion version = JavaVersion.tryParse(versionString);
    if (version == null) return null;

    StringBuilder suggested = new StringBuilder();
    if (version.feature < 9) suggested.append("1.");  // pre-9 naming convention: "1.x"
    suggested.append(version.feature);
    if (version.ea) suggested.append("-ea");
    return suggested.toString();
  }

  /** Returns {@code true} if the path looks like a JDK home (has a javac binary and a runtime). */
  public static boolean checkForJdk(@NotNull String homePath) {
    return checkForJdk(new File(FileUtil.toSystemDependentName(homePath)));
  }

  /** Returns {@code true} if the directory looks like a JDK home (has a javac binary and a runtime). */
  public static boolean checkForJdk(@NotNull File homePath) {
    return (new File(homePath, "bin/javac").isFile() || new File(homePath, "bin/javac.exe").isFile()) &&
           checkForRuntime(homePath.getAbsolutePath());
  }

  /** Returns {@code true} if the path contains a java launcher binary. */
  public static boolean checkForJre(@NotNull String homePath) {
    return checkForJre(new File(FileUtil.toSystemDependentName(homePath)));
  }

  /** Returns {@code true} if the directory contains a java launcher binary. */
  public static boolean checkForJre(@NotNull File homePath) {
    return new File(homePath, "bin/java").isFile() || new File(homePath, "bin/java.exe").isFile();
  }

  /** Returns {@code true} if the directory contains class libraries of any known JDK/JRE layout. */
  public static boolean checkForRuntime(@NotNull String homePath) {
    return new File(homePath, "jre/lib/rt.jar").exists() ||          // JDK
           new File(homePath, "lib/rt.jar").exists() ||              // JRE
           isModularRuntime(homePath) ||                             // Jigsaw JDK/JRE
           new File(homePath, "../Classes/classes.jar").exists() ||  // Apple JDK
           new File(homePath, "jre/lib/vm.jar").exists() ||          // IBM JDK
           new File(homePath, "classes").isDirectory();              // custom build
  }

  /** Returns {@code true} for a Jigsaw (Java 9+) runtime layout. */
  public static boolean isModularRuntime(@NotNull String homePath) {
    return isModularRuntime(new File(FileUtil.toSystemDependentName(homePath)));
  }

  /** Returns {@code true} for a Jigsaw (Java 9+) runtime layout (packed or exploded). */
  public static boolean isModularRuntime(@NotNull File homePath) {
    return new File(homePath, "lib/jrt-fs.jar").isFile() || isExplodedModularRuntime(homePath.getPath());
  }

  /** Returns {@code true} for an exploded-modules build of a modular runtime. */
  public static boolean isExplodedModularRuntime(@NotNull String homePath) {
    return new File(homePath, "modules/java.base").isDirectory();
  }

  /**
   * Builds the full JVM command line for the given parameters on the given execution target.
   * For a local target (or when no target configuration is given) the VM executable comes from the
   * project SDK; for a remote target it is derived from the target's Java runtime configuration.
   *
   * @throws CantRunException when no usable JDK / Java runtime can be resolved
   */
  @ApiStatus.Internal
  @NotNull
  public static TargetedCommandLineBuilder setupJVMCommandLine(@NotNull SimpleJavaParameters javaParameters,
                                                               @NotNull TargetEnvironmentRequest request,
                                                               @Nullable TargetEnvironmentConfiguration targetConfiguration) throws CantRunException {
    TargetedCommandLineBuilder commandLine = new TargetedCommandLineBuilder();
    JavaLanguageRuntimeConfiguration javaConfiguration = targetConfiguration != null ?
                                                         targetConfiguration.getRuntimes().findByType(JavaLanguageRuntimeConfiguration.class) : null;
    if (request instanceof LocalTargetEnvironmentRequest || targetConfiguration == null) {
      // local run: resolve the VM executable from the configured SDK
      Sdk jdk = javaParameters.getJdk();
      if (jdk == null) throw new CantRunException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
      SdkTypeId type = jdk.getSdkType();
      if (!(type instanceof JavaSdkType)) throw new CantRunException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
      String exePath = ((JavaSdkType)type).getVMExecutablePath(jdk);
      if (exePath == null) throw new CantRunException(ExecutionBundle.message("run.configuration.cannot.find.vm.executable"));
      commandLine.setExePath(exePath);
    }
    else {
      // remote run: join <javaHome>/bin/java using the target platform's separator
      if (javaConfiguration == null) {
        throw new CantRunException("Cannot find Java configuration in " + targetConfiguration.getDisplayName() + " target");
      }
      Platform platform = request.getTargetPlatform().getPlatform();
      String java = platform == Platform.WINDOWS ? "java.exe" : "java";
      commandLine.setExePath(StringUtil.join(new String[]{javaConfiguration.getHomePath(), "bin", java}, String.valueOf(platform.fileSeparator)));
    }
    setupCommandLine(commandLine, request, javaParameters, javaConfiguration);
    return commandLine;
  }

  /** Convenience overload: builds a {@link GeneralCommandLine} for a local run. */
  @NotNull
  public static GeneralCommandLine setupJVMCommandLine(@NotNull SimpleJavaParameters javaParameters) throws CantRunException {
    LocalTargetEnvironmentFactory environmentFactory = new LocalTargetEnvironmentFactory();
    TargetEnvironmentRequest request = environmentFactory.createRequest();
    return environmentFactory.prepareRemoteEnvironment(request, new EmptyProgressIndicator())
      .createGeneralCommandLine(setupJVMCommandLine(javaParameters, request, null).build());
  }

  /**
   * Appends working directory, environment, VM options, classpath/module path, main class and
   * program arguments. Depending on the dynamic-classpath settings the classpath may be passed
   * via an @argfile, a classpath jar, or a CommandLineWrapper file instead of a plain -classpath.
   */
  private static void setupCommandLine(@NotNull TargetedCommandLineBuilder commandLine,
                                       @NotNull TargetEnvironmentRequest request,
                                       @NotNull SimpleJavaParameters javaParameters,
                                       @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration) throws CantRunException {
    String workingDirectory = javaParameters.getWorkingDirectory();
    if (workingDirectory != null) {
      commandLine.setWorkingDirectory(request.createUpload(workingDirectory));
    }

    javaParameters.getEnv().forEach((key, value) -> commandLine.addEnvironmentVariable(key, value));

    if (request instanceof LocalTargetEnvironmentRequest) {
      ParentEnvironmentType type = javaParameters.isPassParentEnvs() ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE;
      ((LocalTargetEnvironmentRequest)request).setParentEnvironmentType(type);
    }

    ParametersList vmParameters = javaParameters.getVMParametersList();
    boolean dynamicClasspath = javaParameters.isDynamicClasspath();
    boolean dynamicVMOptions = dynamicClasspath && javaParameters.isDynamicVMOptions() && useDynamicVMOptions();
    boolean dynamicParameters = dynamicClasspath && javaParameters.isDynamicParameters() && useDynamicParameters();
    boolean dynamicMainClass = false;

    // copies 'javaagent' .jar files to the beginning of the classpath to load agent classes faster
    if (isUrlClassloader(vmParameters)) {
      if (!(request instanceof LocalTargetEnvironmentRequest)) {
        throw new CantRunException("Cannot run application with UrlClassPath on the remote target.");
      }
      for (String parameter : vmParameters.getParameters()) {
        if (parameter.startsWith(JAVAAGENT)) {
          // agent path is what follows "-javaagent:" up to the optional "=<args>" part
          int agentArgsIdx = parameter.indexOf("=", JAVAAGENT.length());
          javaParameters.getClassPath().addFirst(parameter.substring(JAVAAGENT.length() + 1, agentArgsIdx > -1 ? agentArgsIdx : parameter.length()));
        }
      }
    }

    if (dynamicClasspath) {
      Charset cs = StandardCharsets.UTF_8;  // todo detect JNU charset from VM options?
      Class<?> commandLineWrapper;
      if (javaParameters.isArgFile()) {
        setArgFileParams(commandLine, request, runtimeConfiguration, javaParameters, vmParameters, dynamicVMOptions, dynamicParameters, cs);
        dynamicMainClass = dynamicParameters;  // the argfile already carries the main class when parameters are dynamic
      }
      else if (!explicitClassPath(vmParameters) &&
               javaParameters.getJarPath() == null &&
               (commandLineWrapper = getCommandLineWrapperClass()) != null) {
        if (javaParameters.isUseClasspathJar()) {
          setClasspathJarParams(commandLine, request, runtimeConfiguration, javaParameters, vmParameters, commandLineWrapper, dynamicVMOptions, dynamicParameters);
        }
        else if (javaParameters.isClasspathFile()) {
          setCommandLineWrapperParams(commandLine, request, runtimeConfiguration, javaParameters, vmParameters, commandLineWrapper, dynamicVMOptions, dynamicParameters, cs);
        }
      }
      else {
        // explicit -cp in VM options, a -jar run, or no wrapper class available: fall back to a plain command line
        dynamicClasspath = dynamicParameters = false;
      }
    }

    if (!dynamicClasspath) {
      appendParamsEncodingClasspath(commandLine, request, runtimeConfiguration, javaParameters, vmParameters);
    }

    if (!dynamicMainClass) {
      for (TargetValue<String> parameter : getMainClassParams(javaParameters, request)) {
        commandLine.addParameter(parameter);
      }
    }

    if (!dynamicParameters) {
      for (String parameter : javaParameters.getProgramParametersList().getList()) {
        commandLine.addParameter(parameter);
      }
    }
  }

  /** Local-only variant feeding the result back into an existing {@link GeneralCommandLine}. */
  private static void setupCommandLine(@NotNull GeneralCommandLine commandLine, @NotNull SimpleJavaParameters javaParameters) throws CantRunException {
    TargetedCommandLineBuilder targetedCommandLineBuilder = new TargetedCommandLineBuilder();
    LocalTargetEnvironmentFactory environmentFactory = new LocalTargetEnvironmentFactory();
    TargetEnvironmentRequest request = environmentFactory.createRequest();
    setupCommandLine(targetedCommandLineBuilder, request, javaParameters, null);
    LocalTargetEnvironment environment = environmentFactory.prepareRemoteEnvironment(request, new EmptyProgressIndicator());
    GeneralCommandLine generalCommandLine = environment.createGeneralCommandLine(targetedCommandLineBuilder.build());
    commandLine.withParentEnvironmentType(javaParameters.isPassParentEnvs() ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE);
    commandLine.getParametersList().addAll(generalCommandLine.getParametersList().getList());
    commandLine.getEnvironment().putAll(generalCommandLine.getEnvironment());
  }

  /** True when the app is configured to run under IDEA's UrlClassLoader via -Djava.system.class.loader. */
  private static boolean isUrlClassloader(ParametersList vmParameters) {
    return UrlClassLoader.class.getName().equals(vmParameters.getPropertyValue("java.system.class.loader"));
  }

  /** True when the VM options already specify a classpath explicitly. */
  private static boolean explicitClassPath(ParametersList vmParameters) {
    return vmParameters.hasParameter("-cp") || vmParameters.hasParameter("-classpath") || vmParameters.hasParameter("--class-path");
  }

  /** True when the VM options already specify a module path explicitly. */
  private static boolean explicitModulePath(ParametersList vmParameters) {
    return vmParameters.hasParameter("-p") || vmParameters.hasParameter("--module-path");
  }

  /**
   * Passes classpath/module path (and optionally VM options, main class and program args) via a
   * Java 9+ @argfile. The file is written asynchronously once all target path values resolve.
   */
  private static void setArgFileParams(@NotNull TargetedCommandLineBuilder commandLine,
                                       @NotNull TargetEnvironmentRequest request,
                                       @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration,
                                       @NotNull SimpleJavaParameters javaParameters,
                                       @NotNull ParametersList vmParameters,
                                       boolean dynamicVMOptions,
                                       boolean dynamicParameters,
                                       Charset cs) throws CantRunException {
    try {
      Platform platform = request.getTargetPlatform().getPlatform();
      String pathSeparator = String.valueOf(platform.pathSeparator);

      Collection<Promise<String>> promises = new ArrayList<>();

      TargetValue<String> classPathParameter;
      PathsList classPath = javaParameters.getClassPath();
      if (!classPath.isEmpty() && !explicitClassPath(vmParameters)) {
        List<TargetValue<String>> pathValues = getClassPathValues(request, runtimeConfiguration, javaParameters);
        classPathParameter = TargetValue.composite(pathValues, values -> StringUtil.join(values, pathSeparator));
        promises.add(classPathParameter.getTargetValue());
      }
      else {
        classPathParameter = null;
      }

      TargetValue<String> modulePathParameter;
      PathsList modulePath = javaParameters.getModulePath();
      if (!modulePath.isEmpty() && !explicitModulePath(vmParameters)) {
        // NOTE(review): this reuses getClassPathValues() (class path entries) for the module
        // path — looks like a copy-paste; confirm the module path should not be uploaded instead.
        List<TargetValue<String>> pathValues = getClassPathValues(request, runtimeConfiguration, javaParameters);
        modulePathParameter = TargetValue.composite(pathValues, values -> StringUtil.join(values, pathSeparator));
        promises.add(modulePathParameter.getTargetValue());
      }
      else {
        modulePathParameter = null;
      }

      List<TargetValue<String>> mainClassParameters = dynamicParameters ? getMainClassParams(javaParameters, request) : Collections.emptyList();
      promises.addAll(ContainerUtil.map(mainClassParameters, TargetValue::getTargetValue));

      File argFile = FileUtil.createTempFile("idea_arg_file" + new Random().nextInt(Integer.MAX_VALUE), null);
      commandLine.addFileToDeleteOnTermination(argFile);

      // write the argfile only after every target value has resolved; blockingGet(0) below
      // presumably never blocks because the promises are already fulfilled — TODO confirm
      Promises.collectResults(promises).onSuccess(__ -> {
        List<String> fileArgs = new ArrayList<>();
        if (dynamicVMOptions) {
          fileArgs.addAll(vmParameters.getList());
        }
        else {
          appendVmParameters(commandLine, request, vmParameters);
        }
        try {
          if (classPathParameter != null) {
            fileArgs.add("-classpath");
            fileArgs.add(classPathParameter.getTargetValue().blockingGet(0));
          }
          if (modulePathParameter != null) {
            fileArgs.add("-p");
            fileArgs.add(modulePathParameter.getTargetValue().blockingGet(0));
          }
          for (TargetValue<String> mainClassParameter : mainClassParameters) {
            fileArgs.add(mainClassParameter.getTargetValue().blockingGet(0));
          }
          if (dynamicParameters) {
            fileArgs.addAll(javaParameters.getProgramParametersList().getList());
          }
          CommandLineWrapperUtil.writeArgumentsFile(argFile, fileArgs, platform.lineSeparator, cs);
        }
        catch (IOException e) {
          //todo[remoteServers]: interrupt preparing environment
        }
        catch (ExecutionException | TimeoutException e) {
          LOG.error("Couldn't resolve target value", e);
        }
      });

      HashMap<String, String> commandLineContent = new HashMap<>();
      commandLine.putUserData(COMMAND_LINE_CONTENT, commandLineContent);

      appendEncoding(javaParameters, commandLine, vmParameters);
      TargetValue<String> argFileParameter = request.createUpload(argFile.getAbsolutePath());
      commandLine.addParameter(TargetValue.map(argFileParameter, s -> "@" + s));
      addCommandLineContentOnResolve(commandLineContent, argFile, argFileParameter);
    }
    catch (IOException e) {
      throwUnableToCreateTempFile(e);
    }
  }

  /**
   * Passes classpath (and optionally VM options / program args) through the CommandLineWrapper
   * class: parameters are written to temp files which the wrapper reads at startup.
   */
  private static void setCommandLineWrapperParams(@NotNull TargetedCommandLineBuilder commandLine,
                                                  @NotNull TargetEnvironmentRequest request,
                                                  @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration,
                                                  @NotNull SimpleJavaParameters javaParameters,
                                                  @NotNull ParametersList vmParameters,
                                                  @NotNull Class<?> commandLineWrapper,
                                                  boolean dynamicVMOptions,
                                                  boolean dynamicParameters,
                                                  Charset cs) throws CantRunException {
    try {
      String lineSeparator = request.getTargetPlatform().getPlatform().lineSeparator;
      int pseudoUniquePrefix = new Random().nextInt(Integer.MAX_VALUE);
      File vmParamsFile = null;
      if (dynamicVMOptions) {
        // only user -D properties go into the file; everything else stays on the command line
        List<String> toWrite = new ArrayList<>();
        for (String param : vmParameters.getList()) {
          if (isUserDefinedProperty(param)) {
            toWrite.add(param);
          }
          else {
            appendVmParameter(commandLine, request, param);
          }
        }
        if (!toWrite.isEmpty()) {
          vmParamsFile = FileUtil.createTempFile("idea_vm_params" + pseudoUniquePrefix, null);
          commandLine.addFileToDeleteOnTermination(vmParamsFile);
          CommandLineWrapperUtil.writeWrapperFile(vmParamsFile, toWrite, lineSeparator, cs);
        }
      }
      else {
        appendVmParameters(commandLine, request, vmParameters);
      }

      appendEncoding(javaParameters, commandLine, vmParameters);

      File appParamsFile = null;
      if (dynamicParameters) {
        appParamsFile = FileUtil.createTempFile("idea_app_params" + pseudoUniquePrefix, null);
        commandLine.addFileToDeleteOnTermination(appParamsFile);
        CommandLineWrapperUtil.writeWrapperFile(appParamsFile, javaParameters.getProgramParametersList().getList(), lineSeparator, cs);
      }

      File classpathFile = FileUtil.createTempFile("idea_classpath" + pseudoUniquePrefix, null);
      commandLine.addFileToDeleteOnTermination(classpathFile);
      Collection<TargetValue<String>> classPathParameters = getClassPathValues(request, runtimeConfiguration, javaParameters);
      // classpath file content is written asynchronously once target paths resolve
      Promises.collectResults(ContainerUtil.map(classPathParameters, TargetValue::getTargetValue)).onSuccess(pathList -> {
        try {
          CommandLineWrapperUtil.writeWrapperFile(classpathFile, pathList, lineSeparator, cs);
        }
        catch (IOException e) {
          //todo[remoteServers]: interrupt preparing environment
        }
      });

      // the actual -classpath for the JVM only needs the wrapper itself (plus loader classes below)
      Set<TargetValue<String>> classpath = new LinkedHashSet<>();
      classpath.add(request.createUpload(PathUtil.getJarPathForClass(commandLineWrapper)));
      if (isUrlClassloader(vmParameters)) {
        if (!(request instanceof LocalTargetEnvironmentRequest)) {
          throw new CantRunException("Cannot run application with UrlClassPath on the remote target.");
        }
        classpath.add(TargetValue.fixed(PathUtil.getJarPathForClass(UrlClassLoader.class)));
        classpath.add(TargetValue.fixed(PathUtil.getJarPathForClass(StringUtilRt.class)));
        classpath.add(TargetValue.fixed(PathUtil.getJarPathForClass(THashMap.class)));
        //explicitly enumerate jdk classes as UrlClassLoader doesn't delegate to parent classloader when loading resources
        //which leads to exceptions when coverage instrumentation tries to instrument loader class and its dependencies
        Sdk jdk = javaParameters.getJdk();
        if (jdk != null) {
          for (VirtualFile file : jdk.getRootProvider().getFiles(OrderRootType.CLASSES)) {
            String path = PathUtil.getLocalPath(file);
            if (StringUtil.isNotEmpty(path)) {
              classpath.add(TargetValue.fixed(path));
            }
          }
        }
      }
      commandLine.addParameter("-classpath");
      String pathSeparator = String.valueOf(request.getTargetPlatform().getPlatform().pathSeparator);
      commandLine.addParameter(TargetValue.composite(classpath, values -> StringUtil.join(values, pathSeparator)));

      commandLine.addParameter(commandLineWrapper.getName());
      Map<String, String> commandLineContent = new HashMap<>();
      commandLine.putUserData(COMMAND_LINE_CONTENT, commandLineContent);

      TargetValue<String> classPathParameter = request.createUpload(classpathFile.getAbsolutePath());
      commandLine.addParameter(classPathParameter);
      addCommandLineContentOnResolve(commandLineContent, classpathFile, classPathParameter);

      if (vmParamsFile != null) {
        commandLine.addParameter("@vm_params");
        TargetValue<String> vmParamsParameter = request.createUpload(vmParamsFile.getAbsolutePath());
        commandLine.addParameter(vmParamsParameter);
        addCommandLineContentOnResolve(commandLineContent, vmParamsFile, vmParamsParameter);
      }

      if (appParamsFile != null) {
        commandLine.addParameter("@app_params");
        TargetValue<String> appParamsParameter = request.createUpload(appParamsFile.getAbsolutePath());
        commandLine.addParameter(appParamsParameter);
        addCommandLineContentOnResolve(commandLineContent, appParamsFile, appParamsParameter);
      }
    }
    catch (IOException e) {
      throwUnableToCreateTempFile(e);
    }
  }

  /** Records the local file's content under its resolved target path for later display. */
  private static void addCommandLineContentOnResolve(@NotNull Map<String, String> commandLineContent,
                                                     @NotNull File localFile,
                                                     @NotNull TargetValue<String> value) {
    value.getTargetValue().onSuccess(resolved -> {
      try {
        commandLineContent.put(resolved, FileUtil.loadFile(localFile));
      }
      catch (IOException e) {
        LOG.error("Cannot add command line content for value " + resolved, e);
      }
    });
  }

  /**
   * Passes the classpath via a generated jar whose manifest carries a Class-Path attribute
   * (and, when dynamic, VM-Options / Program-Parameters attributes read by CommandLineWrapper).
   */
  private static void setClasspathJarParams(@NotNull TargetedCommandLineBuilder commandLine,
                                            @NotNull TargetEnvironmentRequest request,
                                            @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration,
                                            @NotNull SimpleJavaParameters javaParameters,
                                            @NotNull ParametersList vmParameters,
                                            @NotNull Class<?> commandLineWrapper,
                                            boolean dynamicVMOptions,
                                            boolean dynamicParameters) throws CantRunException {
    try {
      Manifest manifest = new Manifest();
      manifest.getMainAttributes().putValue("Created-By", ApplicationNamesInfo.getInstance().getFullProductName());

      String manifestText = "";
      if (dynamicVMOptions) {
        List<String> properties = new ArrayList<>();
        for (String param : vmParameters.getList()) {
          if (isUserDefinedProperty(param)) {
            properties.add(param);
          }
          else {
            appendVmParameter(commandLine, request, param);
          }
        }
        manifest.getMainAttributes().putValue("VM-Options", ParametersListUtil.join(properties));
        manifestText += "VM-Options: " + ParametersListUtil.join(properties) + "\n";
      }
      else {
        appendVmParameters(commandLine, request, vmParameters);
      }

      appendEncoding(javaParameters, commandLine, vmParameters);

      if (dynamicParameters) {
        manifest.getMainAttributes().putValue("Program-Parameters", ParametersListUtil.join(javaParameters.getProgramParametersList().getList()));
        manifestText += "Program-Parameters: " + ParametersListUtil.join(javaParameters.getProgramParametersList().getList()) + "\n";
      }

      String jarFileContentPrefix = manifestText + "Class-Path: ";
      Map<String, String> commandLineContent = new HashMap<>();
      commandLine.putUserData(COMMAND_LINE_CONTENT, commandLineContent);

      File classpathJarFile = FileUtil.createTempFile(CommandLineWrapperUtil.CLASSPATH_JAR_FILE_NAME_PREFIX + Math.abs(new Random().nextInt()), ".jar", true);
      commandLine.addFileToDeleteOnTermination(classpathJarFile);

      String jarFilePath = classpathJarFile.getAbsolutePath();
      commandLine.addParameter("-classpath");
      if (dynamicVMOptions || dynamicParameters) {
        // when attributes are read by the wrapper, it must be on the classpath and named as main class
        // NOTE(review): createUpload() is applied to a class NAME here, not a path — verify intended.
        char pathSeparator = request.getTargetPlatform().getPlatform().pathSeparator;
        commandLine.addParameter(request.createUpload(PathUtil.getJarPathForClass(commandLineWrapper) + pathSeparator + jarFilePath));
        commandLine.addParameter(request.createUpload(commandLineWrapper.getName()));
      }
      TargetValue<String> jarFileValue = request.createUpload(jarFilePath);
      commandLine.addParameter(jarFileValue);

      Collection<TargetValue<String>> classPathParameters = getClassPathValues(request, runtimeConfiguration, javaParameters);
      // NOTE(review): the lambda parameter 'targetClassPathParameters' is unused — the loop reads
      // classPathParameters via blockingGet() instead; presumably equivalent once resolved.
      Promises.collectResults(ContainerUtil.map(classPathParameters, TargetValue::getTargetValue)).onSuccess(targetClassPathParameters -> {
        try {
          boolean notEscape = vmParameters.hasParameter(PROPERTY_DO_NOT_ESCAPE_CLASSPATH_URL);
          StringBuilder classPath = new StringBuilder();
          for (TargetValue<String> parameter : classPathParameters) {
            if (classPath.length() > 0) classPath.append(' ');
            String localValue = parameter.getLocalValue().blockingGet(0);
            String targetValue = parameter.getTargetValue().blockingGet(0);
            if (targetValue == null || localValue == null) {
              throw new ExecutionException("Couldn't resolve target value", null);
            }
            File file = new File(targetValue);
            // file.toURL() is deprecated but kept for the WebSphere workaround (see PROPERTY_DO_NOT_ESCAPE_CLASSPATH_URL)
            String url = (notEscape ? file.toURL() : file.toURI().toURL()).toString();
            // trailing slash marks directories in manifest Class-Path entries
            classPath.append(!StringUtil.endsWithChar(url, '/') && new File(localValue).isDirectory() ? url + "/" : url);
          }
          CommandLineWrapperUtil.fillClasspathJarFile(manifest, classPath.toString(), classpathJarFile);
          jarFileValue.getTargetValue().onSuccess(value -> {
            commandLineContent.put(value, jarFileContentPrefix + classPath.toString());
          });
        }
        catch (IOException | ExecutionException e) {
          //todo[remoteServers]: interrupt preparing environment
        }
        catch (TimeoutException e) {
          LOG.error("Couldn't resolve target value", e);
        }
      });
    }
    catch (IOException e) {
      throwUnableToCreateTempFile(e);
    }
  }

  /** True for user-defined -D properties (excluding the reserved -Dsun.* / -Djava.* namespaces). */
  @SuppressWarnings("SpellCheckingInspection")
  private static boolean isUserDefinedProperty(String param) {
    return param.startsWith("-D") && !(param.startsWith("-Dsun.") || param.startsWith("-Djava."));
  }

  private static void throwUnableToCreateTempFile(IOException cause) throws CantRunException {
    throw new CantRunException("Failed to create a temporary file in " + FileUtilRt.getTempDirectory(), cause);
  }

  /** Plain (non-dynamic) path: VM options, encoding, -classpath and -p straight on the command line. */
  private static void appendParamsEncodingClasspath(@NotNull TargetedCommandLineBuilder commandLine,
                                                    @NotNull TargetEnvironmentRequest request,
                                                    @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration,
                                                    @NotNull SimpleJavaParameters javaParameters,
                                                    @NotNull ParametersList vmParameters) {
    appendVmParameters(commandLine, request, vmParameters);
    appendEncoding(javaParameters, commandLine, vmParameters);

    PathsList classPath = javaParameters.getClassPath();
    if (!classPath.isEmpty() && !explicitClassPath(vmParameters)) {
      commandLine.addParameter("-classpath");
      List<TargetValue<String>> pathValues = getClassPathValues(request, runtimeConfiguration, javaParameters);
      String pathSeparator = String.valueOf(request.getTargetPlatform().getPlatform().pathSeparator);
      commandLine.addParameter(TargetValue.composite(pathValues, values -> StringUtil.join(values, pathSeparator)));
    }

    PathsList modulePath = javaParameters.getModulePath();
    if (!modulePath.isEmpty() && !explicitModulePath(vmParameters)) {
      commandLine.addParameter("-p");
      commandLine.addParameter(modulePath.getPathsString());
    }
  }

  private static void appendVmParameters(@NotNull TargetedCommandLineBuilder commandLine,
                                         @NotNull TargetEnvironmentRequest request,
                                         @NotNull ParametersList vmParameters) {
    for (String vmParameter : vmParameters.getList()) {
      appendVmParameter(commandLine, request, vmParameter);
    }
  }

  /** Adds one VM parameter; on remote targets, agent jars referenced by the parameter are uploaded. */
  private static void appendVmParameter(@NotNull TargetedCommandLineBuilder commandLine,
                                        @NotNull TargetEnvironmentRequest request,
                                        @NotNull String vmParameter) {
    if (request instanceof LocalTargetEnvironmentRequest ||
        SystemProperties.getBooleanProperty("remote.servers.ignore.vm.parameter", false)) {
      commandLine.addParameter(vmParameter);
      return;
    }

    if (vmParameter.startsWith("-agentpath:")) {
      appendVmAgentParameter(commandLine, request, vmParameter, "-agentpath:");
    }
    else if (vmParameter.startsWith("-javaagent:")) {
      appendVmAgentParameter(commandLine, request, vmParameter, "-javaagent:");
    }
    else {
      commandLine.addParameter(vmParameter);
    }
  }

  /** Uploads the agent jar of a -javaagent:/-agentpath: option and rewrites the option to the target path. */
  private static void appendVmAgentParameter(@NotNull TargetedCommandLineBuilder commandLine,
                                             @NotNull TargetEnvironmentRequest request,
                                             @NotNull String vmParameter,
                                             @NotNull String prefix) {
    String value = StringUtil.trimStart(vmParameter, prefix);
    int equalsSign = value.indexOf('=');
    String path = equalsSign > -1 ? value.substring(0, equalsSign) : value;
    if (!path.endsWith(".jar")) {
      // ignore non-cross-platform agents
      return;
    }
    String suffix = equalsSign > -1 ? value.substring(equalsSign) : "";
    commandLine.addParameter(TargetValue.map(request.createUpload(path), v -> prefix + v + suffix));
  }

  /**
   * Converts classpath entries to target values: paths under the local JDK home are remapped onto
   * the remote JDK home instead of being uploaded; everything else is uploaded to the target.
   */
  @NotNull
  private static List<TargetValue<String>> getClassPathValues(@NotNull TargetEnvironmentRequest request,
                                                              @Nullable JavaLanguageRuntimeConfiguration runtimeConfiguration,
                                                              @NotNull SimpleJavaParameters javaParameters) {
    String localJdkPath = ObjectUtils.doIfNotNull(javaParameters.getJdk(), jdk -> jdk.getHomePath());
    String remoteJdkPath = runtimeConfiguration != null ? runtimeConfiguration.getHomePath() : null;

    ArrayList<TargetValue<String>> result = new ArrayList<>();
    for (String path : javaParameters.getClassPath().getPathList()) {
      if (localJdkPath == null || remoteJdkPath == null || !path.startsWith(localJdkPath)) {
        result.add(request.createUpload(path));
      }
      else {
        char separator = request.getTargetPlatform().getPlatform().fileSeparator;
        result.add(TargetValue.fixed(FileUtil.toCanonicalPath(remoteJdkPath + separator + StringUtil.trimStart(path, localJdkPath), separator)));
      }
    }
    return result;
  }

  private static void appendEncoding(@NotNull SimpleJavaParameters javaParameters,
                                     @NotNull TargetedCommandLineBuilder commandLine,
                                     @NotNull ParametersList parametersList) {
    // for correct handling of process's input and output, values of file.encoding and charset of CommandLine object should be in sync
    String encoding = parametersList.getPropertyValue("file.encoding");
    if (encoding == null) {
      Charset charset = javaParameters.getCharset();
      if (charset == null) charset = EncodingManager.getInstance().getDefaultCharset();
      commandLine.addParameter("-Dfile.encoding=" + charset.name());
      commandLine.setCharset(charset);
    }
    else {
      try {
        commandLine.setCharset(Charset.forName(encoding));
      }
      catch (UnsupportedCharsetException | IllegalCharsetNameException ignore) { }
    }
  }

  /**
   * Returns the launch-entry parameters: "-m module/class", a main class name, or "-jar path".
   *
   * @throws CantRunException when neither a main class nor a jar path is configured
   */
  private static List<TargetValue<String>> getMainClassParams(SimpleJavaParameters javaParameters,
                                                              @NotNull TargetEnvironmentRequest request) throws CantRunException {
    String mainClass = javaParameters.getMainClass();
    String moduleName = javaParameters.getModuleName();
    String jarPath = javaParameters.getJarPath();
    if (mainClass != null && moduleName != null) {
      return Arrays.asList(TargetValue.fixed("-m"), TargetValue.fixed(moduleName + '/' + mainClass));
    }
    else if (mainClass != null) {
      return Collections.singletonList(TargetValue.fixed(mainClass));
    }
    else if (jarPath != null) {
      return Arrays.asList(TargetValue.fixed("-jar"), request.createUpload(jarPath));
    }
    else {
      throw new CantRunException(ExecutionBundle.message("main.class.is.not.specified.error.message"));
    }
  }

  /** Loads CommandLineWrapper reflectively; {@code null} when it is not on the IDE classpath. */
  private static @Nullable Class<?> getCommandLineWrapperClass() {
    try {
      return Class.forName(WRAPPER_CLASS);
    }
    catch (ClassNotFoundException e) {
      return null;
    }
  }

  /** Dynamic-classpath default: project-level setting, falling back to the 'idea.dynamic.classpath' system property. */
  public static boolean useDynamicClasspath(@Nullable Project project) {
    boolean hasDynamicProperty = Boolean.parseBoolean(System.getProperty("idea.dynamic.classpath", "false"));
    return project != null ? PropertiesComponent.getInstance(project).getBoolean("dynamic.classpath", hasDynamicProperty) : hasDynamicProperty;
  }

  /** Application-level toggle for passing VM options via files; defaults to {@code true}. */
  public static boolean useDynamicVMOptions() {
    return PropertiesComponent.getInstance().getBoolean("idea.dynamic.vmoptions", true);
  }

  /** Application-level toggle for passing program parameters via files; defaults to {@code true}. */
  public static boolean useDynamicParameters() {
    return PropertiesComponent.getInstance().getBoolean("idea.dynamic.parameters", true);
  }

  /** Application-level toggle for the classpath-jar mechanism; defaults to {@code true}. */
  public static boolean useClasspathJar() {
    return PropertiesComponent.getInstance().getBoolean("idea.dynamic.classpath.jar", true);
  }

  //<editor-fold desc="Deprecated stuff.">
  /** @deprecated use {@link SimpleJavaParameters#toCommandLine()} */
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
  @Deprecated
  public static GeneralCommandLine setupJVMCommandLine(String exePath, SimpleJavaParameters javaParameters, boolean forceDynamicClasspath) {
    try {
      javaParameters.setUseDynamicClasspath(forceDynamicClasspath);
      GeneralCommandLine commandLine = new GeneralCommandLine(exePath);
      setupCommandLine(commandLine, javaParameters);
      return commandLine;
    }
    catch (CantRunException e) {
      throw new RuntimeException(e);
    }
  }
  //</editor-fold>
}
/*
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.apache.batik.transcoder;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;

import org.apache.batik.dom.svg.SVGDOMImplementation;
import org.apache.batik.svggen.SVGGraphics2D;
import org.apache.batik.transcoder.keys.BooleanKey;
import org.apache.batik.transcoder.keys.FloatKey;
import org.apache.batik.transcoder.keys.IntegerKey;
import org.apache.batik.util.Platform;
import org.apache.batik.util.SVGConstants;
import org.xml.sax.XMLFilter;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Simplifies the creation of a transcoder that transcodes <em>to</em> SVG content.
 *
 * <p>To use this class, implement the <i>transcode</i> method of the
 * <i>AbstractTranscoder</i> class:</p>
 * <ul>
 * <li>first get the associated Document from the <i>TranscoderOutput</i> via
 * {@link #createDocument(TranscoderOutput)}, then create a new
 * {@link org.apache.batik.svggen.SVGGraphics2D} with this Document:
 * <pre>
 *  Document doc = this.createDocument(output);
 *  svgGenerator = new SVGGraphics2D(doc);
 * </pre></li>
 * <li>perform the effective transcoding, using the
 * {@link org.apache.batik.svggen.SVGGraphics2D} previously created</li>
 * <li>then call
 * {@link #writeSVGToOutput(SVGGraphics2D, Element, TranscoderOutput)} to create the
 * effective output file (if the output is set to be a File or URI):
 * <pre>
 *  Element svgRoot = svgGenerator.getRoot();
 *  writeSVGToOutput(svgGenerator, svgRoot, output);
 * </pre></li>
 * </ul>
 *
 * <p>Several transcoding hints are defined for this abstract transcoder, but no default
 * implementation is provided. Subclasses must implement which keys are relevant to them:</p>
 * <ul>
 * <li>KEY_INPUT_WIDTH, KEY_INPUT_HEIGHT, KEY_XOFFSET, KEY_YOFFSET: these Integer keys
 * select the portion of the image to transcode, defined by the width, height, and offset
 * of this portion in Metafile units:
 * <pre>
 *  transcoder.addTranscodingHint(ToSVGAbstractTranscoder.KEY_INPUT_WIDTH, new Integer(input_width));
 * </pre></li>
 * <li>KEY_ESCAPED: this Boolean key allows escaping XML characters in the output</li>
 * <li>KEY_WIDTH, KEY_HEIGHT: these Float values force the width and height of the output:
 * <pre>
 *  transcoder.addTranscodingHint(ToSVGAbstractTranscoder.KEY_WIDTH, new Float(width));
 * </pre></li>
 * </ul>
 *
 * @version $Id$
 */
public abstract class ToSVGAbstractTranscoder extends AbstractTranscoder
    implements SVGConstants {

    // NOTE(review): these are public, mutable statics initialized from the screen
    // resolution; callers could reassign them. Kept non-final for API compatibility.
    public static float PIXEL_TO_MILLIMETERS;
    public static float PIXEL_PER_INCH;
    static {
        PIXEL_TO_MILLIMETERS = 25.4f / (float)Platform.getScreenResolution();
        PIXEL_PER_INCH = Platform.getScreenResolution();
    }

    public static final int TRANSCODER_ERROR_BASE = 0xff00;
    public static final int ERROR_NULL_INPUT = TRANSCODER_ERROR_BASE + 0;
    public static final int ERROR_INCOMPATIBLE_INPUT_TYPE = TRANSCODER_ERROR_BASE + 1;
    public static final int ERROR_INCOMPATIBLE_OUTPUT_TYPE = TRANSCODER_ERROR_BASE + 2;

    /** Keys definition: width value for the output (in pixels). */
    public static final TranscodingHints.Key KEY_WIDTH = new FloatKey();

    /** Keys definition: height value for the output (in pixels). */
    public static final TranscodingHints.Key KEY_HEIGHT = new FloatKey();

    /** Keys definition: width value for the input (in pixels). */
    public static final TranscodingHints.Key KEY_INPUT_WIDTH = new IntegerKey();

    /** Keys definition: height value for the input (in pixels). */
    public static final TranscodingHints.Key KEY_INPUT_HEIGHT = new IntegerKey();

    /** Keys definition: x offset value for the output (in pixels). */
    public static final TranscodingHints.Key KEY_XOFFSET = new IntegerKey();

    /** Keys definition: y offset value for the output (in pixels). */
    public static final TranscodingHints.Key KEY_YOFFSET = new IntegerKey();

    /** Keys definition: define if the characters will be escaped in the output. */
    public static final TranscodingHints.Key KEY_ESCAPED = new BooleanKey();

    // Generator used by subclasses to paint into the output SVG Document.
    protected SVGGraphics2D svgGenerator;

    /**
     * Create an empty Document from a TranscoderOutput.
     * <ul>
     * <li>If the TranscoderOutput already contains a Document: returns this Document</li>
     * <li>else create a new empty DOM Document</li>
     * </ul>
     */
    protected Document createDocument(TranscoderOutput output) {
        // Use SVGGraphics2D to generate SVG content
        Document doc;
        if (output.getDocument() == null) {
            // No document supplied by the caller: build a fresh <svg> document.
            DOMImplementation domImpl = SVGDOMImplementation.getDOMImplementation();
            doc = domImpl.createDocument(SVG_NAMESPACE_URI, SVG_SVG_TAG, null);
        } else {
            doc = output.getDocument();
        }
        return doc;
    }

    /**
     * Get the {@link org.apache.batik.svggen.SVGGraphics2D} associated
     * with this transcoder.
     */
    public SVGGraphics2D getGraphics2D() {
        return svgGenerator;
    }

    /**
     * Writes the SVG content held by the svgGenerator to the
     * <code>TranscoderOutput</code>. This method does nothing if the output already
     * contains a Document.
     *
     * <p>Output targets are tried in order: OutputStream, Writer, then URI. If none
     * is usable, a TranscoderException carrying ERROR_INCOMPATIBLE_OUTPUT_TYPE is thrown.</p>
     */
    protected void writeSVGToOutput(SVGGraphics2D svgGenerator, Element svgRoot,
        TranscoderOutput output) throws TranscoderException {

        Document doc = output.getDocument();

        if (doc != null) return;

        // XMLFilter outputs are not supported by this transcoder.
        // NOTE(review): if the inherited error handler merely records the fatal error
        // instead of throwing, execution falls through to the branches below — confirm
        // the intended handler contract.
        XMLFilter xmlFilter = output.getXMLFilter();
        if (xmlFilter != null) {
            handler.fatalError(new TranscoderException("" + ERROR_INCOMPATIBLE_OUTPUT_TYPE));
        }

        try {
            boolean escaped = false;
            if (hints.containsKey(KEY_ESCAPED)) {
                escaped = ((Boolean)hints.get(KEY_ESCAPED)).booleanValue();
            }
            // Output stream
            OutputStream os = output.getOutputStream();
            if (os != null) {
                svgGenerator.stream(svgRoot, new OutputStreamWriter(os), false, escaped);
                return;
            }
            // Writer
            Writer wr = output.getWriter();
            if (wr != null) {
                svgGenerator.stream(svgRoot, wr, false, escaped);
                return;
            }
            // URI
            // NOTE(review): the URLConnection's OutputStream is never explicitly
            // closed or flushed here — verify whether SVGGraphics2D.stream flushes,
            // otherwise output may be truncated for URI targets.
            String uri = output.getURI();
            if ( uri != null ){
                try{
                    URL url = new URL(uri);
                    URLConnection urlCnx = url.openConnection();
                    os = urlCnx.getOutputStream();
                    svgGenerator.stream(svgRoot, new OutputStreamWriter(os), false, escaped);
                    return;
                } catch (MalformedURLException e){
                    handler.fatalError(new TranscoderException(e));
                } catch (IOException e){
                    handler.fatalError(new TranscoderException(e));
                }
            }
        } catch(IOException e){
            throw new TranscoderException(e);
        }
        // No usable output target was found.
        throw new TranscoderException("" + ERROR_INCOMPATIBLE_OUTPUT_TYPE);
    }
}
package com.jadebyte.gitlagos.fragments;

import android.content.Context;
import android.graphics.PorterDuff;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.android.volley.Cache;
import com.android.volley.DefaultRetryPolicy;
import com.android.volley.NetworkResponse;
import com.android.volley.Response;
import com.android.volley.RetryPolicy;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonObjectRequest;
import com.jadebyte.gitlagos.R;
import com.jadebyte.gitlagos.adapters.UserAdapter;
import com.jadebyte.gitlagos.listeners.EndlessRecyclerViewScrollListener;
import com.jadebyte.gitlagos.listeners.UserClickedListener;
import com.jadebyte.gitlagos.pojos.UserItem;
import com.jadebyte.gitlagos.utils.Constants;
import com.jadebyte.gitlagos.utils.MyVolleyError;
import com.jadebyte.gitlagos.utils.VolleySingleton;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.List;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;

/**
 * Fragment that shows a paginated, endlessly-scrolling list of GitHub users
 * fetched via Volley. Survives rotation by saving the parsed user list (or the
 * last error) into the instance state, and notifies the hosting Activity about
 * clicks through {@link UserClickedListener}.
 */
public class UserListFragment extends Fragment {

    //Constants
    private final String KEY_USER_ITEMS = "userItems";

    //Views
    @BindView(R.id.user_recycler) RecyclerView recyclerView;
    @BindView(R.id.user_pro_bar) ProgressBar mProgressBar;
    @BindView(R.id.user_info_error_root) LinearLayout errorLayout;
    @BindView(R.id.user_info_error_text) TextView errorText;
    @BindView(R.id.user_info_error_button) Button retryButton;
    @BindView(R.id.user_info_img) ImageView errorImg;

    //Fields
    private final String userUrl = Constants.URLS.DEFAULT_URL;
    private String moreUsersUrl;
    private List<UserItem> mUserItemList;
    private UserAdapter adapter;
    private JsonObjectRequest usersRequest;
    private LinearLayoutManager mLayoutManager;
    private Unbinder mUnbinder;
    private boolean hasFailed = false;
    private String errorMessage;
    // Defaults to a no-op implementation; replaced by the hosting Activity in onAttach().
    private UserClickedListener onUserClickedListener = sUserCallbacks;
    private int pageNumber = 2;
    private boolean isMoreLoading = false;

    public UserListFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        setHasOptionsMenu(true);
        super.onCreate(savedInstanceState);
        if (savedInstanceState != null) {
            hasFailed = savedInstanceState.getBoolean("hasFailed");
            errorMessage = savedInstanceState.getString("errorMessage");
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_user_list, container, false);
        mUnbinder = ButterKnife.bind(this, view);
        if (savedInstanceState != null && savedInstanceState.containsKey(KEY_USER_ITEMS)) {
            // A successful network call has been made and parsed previously.
            mUserItemList = savedInstanceState.getParcelableArrayList(KEY_USER_ITEMS);
        } else if (hasFailed) {
            // An unsuccessful network call has been made previously. Just show the error layout
            errorText.setText(errorMessage);
            errorLayout.setVisibility(View.VISIBLE);
            mUserItemList = new ArrayList<>();
        } else {
            // No network call has been made or hasn't returned any response
            mUserItemList = new ArrayList<>();
            getUserArray(userUrl, false);
        }
        setUpWidgets();
        widgetListeners();
        return view;
    }

    /** Wires the RecyclerView, its adapter, and tints the progress spinner. */
    private void setUpWidgets() {
        mLayoutManager = new LinearLayoutManager(getActivity());
        recyclerView.setLayoutManager(mLayoutManager);
        adapter = new UserAdapter(mUserItemList);
        recyclerView.setAdapter(adapter);
        mProgressBar.getIndeterminateDrawable().setColorFilter(ContextCompat.getColor(getActivity
                (), R.color.colorAccent), PorterDuff.Mode.SRC_IN);
    }

    /** Installs retry, endless-scroll, and item-click listeners. */
    private void widgetListeners() {
        retryButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                errorLayout.setVisibility(View.GONE);
                getUserArray(userUrl, false);
            }
        });

        //Add the scroll listener
        recyclerView.addOnScrollListener(new EndlessRecyclerViewScrollListener(mLayoutManager) {
            @Override
            public void onLoadMore(final int page, int totalItemsCount, RecyclerView view) {
                if (!isMoreLoading) {
                    moreUsersUrl = userUrl + "&page=" + getPageNumber();
                    getUserArray(moreUsersUrl, true);
                }
            }
        });

        adapter.setOnUserClickedListener(new UserClickedListener() {
            @Override
            public void onUserClicked(String userObject) {
                onUserClickedListener.onUserClicked(userObject);
            }

            @Override
            public void onLoadMoreClicked() {
                moreUsersUrl = userUrl + "&page=" + getPageNumber();
                getUserArray(moreUsersUrl, true);
            }
        });
    }

    /**
     * Fetches a page of users from GitHub.
     *
     * @param url             the request URL (base URL, optionally with a page parameter)
     * @param isLoadMoreUsers true when this is a pagination request appended to an
     *                        existing list; false for the initial/retry load
     */
    private void getUserArray(String url, final boolean isLoadMoreUsers){
        // Snapshot of the item count before the request. FIX: this used to be a
        // constant 0 with a dangling no-op `adapter.getItemCount();` statement, so
        // the failure branch below could never detect "nothing was added" and the
        // page number was never rolled back. The adapter only exists for
        // load-more calls (the initial load runs before setUpWidgets()), so the
        // count is read inside the load-more branch only.
        final int initialItems;
        if (isLoadMoreUsers) {
            initialItems = adapter.getItemCount();
            isMoreLoading = true;
            processBottomViews(UserAdapter.Status.PRE_REQUEST);
        } else {
            initialItems = 0;
            mProgressBar.setVisibility(View.VISIBLE);
        }

        //Creating a json request
        usersRequest = new JsonObjectRequest(url, null, new Response.Listener<JSONObject>() {
            @Override
            public void onResponse(JSONObject response) {
                // Successful request
                if (isLoadMoreUsers) {
                    isMoreLoading = false;
                    processBottomViews(UserAdapter.Status.SUCCESS);
                    parseUserArray(response, true);
                } else {
                    if (mProgressBar != null) {
                        mProgressBar.setVisibility(View.GONE);
                        // Only parse when the list is still empty (adapter holds one
                        // footer/placeholder item) to avoid duplicating after rotation.
                        if (adapter.getItemCount() == 1) {
                            parseUserArray(response, false);
                        }
                    }
                    hasFailed = false;
                }
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                if (isLoadMoreUsers) {
                    isMoreLoading = false;
                    // Nothing was appended for this page: roll the page counter back.
                    if (initialItems == adapter.getItemCount()) {
                        setPageNumber(getPageNumber() - 1);
                    }
                    processBottomViews(UserAdapter.Status.FAILED);
                } else {
                    if (mProgressBar != null) {
                        mProgressBar.setVisibility(View.GONE);
                    }
                    if (adapter.getItemCount() == 1) {
                        errorMessage = MyVolleyError.errorMessage(error, getActivity());
                        errorText.setText(errorMessage);
                        errorLayout.setVisibility(View.VISIBLE);
                        hasFailed = true;
                    }
                }
            }
        }){
            @Override
            protected Response<JSONObject> parseNetworkResponse(NetworkResponse response) {
                // Force Volley to cache successful responses for one hour.
                Response<JSONObject> resp = super.parseNetworkResponse(response);
                long currentTime = System.currentTimeMillis();
                long cacheTime = currentTime + 60 * 60 * 1000; //keeps cache for 1 hour
                if (!resp.isSuccess()) {
                    return resp;
                }
                Cache.Entry entry = resp.cacheEntry;
                if (entry == null) {
                    entry = new Cache.Entry();
                    entry.data = response.data;
                    entry.responseHeaders = response.headers;
                }
                entry.ttl = cacheTime;
                entry.softTtl = 0;
                return Response.success(resp.result, entry);
            }
        };

        RetryPolicy policy = new DefaultRetryPolicy(5000, DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
                DefaultRetryPolicy.DEFAULT_BACKOFF_MULT);
        usersRequest.setRetryPolicy(policy);
        usersRequest.setShouldCache(true);
        VolleySingleton.getInstance(getActivity()).addToRequestQueue(usersRequest);
    }

    /**
     * Parses the "items" array of a GitHub search response into {@link UserItem}s
     * and notifies the adapter.
     */
    private void parseUserArray(JSONObject object, boolean isLoadMoreUsers) {
        try {
            JSONArray array = object.getJSONArray("items");
            for (int i = 0; i < array.length(); i++) {
                UserItem userItem = new UserItem();
                JSONObject jsonObject = array.getJSONObject(i);
                userItem.setAvatarUrl(jsonObject.getString("avatar_url"));
                userItem.setUsername(jsonObject.getString("login"));
                userItem.setUserObject(jsonObject.toString());
                // FIX: each parsed user is added exactly once. The previous extra
                // `mUserItemList.addAll(userItem)` call was invalid (List.addAll
                // takes a Collection, not a single item) and, as intended, would
                // have duplicated every row on "load more".
                mUserItemList.add(userItem);
            }
            if (isLoadMoreUsers) {
                int curSize = adapter.getItemCount();
                adapter.notifyItemRangeChanged(curSize, mUserItemList.size() - 1);
            } else {
                adapter.notifyItemRangeChanged(0, adapter.getItemCount());
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    /** Delegates footer state (loading / failed / success) to the adapter. */
    private void processBottomViews(UserAdapter.Status status) {
        adapter.processBottomViews(status);
    }

    @Override
    public void onSaveInstanceState(Bundle savedInstanceState) {
        // Persist the parsed list only when real data exists (count > 1 skips the
        // footer/placeholder-only state), plus the last error and page position.
        if (adapter != null && adapter.getItemCount() > 1) {
            savedInstanceState.putParcelableArrayList(KEY_USER_ITEMS,
                    (ArrayList<? extends Parcelable>) mUserItemList);
        }
        savedInstanceState.putBoolean("hasFailed", hasFailed);
        savedInstanceState.putInt("userPage", pageNumber);
        savedInstanceState.putString("errorMessage", errorMessage);
        super.onSaveInstanceState(savedInstanceState);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (savedInstanceState != null) {
            pageNumber = savedInstanceState.getInt("userPage");
        }
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        try {
            onUserClickedListener = (UserClickedListener) context;
        } catch (ClassCastException e) {
            throw new ClassCastException(context.toString() + " must implement UserClickedListener");
        }
    }

    /**
     * Returns the current page number and post-increments it.
     * NOTE: this getter has a side effect by design — every call advances the page.
     */
    public int getPageNumber() {
        return pageNumber++;
    }

    public void setPageNumber(int pageNumber) {
        this.pageNumber = pageNumber;
    }

    // No-op fallback so the fragment never NPEs before onAttach() installs the real listener.
    private static UserClickedListener sUserCallbacks = new UserClickedListener() {
        @Override
        public void onUserClicked(String userObject) {
        }

        @Override
        public void onLoadMoreClicked() {
        }
    };

    @Override
    public void onDestroyView(){
        // Cancel any in-flight request and release the ButterKnife bindings.
        if (usersRequest != null && !usersRequest.isCanceled()) {
            usersRequest.cancel();
        }
        super.onDestroyView();
        mUnbinder.unbind();
    }
}
/* ************************************************************************
#
#  DivConq
#
#  http://divconq.com/
#
#  Copyright:
#    Copyright 2014 eTimeline, LLC. All rights reserved.
#
#  License:
#    See the license.txt file in the project's top-level directory for details.
#
#  Authors:
#    * Andy White
#
************************************************************************ */
package divconq.xml;

import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.Stack;

import divconq.lang.op.OperationResult;
import divconq.util.IOUtil;

/**
 * Quick and Dirty XML parser.  This parser is, like the SAX parser, an event
 * based parser, but with much less functionality.  Based off of QDParser
 * by Kevin Twidle see http://twicom.com/
 */
public class XmlParser {
    // Restores the previous parser mode; an empty stack means we are back at
    // the document level (PRE).
    private static int popMode(Stack<Integer> st) {
        if (!st.empty())
            return st.pop().intValue();

        return PRE;
    }

    // Parser modes (states of the hand-written state machine below).
    private final static int TEXT = 1, ENTITY = 2, OPEN_TAG = 3, CLOSE_TAG = 4, START_TAG = 5,
            ATTRIBUTE_LVALUE = 6, ATTRIBUTE_EQUAL = 9, ATTRIBUTE_RVALUE = 10, QUOTE = 7,
            IN_TAG = 8, SINGLE_TAG = 12, COMMENT = 13, IGNORE = 14, PRE = 15, CDATA = 16,
            OPEN_INSTRUCTION = 17;

    /**
     * Parses XML from a reader and returns a data structure containing the
     * parsed XML.
     *
     * @param doc
     *            the DocHandler that will be given the different elements of
     *            the XML
     * @param reader
     *            the Reader to get the source XML from
     * @return an OperationResult carrying any parse or I/O errors
     */
    public static OperationResult parse(IParseHandler doc, Reader reader) {
        OperationResult or = new OperationResult();

        try {
            Stack<Integer> st = new Stack<Integer>();       // saved modes to return to
            int depth = 0;                                  // current element nesting depth
            int mode = PRE;                                 // current state
            int c = 0;                                      // current character
            int quotec = '"';                               // quote char that opened the current attribute value
            depth = 0;
            StringBuffer sb = new StringBuffer();           // general accumulation buffer
            StringBuffer etag = new StringBuffer();         // entity name buffer
            String tagName = null;
            String lvalue = null;                           // current attribute name
            String rvalue = null;                           // current attribute value
            Map<String, String> attrs = null;
            doc.startDocument(or);

            if (or.hasErrors())
                return or;

            int line = 1, col = 0;
            boolean eol = false;

            // TODO add support for surrogate pair, set String Builder 32
            while ((c = reader.read()) != -1) {
                // We need to map \r, \r\n, and \n to \n
                // See XML spec section 2.11
                if (c == '\n' && eol) {
                    eol = false;
                    continue;
                }
                else if (eol) {
                    eol = false;
                }
                else if (c == '\n') {
                    line++;
                    col = 0;
                }
                else if (c == '\r') {
                    eol = true;
                    c = '\n';
                    line++;
                    col = 0;
                }
                else {
                    col++;
                }

                if (mode == TEXT) {
                    // We are between tags collecting text.
                    if (c == '<') {
                        st.push(new Integer(mode));
                        mode = START_TAG;

                        if (sb.length() > 0) {
                            doc.text(or, sb.toString(), false, line, col);

                            if (or.hasErrors())
                                return or;

                            sb.setLength(0);
                        }
                    }
                    else if (c == '&') {
                        st.push(new Integer(mode));
                        mode = ENTITY;
                        etag.setLength(0);
                    }
                    else
                        sb.append((char) c);
                }
                else if (mode == CLOSE_TAG) {
                    // we are processing a closing tag: e.g. </foo>
                    if (c == '>') {
                        mode = popMode(st);
                        tagName = sb.toString();
                        sb.setLength(0);
                        depth--;
                        doc.endElement(or, tagName);

                        if (or.hasErrors())
                            return or;

                        if (depth == 0) {
                            // root element closed: the document is done
                            doc.endDocument(or);
                            return or;
                        }
                    }
                    else {
                        sb.append((char) c);
                    }
                }
                else if (mode == CDATA) {
                    // we are processing CDATA
                    if (c == '>' && sb.toString().endsWith("]]")) {
                        sb.setLength(sb.length() - 2);
                        doc.text(or, sb.toString(), true, line, col);

                        if (or.hasErrors())
                            return or;

                        sb.setLength(0);
                        mode = popMode(st);
                    }
                    else
                        sb.append((char) c);
                }
                else if (mode == COMMENT) {
                    // we are processing a comment.  We are inside
                    // the <!-- .... --> looking for the -->.
                    if (c == '>' && sb.toString().endsWith("--")) {
                        sb.setLength(0);
                        mode = popMode(st);
                    }
                    else
                        sb.append((char) c);
                }
                else if (mode == PRE) {
                    // We are outside the root tag element
                    if (c == '<') {
                        mode = TEXT;
                        st.push(new Integer(mode));
                        mode = START_TAG;
                    }
                }
                else if (mode == IGNORE) {
                    // We are inside one of these <? ... ?>
                    // or one of these <!DOCTYPE ... >
                    if (c == '>') {
                        mode = popMode(st);

                        if (mode == TEXT)
                            mode = PRE;
                    }
                }
                else if (mode == START_TAG) {
                    // we have just seen a < and
                    // are wondering what we are looking at
                    // <foo>, </foo>, <!-- ... --->, etc.
                    mode = popMode(st);

                    if (c == '/') {
                        st.push(new Integer(mode));
                        mode = CLOSE_TAG;
                    }
                    else if (c == '?') {
                        mode = IGNORE;
                    }
                    else if (c == '!') {
                        st.push(new Integer(mode));
                        mode = OPEN_INSTRUCTION;
                        tagName = null;
                        attrs = new Hashtable<String, String>();
                        sb.append((char) c);
                    }
                    else if (c == '_' || Character.isLetter(c)) {
                        st.push(new Integer(mode));
                        mode = OPEN_TAG;
                        tagName = null;
                        attrs = new Hashtable<String, String>();
                        sb.append((char) c);
                    }
                    else {
                        or.errorTr(242, line, col, (char) c);
                        return or;
                    }
                }
                else if (mode == ENTITY) {
                    // we are processing an entity, e.g. &lt;, &#187;, etc.
                    if (c == ';') {
                        mode = popMode(st);
                        String cent = etag.toString();
                        etag.setLength(0);

                        /*
                        if (cent.equals("lt"))
                            sb.append('<');
                        else if (cent.equals("gt"))
                            sb.append('>');
                        else if (cent.equals("amp"))
                            sb.append('&');
                        else if (cent.equals("quot"))
                            sb.append('"');
                        else if (cent.equals("apos"))
                            sb.append('\'');
                        else if (cent.startsWith("#x"))
                            sb.append((char) Integer.parseInt(cent.substring(2), 16));
                        else if (cent.startsWith("#"))
                            sb.append((char) Integer.parseInt(cent.substring(1)));
                        else {
                            // Just keep the unknown entity
                            sb.append('&');
                            sb.append(cent);
                            sb.append(';');

                            // exc("Unknown entity: &" + cent + ";", line, col);
                        }
                        */

                        // APW Just keep the entity (entities are passed through verbatim,
                        // not decoded — see the retained block above for the old decoding)
                        sb.append('&');
                        sb.append(cent);
                        sb.append(';');
                    }
                    else {
                        etag.append((char) c);
                    }
                }
                else if (mode == SINGLE_TAG) {
                    // we have just seen something like this:
                    // <foo a="b"/
                    // and are looking for the final >.
                    if (tagName == null)
                        tagName = sb.toString();

                    if (c != '>') {
                        or.errorTr(241, line, col, tagName);
                        return or;
                    }

                    doc.element(or, tagName, attrs, line, col);
                    //doc.endElement(tagName);

                    if (or.hasErrors())
                        return or;

                    if (depth == 0) {
                        // self-closing root element: document complete
                        doc.endDocument(or);
                        return or;
                    }

                    sb.setLength(0);
                    attrs = new HashMap<String, String>();
                    tagName = null;
                    mode = popMode(st);
                }
                else if (mode == OPEN_INSTRUCTION) {
                    // we are processing <!... >.
                    // We already have the first character
                    if (c == '>') {
                        or.errorTr(241, line, col, sb.toString());
                        return or;
                    }
                    else if (c == '-' && sb.toString().equals("!-")) {
                        mode = COMMENT;
                    }
                    else if (c == '[' && sb.toString().equals("![CDATA")) {
                        mode = CDATA;
                        sb.setLength(0);
                    }
                    else if (c == 'E' && sb.toString().equals("!DOCTYP")) {
                        sb.setLength(0);
                        mode = IGNORE;
                    }
                    else if (Character.isWhitespace((char) c)) {
                        or.errorTr(240, line, col, sb.toString());
                        return or;
                    }
                    else {
                        // We have a character to add to the instruction
                        // Check for length
                        if (sb.length() > 9) {
                            or.errorTr(239, line, col, sb.toString());
                            return or;
                        }

                        // Check for validity
                        if (c == '-' || c == '[' || Character.isLetter(c))
                            sb.append((char) c);
                        else {
                            or.errorTr(238, line, col, c, sb.toString());
                            return or;
                        }
                    }
                }
                else if (mode == OPEN_TAG) {
                    // we are processing something
                    // like this <foo ... >.
                    // We already have the first character
                    if (c == '>') {
                        if (tagName == null)
                            tagName = sb.toString();

                        sb.setLength(0);
                        depth++;
                        doc.startElement(or, tagName, attrs, line, col);

                        if (or.hasErrors())
                            return or;

                        tagName = null;
                        attrs = new HashMap<String, String>();
                        mode = popMode(st);
                    }
                    else if (c == '/') {
                        mode = SINGLE_TAG;
                    }
                    else if (Character.isWhitespace((char) c)) {
                        tagName = sb.toString();
                        sb.setLength(0);
                        mode = IN_TAG;
                    }
                    else {
                        // We have a character to add to the name
                        // Check for validity
                        if (Character.isLetterOrDigit(c) || c == '_' || c == '-' || c == '.' || c == ':')
                            sb.append((char) c);
                        else {
                            or.errorTr(237, line, col, c);
                            return or;
                        }
                    }
                }
                else if (mode == QUOTE) {
                    // We are processing the quoted right-hand side
                    // of an element's attribute.
                    if (c == quotec) {
                        rvalue = sb.toString();
                        sb.setLength(0);
                        attrs.put(lvalue, rvalue);
                        mode = IN_TAG;
                        // See section the XML spec, section 3.3.3
                        // on normalization processing.
                    }
                    else if (" \r\n\u0009".indexOf(c) >= 0) {
                        // whitespace inside an attribute value normalizes to a single space
                        sb.append(' ');
                    }
                    else if (c == '&') {
                        st.push(new Integer(mode));
                        mode = ENTITY;
                        etag.setLength(0);
                    }
                    else {
                        sb.append((char) c);
                    }
                }
                else if (mode == ATTRIBUTE_RVALUE) {
                    // after '=': waiting for the opening quote of the value
                    if (c == '"' || c == '\'') {
                        quotec = c;
                        mode = QUOTE;
                    }
                    else if (Character.isWhitespace((char) c)) {
                        ;
                    }
                    else {
                        or.errorTr(236, line, col);
                        return or;
                    }
                }
                else if (mode == ATTRIBUTE_LVALUE) {
                    // collecting an attribute name
                    if (Character.isWhitespace((char) c)) {
                        lvalue = sb.toString();
                        sb.setLength(0);
                        mode = ATTRIBUTE_EQUAL;
                    }
                    else if (c == '=') {
                        lvalue = sb.toString();
                        sb.setLength(0);
                        mode = ATTRIBUTE_RVALUE;
                    }
                    else {
                        sb.append((char) c);
                    }
                }
                else if (mode == ATTRIBUTE_EQUAL) {
                    // attribute name collected: expecting '='
                    if (c == '=') {
                        mode = ATTRIBUTE_RVALUE;
                    }
                    else if (Character.isWhitespace((char) c)) {
                        ;
                    }
                    else {
                        or.errorTr(235, line, col);
                        return or;
                    }
                }
                else if (mode == IN_TAG) {
                    // inside a start tag after its name: attributes or tag end
                    if (c == '>') {
                        mode = popMode(st);
                        doc.startElement(or, tagName, attrs, line, col);

                        if (or.hasErrors())
                            return or;

                        depth++;
                        tagName = null;
                        attrs = new HashMap<String, String>();
                    }
                    else if (c == '/') {
                        mode = SINGLE_TAG;
                    }
                    else if (Character.isWhitespace((char) c)) {
                        ;
                    }
                    else {
                        mode = ATTRIBUTE_LVALUE;
                        sb.append((char) c);
                    }
                }
            }

            // input exhausted without returning to the document level
            if (mode != PRE)
                or.errorTr(234, line, col);

            return or;
        }
        catch (IOException x) {
            or.error("Erroring reading XML: " + x);
        }
        finally {
            IOUtil.closeQuietly(reader);
        }

        return or;
    }
}
package me.dablakbandit.dabcore.zip.io;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.zip.CRC32;

import me.dablakbandit.dabcore.zip.core.HeaderWriter;
import me.dablakbandit.dabcore.zip.crypto.AESEncrpyter;
import me.dablakbandit.dabcore.zip.crypto.IEncrypter;
import me.dablakbandit.dabcore.zip.crypto.StandardEncrypter;
import me.dablakbandit.dabcore.zip.exception.ZipException;
import me.dablakbandit.dabcore.zip.model.AESExtraDataRecord;
import me.dablakbandit.dabcore.zip.model.CentralDirectory;
import me.dablakbandit.dabcore.zip.model.EndCentralDirRecord;
import me.dablakbandit.dabcore.zip.model.FileHeader;
import me.dablakbandit.dabcore.zip.model.LocalFileHeader;
import me.dablakbandit.dabcore.zip.model.ZipModel;
import me.dablakbandit.dabcore.zip.model.ZipParameters;
import me.dablakbandit.dabcore.zip.util.InternalZipConstants;
import me.dablakbandit.dabcore.zip.util.Raw;
import me.dablakbandit.dabcore.zip.util.Zip4jConstants;
import me.dablakbandit.dabcore.zip.util.Zip4jUtil;

/**
 * Output stream that writes zip entries with optional standard or AES
 * encryption, maintaining the zip model (local/central headers, offsets, CRC)
 * as data is written. One entry at a time: putNextEntry -> write -> closeEntry,
 * then finish() to emit the central directory.
 */
public class CipherOutputStream extends BaseOutputStream{

	protected OutputStream outputStream;          // underlying (possibly split) destination
	private File sourceFile;                      // source being added; null for external streams
	protected FileHeader fileHeader;              // central-directory header of the current entry
	protected LocalFileHeader localFileHeader;    // local header of the current entry
	private IEncrypter encrypter;                 // null when the current entry is not encrypted
	protected ZipParameters zipParameters;        // (cloned) parameters of the current entry
	protected ZipModel zipModel;
	private long totalBytesWritten;               // bytes written to outputStream overall
	protected CRC32 crc;                          // CRC of the current entry's uncompressed data
	private long bytesWrittenForThisFile;         // compressed+crypto bytes of the current entry
	private byte[] pendingBuffer;                 // partial AES block awaiting more data
	private int pendingBufferLength;
	private long totalBytesRead;                  // uncompressed byte count (external streams)

	public CipherOutputStream(OutputStream outputStream, ZipModel zipModel){
		this.outputStream = outputStream;
		initZipModel(zipModel);
		crc = new CRC32();
		this.totalBytesWritten = 0;
		this.bytesWrittenForThisFile = 0;
		this.pendingBuffer = new byte[InternalZipConstants.AES_BLOCK_SIZE];
		this.pendingBufferLength = 0;
		this.totalBytesRead = 0;
	}

	/**
	 * Begins a new zip entry: validates the input, builds the file/local
	 * headers, writes the local header (plus any split signature and crypto
	 * preamble), and prepares the encrypter and CRC for the entry's data.
	 *
	 * @param file          source file, or null when the source is an external stream
	 * @param zipParameters per-entry settings; cloned so the caller's copy is untouched
	 */
	public void putNextEntry(File file, ZipParameters zipParameters) throws ZipException{
		if(!zipParameters.isSourceExternalStream() && file == null){
			throw new ZipException("input file is null");
		}
		if(!zipParameters.isSourceExternalStream() && !Zip4jUtil.checkFileExists(file)){
			throw new ZipException("input file does not exist");
		}
		try{
			sourceFile = file;
			this.zipParameters = (ZipParameters)zipParameters.clone();
			if(!zipParameters.isSourceExternalStream()){
				if(sourceFile.isDirectory()){
					// Directories carry no data: no encryption, no compression.
					this.zipParameters.setEncryptFiles(false);
					this.zipParameters.setEncryptionMethod(-1);
					this.zipParameters.setCompressionMethod(Zip4jConstants.COMP_STORE);
				}
			}else{
				if(!Zip4jUtil.isStringNotNullAndNotEmpty(this.zipParameters.getFileNameInZip())){
					throw new ZipException("file name is empty for external stream");
				}
				// A trailing slash marks a directory entry even for external streams.
				if(this.zipParameters.getFileNameInZip().endsWith("/") || this.zipParameters.getFileNameInZip().endsWith("\\")){
					this.zipParameters.setEncryptFiles(false);
					this.zipParameters.setEncryptionMethod(-1);
					this.zipParameters.setCompressionMethod(Zip4jConstants.COMP_STORE);
				}
			}
			createFileHeader();
			createLocalFileHeader();
			if(zipModel.isSplitArchive()){
				// First entry of a split archive starts with the split signature.
				if(zipModel.getCentralDirectory() == null || zipModel.getCentralDirectory().getFileHeaders() == null || zipModel.getCentralDirectory().getFileHeaders().size() == 0){
					byte[] intByte = new byte[4];
					Raw.writeIntLittleEndian(intByte, 0, (int)InternalZipConstants.SPLITSIG);
					outputStream.write(intByte);
					totalBytesWritten += 4;
				}
			}
			// Record where this entry's local header begins (offset in the archive).
			if(this.outputStream instanceof SplitOutputStream){
				if(totalBytesWritten == 4){
					fileHeader.setOffsetLocalHeader(4);
				}else{
					fileHeader.setOffsetLocalHeader(((SplitOutputStream)outputStream).getFilePointer());
				}
			}else{
				if(totalBytesWritten == 4){
					fileHeader.setOffsetLocalHeader(4);
				}else{
					fileHeader.setOffsetLocalHeader(totalBytesWritten);
				}
			}
			HeaderWriter headerWriter = new HeaderWriter();
			totalBytesWritten += headerWriter.writeLocalFileHeader(zipModel, localFileHeader, outputStream);
			if(this.zipParameters.isEncryptFiles()){
				initEncrypter();
				if(encrypter != null){
					if(zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD){
						// Standard (ZipCrypto): 12-byte random header precedes the data.
						byte[] headerBytes = ((StandardEncrypter)encrypter).getHeaderBytes();
						outputStream.write(headerBytes);
						totalBytesWritten += headerBytes.length;
						bytesWrittenForThisFile += headerBytes.length;
					}else if(zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
						// AES: salt + password verifier precede the data.
						byte[] saltBytes = ((AESEncrpyter)encrypter).getSaltBytes();
						byte[] passwordVerifier = ((AESEncrpyter)encrypter).getDerivedPasswordVerifier();
						outputStream.write(saltBytes);
						outputStream.write(passwordVerifier);
						totalBytesWritten += saltBytes.length + passwordVerifier.length;
						bytesWrittenForThisFile += saltBytes.length + passwordVerifier.length;
					}
				}
			}
			crc.reset();
		}catch(CloneNotSupportedException e){
			throw new ZipException(e);
		}catch(ZipException e){
			throw e;
		}catch(Exception e){
			throw new ZipException(e);
		}
	}

	// Builds the encrypter for the current entry based on the configured method.
	private void initEncrypter() throws ZipException{
		if(!zipParameters.isEncryptFiles()){
			encrypter = null;
			return;
		}
		switch(zipParameters.getEncryptionMethod()){
		case Zip4jConstants.ENC_METHOD_STANDARD:
			// Since we do not know the crc here, we use the modification time
			// for encrypting.
			encrypter = new StandardEncrypter(zipParameters.getPassword(), (localFileHeader.getLastModFileTime() & 0x0000ffff) << 16);
			break;
		case Zip4jConstants.ENC_METHOD_AES:
			encrypter = new AESEncrpyter(zipParameters.getPassword(), zipParameters.getAesKeyStrength());
			break;
		default:
			throw new ZipException("invalid encprytion method");
		}
	}

	// Ensures the zip model and all of its sub-structures exist, and inherits
	// split-archive settings from a SplitOutputStream destination.
	private void initZipModel(ZipModel zipModel){
		if(zipModel == null){
			this.zipModel = new ZipModel();
		}else{
			this.zipModel = zipModel;
		}
		if(this.zipModel.getEndCentralDirRecord() == null){
			this.zipModel.setEndCentralDirRecord(new EndCentralDirRecord());
		}
		if(this.zipModel.getCentralDirectory() == null){
			this.zipModel.setCentralDirectory(new CentralDirectory());
		}
		if(this.zipModel.getCentralDirectory().getFileHeaders() == null){
			this.zipModel.getCentralDirectory().setFileHeaders(new ArrayList());
		}
		if(this.zipModel.getLocalFileHeaderList() == null){
			this.zipModel.setLocalFileHeaderList(new ArrayList());
		}
		if(this.outputStream instanceof SplitOutputStream){
			if(((SplitOutputStream)outputStream).isSplitZipFile()){
				this.zipModel.setSplitArchive(true);
				this.zipModel.setSplitLength(((SplitOutputStream)outputStream).getSplitLength());
			}
		}
		this.zipModel.getEndCentralDirRecord().setSignature(InternalZipConstants.ENDSIG);
	}

	@Override
	public void write(int bval) throws IOException{
		byte[] b = new byte[1];
		b[0] = (byte)bval;
		write(b, 0, 1);
	}

	@Override
	public void write(byte[] b) throws IOException{
		if(b == null){
			throw new NullPointerException();
		}
		if(b.length == 0){
			return;
		}
		write(b, 0, b.length);
	}

	/**
	 * Writes entry data. For AES entries, data is buffered into full 16-byte
	 * blocks (partial trailing blocks are held in pendingBuffer until more
	 * data arrives or closeEntry() flushes them).
	 */
	@Override
	public void write(byte[] b, int off, int len) throws IOException{
		if(len == 0){
			return;
		}
		if(zipParameters.isEncryptFiles() && zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
			if(pendingBufferLength != 0){
				if(len >= InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength){
					// Complete the pending block with the head of this write, then continue.
					System.arraycopy(b, off, pendingBuffer, pendingBufferLength, InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength);
					encryptAndWrite(pendingBuffer, 0, pendingBuffer.length);
					off = InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength;
					len = len - off;
					pendingBufferLength = 0;
				}else{
					// Still not a full block: keep accumulating.
					System.arraycopy(b, off, pendingBuffer, pendingBufferLength, len);
					pendingBufferLength += len;
					return;
				}
			}
			if(len != 0 && len % 16 != 0){
				// Hold back the trailing partial block.
				System.arraycopy(b, len + off - len % 16, pendingBuffer, 0, len % 16);
				pendingBufferLength = len % 16;
				len = len - pendingBufferLength;
			}
		}
		if(len != 0){
			encryptAndWrite(b, off, len);
		}
	}

	// Encrypts in place (when an encrypter is active) and forwards to the
	// underlying stream, updating the byte counters.
	private void encryptAndWrite(byte[] b, int off, int len) throws IOException{
		if(encrypter != null){
			try{
				encrypter.encryptData(b, off, len);
			}catch(ZipException e){
				throw new IOException(e.getMessage());
			}
		}
		outputStream.write(b, off, len);
		totalBytesWritten += len;
		bytesWrittenForThisFile += len;
	}

	/**
	 * Finalizes the current entry: flushes any pending AES block, appends the
	 * AES MAC, fixes up sizes and CRC in both headers (AES entries store CRC 0
	 * per the AE spec), records the headers in the model, and writes the
	 * extended local header.
	 */
	public void closeEntry() throws IOException, ZipException{
		if(this.pendingBufferLength != 0){
			encryptAndWrite(pendingBuffer, 0, pendingBufferLength);
			pendingBufferLength = 0;
		}
		if(this.zipParameters.isEncryptFiles() && this.zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
			if(encrypter instanceof AESEncrpyter){
				// 10-byte authentication code trails the AES-encrypted data.
				outputStream.write(((AESEncrpyter)encrypter).getFinalMac());
				bytesWrittenForThisFile += 10;
				totalBytesWritten += 10;
			}else{
				throw new ZipException("invalid encrypter for AES encrypted file");
			}
		}
		fileHeader.setCompressedSize(bytesWrittenForThisFile);
		localFileHeader.setCompressedSize(bytesWrittenForThisFile);
		if(zipParameters.isSourceExternalStream()){
			fileHeader.setUncompressedSize(totalBytesRead);
			if(localFileHeader.getUncompressedSize() != totalBytesRead){
				localFileHeader.setUncompressedSize(totalBytesRead);
			}
		}
		long crc32 = crc.getValue();
		if(fileHeader.isEncrypted()){
			if(fileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
				crc32 = 0;
			}
		}
		if(zipParameters.isEncryptFiles() && zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
			fileHeader.setCrc32(0);
			localFileHeader.setCrc32(0);
		}else{
			fileHeader.setCrc32(crc32);
			localFileHeader.setCrc32(crc32);
		}
		zipModel.getLocalFileHeaderList().add(localFileHeader);
		zipModel.getCentralDirectory().getFileHeaders().add(fileHeader);
		HeaderWriter headerWriter = new HeaderWriter();
		totalBytesWritten += headerWriter.writeExtendedLocalHeader(localFileHeader, outputStream);
		// Reset per-entry state for the next putNextEntry().
		crc.reset();
		bytesWrittenForThisFile = 0;
		encrypter = null;
		totalBytesRead = 0;
	}

	/** Writes the central directory and end-of-central-directory record. */
	public void finish() throws IOException, ZipException{
		zipModel.getEndCentralDirRecord().setOffsetOfStartOfCentralDir(totalBytesWritten);
		HeaderWriter headerWriter = new HeaderWriter();
		headerWriter.finalizeZipFile(zipModel, outputStream);
	}

	@Override
	public void close() throws IOException{
		if(outputStream != null){
			outputStream.close();
		}
	}

	// Builds the central-directory FileHeader for the current entry from the
	// (cloned) zip parameters and the source file's metadata.
	private void createFileHeader() throws ZipException{
		this.fileHeader = new FileHeader();
		fileHeader.setSignature((int)InternalZipConstants.CENSIG);
		fileHeader.setVersionMadeBy(20);
		fileHeader.setVersionNeededToExtract(20);
		if(zipParameters.isEncryptFiles() && zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){
			// NOTE(review): the AES method constant is stored in the compression-method
			// field here, with the real method in the AES extra data record — confirm
			// against the AE-x spec handling elsewhere in this library.
			fileHeader.setCompressionMethod(Zip4jConstants.ENC_METHOD_AES);
			fileHeader.setAesExtraDataRecord(generateAESExtraDataRecord(zipParameters));
		}else{
			fileHeader.setCompressionMethod(zipParameters.getCompressionMethod());
		}
		if(zipParameters.isEncryptFiles()){
			fileHeader.setEncrypted(true);
			fileHeader.setEncryptionMethod(zipParameters.getEncryptionMethod());
		}
		String fileName = null;
		if(zipParameters.isSourceExternalStream()){
			fileHeader.setLastModFileTime((int)Zip4jUtil.javaToDosTime(System.currentTimeMillis()));
			if(!Zip4jUtil.isStringNotNullAndNotEmpty(zipParameters.getFileNameInZip())){
				throw new ZipException("fileNameInZip is null or empty");
			}
			fileName = zipParameters.getFileNameInZip();
		}else{
			fileHeader.setLastModFileTime((int)Zip4jUtil.javaToDosTime(Zip4jUtil.getLastModifiedFileTime(sourceFile, zipParameters.getTimeZone())));
			fileHeader.setUncompressedSize(sourceFile.length());
			fileName = Zip4jUtil.getRelativeFileName(sourceFile.getAbsolutePath(),
zipParameters.getRootFolderInZip(), zipParameters.getDefaultFolderPath()); } if(!Zip4jUtil.isStringNotNullAndNotEmpty(fileName)){ throw new ZipException("fileName is null or empty. unable to create file header"); } fileHeader.setFileName(fileName); if(Zip4jUtil.isStringNotNullAndNotEmpty(zipModel.getFileNameCharset())){ fileHeader.setFileNameLength(Zip4jUtil.getEncodedStringLength(fileName, zipModel.getFileNameCharset())); }else{ fileHeader.setFileNameLength(Zip4jUtil.getEncodedStringLength(fileName)); } if(outputStream instanceof SplitOutputStream){ fileHeader.setDiskNumberStart(((SplitOutputStream)outputStream).getCurrSplitFileCounter()); }else{ fileHeader.setDiskNumberStart(0); } int fileAttrs = 0; if(!zipParameters.isSourceExternalStream()){ fileAttrs = getFileAttributes(sourceFile); } byte[] externalFileAttrs = {(byte)fileAttrs, 0, 0, 0}; fileHeader.setExternalFileAttr(externalFileAttrs); if(zipParameters.isSourceExternalStream()){ fileHeader.setDirectory(fileName.endsWith("/") || fileName.endsWith("\\")); }else{ fileHeader.setDirectory(this.sourceFile.isDirectory()); } if(fileHeader.isDirectory()){ fileHeader.setCompressedSize(0); fileHeader.setUncompressedSize(0); }else{ if(!zipParameters.isSourceExternalStream()){ long fileSize = Zip4jUtil.getFileLengh(sourceFile); if(zipParameters.getCompressionMethod() == Zip4jConstants.COMP_STORE){ if(zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD){ fileHeader.setCompressedSize(fileSize + InternalZipConstants.STD_DEC_HDR_SIZE); }else if(zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES){ int saltLength = 0; switch(zipParameters.getAesKeyStrength()){ case Zip4jConstants.AES_STRENGTH_128: saltLength = 8; break; case Zip4jConstants.AES_STRENGTH_256: saltLength = 16; break; default: throw new ZipException("invalid aes key strength, cannot determine key sizes"); } fileHeader.setCompressedSize(fileSize + saltLength + InternalZipConstants.AES_AUTH_LENGTH + 2); // 2 // is // password 
// verifier }else{ fileHeader.setCompressedSize(0); } }else{ fileHeader.setCompressedSize(0); } fileHeader.setUncompressedSize(fileSize); } } if(zipParameters.isEncryptFiles() && zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD){ fileHeader.setCrc32(zipParameters.getSourceFileCRC()); } byte[] shortByte = new byte[2]; shortByte[0] = Raw.bitArrayToByte(generateGeneralPurposeBitArray(fileHeader.isEncrypted(), zipParameters.getCompressionMethod())); boolean isFileNameCharsetSet = Zip4jUtil.isStringNotNullAndNotEmpty(zipModel.getFileNameCharset()); if(isFileNameCharsetSet && zipModel.getFileNameCharset().equalsIgnoreCase(InternalZipConstants.CHARSET_UTF8) || !isFileNameCharsetSet && Zip4jUtil.detectCharSet(fileHeader.getFileName()).equals(InternalZipConstants.CHARSET_UTF8)){ shortByte[1] = 8; }else{ shortByte[1] = 0; } fileHeader.setGeneralPurposeFlag(shortByte); } private void createLocalFileHeader() throws ZipException{ if(fileHeader == null){ throw new ZipException("file header is null, cannot create local file header"); } this.localFileHeader = new LocalFileHeader(); localFileHeader.setSignature((int)InternalZipConstants.LOCSIG); localFileHeader.setVersionNeededToExtract(fileHeader.getVersionNeededToExtract()); localFileHeader.setCompressionMethod(fileHeader.getCompressionMethod()); localFileHeader.setLastModFileTime(fileHeader.getLastModFileTime()); localFileHeader.setUncompressedSize(fileHeader.getUncompressedSize()); localFileHeader.setFileNameLength(fileHeader.getFileNameLength()); localFileHeader.setFileName(fileHeader.getFileName()); localFileHeader.setEncrypted(fileHeader.isEncrypted()); localFileHeader.setEncryptionMethod(fileHeader.getEncryptionMethod()); localFileHeader.setAesExtraDataRecord(fileHeader.getAesExtraDataRecord()); localFileHeader.setCrc32(fileHeader.getCrc32()); localFileHeader.setCompressedSize(fileHeader.getCompressedSize()); localFileHeader.setGeneralPurposeFlag(fileHeader.getGeneralPurposeFlag().clone()); } 
/**
 * Maps a file's hidden/read-only state onto the internal DOS-style attribute constants.
 *
 * @param file file to inspect; must not be {@code null}
 * @return the matching {@code InternalZipConstants} mode constant, or 0 if the file does not exist
 * @throws ZipException if {@code file} is {@code null}
 */
private int getFileAttributes(File file) throws ZipException {
    if (file == null) {
        throw new ZipException("input file is null, cannot get file attributes");
    }
    if (!file.exists()) {
        return 0;
    }
    boolean hidden = file.isHidden();
    if (file.isDirectory()) {
        return hidden ? InternalZipConstants.FOLDER_MODE_HIDDEN : InternalZipConstants.FOLDER_MODE_NONE;
    }
    boolean readOnly = !file.canWrite();
    if (readOnly && hidden) {
        return InternalZipConstants.FILE_MODE_READ_ONLY_HIDDEN;
    }
    if (readOnly) {
        return InternalZipConstants.FILE_MODE_READ_ONLY;
    }
    if (hidden) {
        return InternalZipConstants.FILE_MODE_HIDDEN;
    }
    return InternalZipConstants.FILE_MODE_NONE;
}

/**
 * Builds the 8-entry bit array for the low byte of the general-purpose flag.
 * Bit 0 marks encryption; bit 3 is always set (sizes/CRC follow in a data descriptor).
 */
private int[] generateGeneralPurposeBitArray(boolean isEncrpyted, int compressionMethod) {
    int[] flagBits = new int[8];
    flagBits[0] = isEncrpyted ? 1 : 0;
    if (compressionMethod == Zip4jConstants.COMP_DEFLATE) {
        // Deflate compression-level bits (1 and 2) would be set here; left at their
        // default of 0, which matches "normal" compression.
    } else {
        flagBits[1] = 0;
        flagBits[2] = 0;
    }
    flagBits[3] = 1;
    return flagBits;
}

/**
 * Builds the AES extra data record (header id 0x9901) for an AES-encrypted entry.
 * Version number is always 2: only the MAC is stored/verified and CRC is ignored,
 * as the specification mandates for version 2 records.
 *
 * @throws ZipException if {@code parameters} is {@code null} or the key strength is unsupported
 */
private AESExtraDataRecord generateAESExtraDataRecord(ZipParameters parameters) throws ZipException {
    if (parameters == null) {
        throw new ZipException("zip parameters are null, cannot generate AES Extra Data record");
    }
    AESExtraDataRecord aesRecord = new AESExtraDataRecord();
    aesRecord.setSignature(InternalZipConstants.AESSIG);
    aesRecord.setDataSize(7);
    aesRecord.setVendorID("AE");
    aesRecord.setVersionNumber(2);
    int keyStrength = parameters.getAesKeyStrength();
    if (keyStrength != Zip4jConstants.AES_STRENGTH_128 && keyStrength != Zip4jConstants.AES_STRENGTH_256) {
        throw new ZipException("invalid AES key strength, cannot generate AES Extra data record");
    }
    aesRecord.setAesStrength(keyStrength);
    aesRecord.setCompressionMethod(parameters.getCompressionMethod());
    return aesRecord;
}

/**
 * Reduces the per-file written-bytes counter by {@code value}. Non-positive values and
 * values larger than the current counter are ignored.
 */
public void decrementCompressedFileSize(int value) {
    if (value > 0 && value <= this.bytesWrittenForThisFile) {
        this.bytesWrittenForThisFile -= value;
    }
}

/** Adds {@code toUpdate} to the total bytes read; non-positive deltas are ignored. */
protected void updateTotalBytesRead(int toUpdate) {
    if (toUpdate > 0) {
        totalBytesRead += toUpdate;
    }
}

/** Sets the file currently being added to the archive. */
public void setSourceFile(File sourceFile) {
    this.sourceFile = sourceFile;
}

/** Returns the file currently being added to the archive. */
public File getSourceFile() {
    return sourceFile;
}
}
package de.uni_hildesheim.sse.smell.filter.util; import java.io.BufferedReader; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.regex.Pattern; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import de.uni_hildesheim.sse.smell.IProgressPrinter; import de.uni_hildesheim.sse.smell.data.AnnotatedConfigurationMismatchResult; import de.uni_hildesheim.sse.smell.data.AnnotatedConfigurationMismatchResult.Location; import de.uni_hildesheim.sse.smell.data.AnnotatedConfigurationMismatchResult.UnkownLocationException; import de.uni_hildesheim.sse.smell.data.IDataElement; import de.uni_hildesheim.sse.smell.data.VariableWithSolutions; import de.uni_hildesheim.sse.smell.filter.FilterException; import de.uni_hildesheim.sse.smell.filter.IFilter; import de.uni_hildesheim.sse.smell.filter.WrongFilterException; /** * <b>Input</b>: {@link VariableWithSolutions}s<br /> * <b>Output</b>: {@link AnnotatedConfigurationMismatchResult}s<br /> * * @author Adam Krafczyk */ public class ConfigurationMismatchSolutionAnnotator implements IFilter { private File linuxTree; private Line[] relevantLines; private HashSet<String> variablesWithPrompt; public ConfigurationMismatchSolutionAnnotator(String linuxTree, String rsfFile) throws Exception { this.linuxTree = new File(linuxTree); if (!this.linuxTree.isDirectory()) { throw new FileNotFoundException(linuxTree + " is not a directory"); } loadRsfPrompts(rsfFile); } @Override public List<IDataElement> run(List<IDataElement> data, IProgressPrinter progressPrinter) throws 
FilterException { List<IDataElement> result = new ArrayList<IDataElement>(); progressPrinter.start("ConfigurationMismatchSolutionAnnotator - Searching Linux Tree", 1); List<Line> relevantLinesList = findRelevantLines(linuxTree); relevantLines = relevantLinesList.toArray(new Line[] {}); relevantLinesList = null; progressPrinter.finishedOne(); progressPrinter.start(this, data.size()); for (IDataElement element : data) { if (!(element instanceof VariableWithSolutions)) { throw new WrongFilterException(ConfigurationMismatchSolutionAnnotator.class, VariableWithSolutions.class, element); } VariableWithSolutions var = (VariableWithSolutions) element; String varName = var.getVariable(); if (varName.endsWith("_MODULE")) { varName = varName.substring(0, varName.length() - "_MODULE".length()); } boolean hasPrompt = variablesWithPrompt.contains(varName); AnnotatedConfigurationMismatchResult annotatedSmell = new AnnotatedConfigurationMismatchResult(var); annotatedSmell.setHasPrompt(hasPrompt); for (Location loc : findLocations(annotatedSmell.getVariable())) { try { annotatedSmell.addLocation(loc); } catch (UnkownLocationException e) { System.out.println("Can't add location for variable \"" + annotatedSmell.getVariable() + "\": " + e.getMessage()); } } result.add(annotatedSmell); progressPrinter.finishedOne(); } result.sort(new Comparator<IDataElement>() { @Override public int compare(IDataElement o1, IDataElement o2) { AnnotatedConfigurationMismatchResult r1 = (AnnotatedConfigurationMismatchResult) o1; AnnotatedConfigurationMismatchResult r2 = (AnnotatedConfigurationMismatchResult) o2; return r1.getVariable().compareTo(r2.getVariable()); } }); return result; } private List<Location> findLocations(String variable) { List<Location> results = new LinkedList<>(); // don't differentiate between module and normal variable if (variable.endsWith("_MODULE")) { variable = variable.substring(0, variable.length() - "_MODULE".length()); } String kconfigName = variable; if 
(kconfigName.startsWith("CONFIG_")) { kconfigName = kconfigName.substring("CONFIG_".length()); } Pattern kconfigPattern = Pattern.compile("^\\s*(menu)?config\\s*" + kconfigName + "\\s*$"); Pattern sourcePattern = Pattern.compile(".*" + variable + "(_MODULE)?(([^A-Za-z0-9_].*)|$)"); for (Line line : relevantLines) { if (line.filename.contains(File.separatorChar + "Kconfig")) { if (kconfigPattern.matcher(line.text).matches()) { results.add(new Location(line.filename, line.lineNumber)); } } else { if (sourcePattern.matcher(line.text).matches()) { results.add(new Location(line.filename, line.lineNumber)); } } } return results; } private List<Line> findRelevantLines(File fileTree) { List<Line> results = new LinkedList<>(); File[] filtered = fileTree.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory() || pathname.getName().endsWith(".c") || pathname.getName().endsWith(".S") || pathname.getName().endsWith(".h") || pathname.getName().startsWith("Kconfig") || pathname.getName().startsWith("Makefile") || pathname.getName().startsWith("Kbuild"); } }); for (File file : filtered) { if (file.isDirectory()) { results.addAll(findRelevantLines(file)); } else { try { if (file.getName().startsWith("Kconfig")) { results.addAll(findRelevantLinesInFile("^\\s*(menu)?config\\s*[A-Za-z0-9_]+\\s*$", file)); } else { results.addAll(findRelevantLinesInFile(".*CONFIG_.*", file)); } } catch (IOException e) { System.out.println("Can't search in file \"" + file.getName() + "\":"); e.printStackTrace(System.out); } } } return results; } private List<Line> findRelevantLinesInFile(String regex, File file) throws IOException { List<Line> results = new LinkedList<>(); Pattern pattern = Pattern.compile(regex); BufferedReader in = new BufferedReader(new FileReader(file)); String line; int lineNumber = 1; while ((line = in.readLine()) != null) { if (pattern.matcher(line).matches()) { results.add(new Line(relativeName(file, linuxTree), lineNumber, 
line)); } lineNumber++; } in.close(); return results; } private static String relativeName(File file, File directory) { return file.getAbsolutePath().substring(directory.getAbsolutePath().length()); } private static final class Line { private String filename; private int lineNumber; private String text; public Line(String filename, int lineNumber, String text) { this.filename = filename; this.lineNumber = lineNumber; this.text = text; } } private void loadRsfPrompts(String rsfModelFile) throws ParserConfigurationException, SAXException, IOException { variablesWithPrompt = new HashSet<>(); File rsfFile = new File(rsfModelFile); try (FileInputStream in = new FileInputStream(rsfFile)) { // skip everything until the "\n.\n" char[] lastThree = {' ', ' ', ' '}; int index = 0; while (lastThree[0] != '\n' || lastThree[1] != '.' || lastThree[2] != '\n') { int read = in.read(); if (read == -1) { throw new IOException("Beginning of XML document inside RSF file not found."); } lastThree[index] = (char) read; index = (index + 1) % lastThree.length; } // Start of XML part found DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document document = db.parse(in); NodeList nodeList = document.getElementsByTagName("name"); for (int x = 0; x < nodeList.getLength(); x++) { // search prompt Node node = nodeList.item(x); boolean hasprompt = false; while ((node = node.getNextSibling()) != null) { if (node.hasAttributes()) { Node attr = node.getAttributes().getNamedItem("type"); if (attr != null) { String type = attr.getTextContent(); if (type.equals("prompt") || type.equals("menu")) { hasprompt = true; break; } } } } if (hasprompt) { variablesWithPrompt.add("CONFIG_" + nodeList.item(x).getTextContent()); } } } } }
package org.apache.maven.plugin.assembly.archive.phase;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.maven.model.Model;
import org.apache.maven.plugin.assembly.AssemblerConfigurationSource;
import org.apache.maven.plugin.assembly.DefaultAssemblyContext;
import org.apache.maven.plugin.assembly.InvalidAssemblerConfigurationException;
import org.apache.maven.plugin.assembly.archive.ArchiveCreationException;
import org.apache.maven.plugin.assembly.archive.phase.wrappers.RepoBuilderConfigSourceWrapper;
import org.apache.maven.plugin.assembly.archive.phase.wrappers.RepoInfoWrapper;
import org.apache.maven.plugin.assembly.format.AssemblyFormattingException;
import org.apache.maven.plugin.assembly.model.Assembly;
import org.apache.maven.plugin.assembly.model.Repository;
import org.apache.maven.plugin.assembly.testutils.MockManager;
import org.apache.maven.plugin.assembly.testutils.TestFileManager;
import org.apache.maven.plugin.assembly.utils.TypeConversionUtils;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.repository.RepositoryAssembler;
import org.apache.maven.shared.repository.RepositoryAssemblyException;
import org.codehaus.plexus.archiver.Archiver;
import org.codehaus.plexus.archiver.ArchiverException;
import org.codehaus.plexus.archiver.FileSet;
import org.codehaus.plexus.archiver.util.DefaultFileSet;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.logging.console.ConsoleLogger;
import org.codehaus.plexus.util.StringUtils;
import org.easymock.AbstractMatcher;
import org.easymock.MockControl;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import junit.framework.Assert;
import junit.framework.TestCase;

/**
 * Unit tests for {@code RepositoryAssemblyPhase}, built with EasyMock 1.x
 * record/replay mocks (JUnit 3 style). Each MockAndControlFor* helper wraps
 * one mock plus its {@link MockControl}; expectations are recorded before
 * {@code mm.replayAll()} and checked by {@code mm.verifyAll()}. The order of
 * the recorded calls matters to the mock framework.
 */
public class RepositoryAssemblyPhaseTest
    extends TestCase
{

    // Creates/cleans temp files shared by all tests in this class.
    private final TestFileManager fileManager = new TestFileManager( "repository-phase.test.", "" );

    @Override
    public void tearDown()
        throws IOException
    {
        fileManager.cleanUp();
    }

    // An assembly without any <repository> entries must not touch the archiver
    // or the repository assembler beyond the recorded baseline expectations.
    public void testExecute_ShouldNotIncludeRepositoryIfNonSpecifiedInAssembly()
        throws ArchiveCreationException, AssemblyFormattingException, InvalidAssemblerConfigurationException
    {
        final MockManager mm = new MockManager();

        final MockAndControlForRepositoryAssembler macRepo = new MockAndControlForRepositoryAssembler( mm );
        final MockAndControlForArchiver macArchiver = new MockAndControlForArchiver( mm );
        final MockAndControlForConfigSource macCS = new MockAndControlForConfigSource( mm );

        final File tempRoot = fileManager.createTempDir();

        macCS.expectGetTemporaryRootDirectory( tempRoot );

        final Assembly assembly = new Assembly();

        assembly.setId( "test" );

        mm.replayAll();

        createPhase( macRepo.repositoryAssembler, new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) ).execute( assembly,
                                                                                                             macArchiver.archiver,
                                                                                                             macCS.configSource,
                                                                                                             new DefaultAssemblyContext() );

        mm.verifyAll();
    }

    // A single <repository> with 777 modes must be assembled into the temp root
    // and added to the archiver as a directory fileset with mode overrides.
    public void testExecute_ShouldIncludeOneRepository()
        throws ArchiveCreationException, AssemblyFormattingException, InvalidAssemblerConfigurationException
    {
        final MockManager mm = new MockManager();

        final MockAndControlForRepositoryAssembler macRepo = new MockAndControlForRepositoryAssembler( mm );
        final MockAndControlForArchiver macArchiver = new MockAndControlForArchiver( mm );
        final MockAndControlForConfigSource macCS = new MockAndControlForConfigSource( mm );

        final File tempRoot = fileManager.createTempDir();

        macCS.expectGetTemporaryRootDirectory( tempRoot );
        macCS.expectGetProject( new MavenProject( new Model() ) );
        macCS.expectGetFinalName( "final-name" );

        final Assembly assembly = new Assembly();

        assembly.setId( "test" );

        final Repository repo = new Repository();

        repo.setOutputDirectory( "out" );
        repo.setDirectoryMode( "777" );
        repo.setFileMode( "777" );

        final int mode = TypeConversionUtils.modeToInt( "777", new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) );

        final File outDir = new File( tempRoot, "out" );

        macArchiver.expectModeChange( -1, -1, mode, mode, true );
        macArchiver.expectAddDirectory( outDir, "out/", null, null );

        macRepo.expectAssemble( outDir, repo, macCS.configSource );

        assembly.addRepository( repo );

        mm.replayAll();

        createPhase( macRepo.repositoryAssembler, new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) ).execute( assembly,
                                                                                                             macArchiver.archiver,
                                                                                                             macCS.configSource,
                                                                                                             new DefaultAssemblyContext() );

        mm.verifyAll();
    }

    // Builds the phase under test with the given (mock) assembler and logger.
    private RepositoryAssemblyPhase createPhase( final RepositoryAssembler repositoryAssembler, final Logger logger )
    {
        final RepositoryAssemblyPhase phase = new RepositoryAssemblyPhase( repositoryAssembler );
        phase.enableLogging( logger );

        return phase;
    }

    // Mock + control for the plexus Archiver, with helpers to record the
    // expected addFileSet and mode-change call sequences.
    private final class MockAndControlForArchiver
    {
        Archiver archiver;

        MockControl control;

        public MockAndControlForArchiver( final MockManager mockManager )
        {
            control = MockControl.createControl( Archiver.class );
            mockManager.add( control );

            archiver = (Archiver) control.getMock();
        }

        // Records an expected addFileSet call; the custom matcher compares
        // FileSets field by field (directory, prefix, includes, excludes)
        // because DefaultFileSet does not implement equals().
        public void expectAddDirectory( final File outDir, final String location, final String[] includes,
                                        final String[] excludes )
        {
            try
            {
                final DefaultFileSet fs = new DefaultFileSet();
                fs.setDirectory( outDir );
                fs.setPrefix( location );
                fs.setIncludes( includes );
                fs.setExcludes( excludes );

                archiver.addFileSet( fs );
            }
            catch ( final ArchiverException e )
            {
                Assert.fail( "Should never happen." );
            }

            control.setMatcher( new AbstractMatcher()
            {

                @Override
                protected boolean argumentMatches( final Object expected, final Object actual )
                {
                    final FileSet e = (FileSet) expected;
                    final FileSet a = (FileSet) actual;

                    if ( !eq( e.getDirectory(), a.getDirectory() ) )
                    {
                        System.out.println( "FileSet directory expected: " + e.getDirectory() + "\nActual: "
                                        + a.getDirectory() );

                        return false;
                    }

                    if ( !eq( e.getPrefix(), a.getPrefix() ) )
                    {
                        System.out.println( "FileSet prefix expected: " + e.getPrefix() + "\nActual: " + a.getPrefix() );

                        return false;
                    }

                    if ( !areq( e.getIncludes(), a.getIncludes() ) )
                    {
                        System.out.println( "FileSet includes expected: " + arToStr( e.getIncludes() ) + "\nActual: "
                                        + arToStr( a.getIncludes() ) );

                        return false;
                    }

                    if ( !areq( e.getExcludes(), a.getExcludes() ) )
                    {
                        System.out.println( "FileSet excludes expected: " + arToStr( e.getExcludes() ) + "\nActual: "
                                        + arToStr( a.getExcludes() ) );

                        return false;
                    }

                    return true;
                }

                @Override
                protected String argumentToString( final Object argument )
                {
                    final FileSet a = (FileSet) argument;

                    return argument == null ? "Null FileSet" : "FileSet:[dir=" + a.getDirectory() + ", prefix: "
                                    + a.getPrefix() + "\nincludes:\n" + arToStr( a.getIncludes() ) + "\nexcludes:\n"
                                    + arToStr( a.getExcludes() ) + "]";
                }

                private String arToStr( final String[] array )
                {
                    return array == null ? "-EMPTY-" : StringUtils.join( array, "\n\t" );
                }

                // Array equality treating null and empty arrays as equal.
                private boolean areq( final String[] first, final String[] second )
                {
                    if ( ( first == null || first.length == 0 ) && ( second == null || second.length == 0 ) )
                    {
                        return true;
                    }
                    else if ( first == null && second != null )
                    {
                        return false;
                    }
                    else if ( first != null && second == null )
                    {
                        return false;
                    }
                    else
                    {
                        return Arrays.equals( first, second );
                    }
                }

                // Null-safe equals.
                private boolean eq( final Object first, final Object second )
                {
                    if ( first == null && second == null )
                    {
                        return true;
                    }
                    else if ( first == null && second != null )
                    {
                        return false;
                    }
                    else if ( first != null && second == null )
                    {
                        return false;
                    }
                    else
                    {
                        return first.equals( second );
                    }
                }

            } );

            control.setVoidCallable( MockControl.ONE_OR_MORE );
        }

        // Records the expected get/set mode sequence: the phase reads the
        // current override modes, optionally sets the new ones, then restores
        // the defaults afterwards.
        void expectModeChange( final int defaultDirMode, final int defaultFileMode, final int dirMode,
                               final int fileMode, final boolean expectTwoSets )
        {
            archiver.getOverrideDirectoryMode();
            control.setReturnValue( defaultDirMode );

            archiver.getOverrideFileMode();
            control.setReturnValue( defaultFileMode );

            if ( expectTwoSets )
            {
                archiver.setDirectoryMode( dirMode );
                archiver.setFileMode( fileMode );
            }

            archiver.setDirectoryMode( defaultDirMode );
            archiver.setFileMode( defaultFileMode );
        }

        // public void expectAddFile( File file, String outputLocation, int fileMode )
        // {
        // try
        // {
        // archiver.addFile( file, outputLocation, fileMode );
        // }
        // catch ( ArchiverException e )
        // {
        // Assert.fail( "Should never happen." );
        // }
        // }
    }

    // Mock + control for the AssemblerConfigurationSource; getMavenSession()
    // is stubbed to null for any number of calls in the constructor.
    private final class MockAndControlForConfigSource
    {
        AssemblerConfigurationSource configSource;

        MockControl control;

        public MockAndControlForConfigSource( final MockManager mockManager )
        {
            control = MockControl.createControl( AssemblerConfigurationSource.class );
            mockManager.add( control );

            configSource = (AssemblerConfigurationSource) control.getMock();

            configSource.getMavenSession();
            control.setReturnValue( null, MockControl.ZERO_OR_MORE );
        }

        public void expectGetProject( final MavenProject project )
        {
            configSource.getProject();
            control.setReturnValue( project, MockControl.ONE_OR_MORE );
        }

        public void expectGetFinalName( final String finalName )
        {
            configSource.getFinalName();
            control.setReturnValue( finalName, MockControl.ONE_OR_MORE );
        }

        public void expectGetTemporaryRootDirectory( final File tempRoot )
        {
            configSource.getTemporaryRootDirectory();
            control.setReturnValue( tempRoot, MockControl.ONE_OR_MORE );
        }

        //
        // public void expectGetBasedir( File basedir )
        // {
        // configSource.getBasedir();
        // control.setReturnValue( basedir, MockControl.ONE_OR_MORE );
        // }
    }

    // Mock + control for the RepositoryAssembler; expectAssemble uses
    // ALWAYS_MATCHER because the wrapper arguments are recreated per call.
    private final class MockAndControlForRepositoryAssembler
    {
        RepositoryAssembler repositoryAssembler;

        MockControl control;

        MockAndControlForRepositoryAssembler( final MockManager mockManager )
        {
            control = MockControl.createControl( RepositoryAssembler.class );
            mockManager.add( control );

            repositoryAssembler = (RepositoryAssembler) control.getMock();
        }

        public void expectAssemble( final File dir, final Repository repo,
                                    final AssemblerConfigurationSource configSource )
        {
            try
            {
                repositoryAssembler.buildRemoteRepository( dir, new RepoInfoWrapper( repo ),
                                                           new RepoBuilderConfigSourceWrapper( configSource ) );
                control.setMatcher( MockControl.ALWAYS_MATCHER );
            }
            catch ( final RepositoryAssemblyException e )
            {
                Assert.fail( "Should never happen" );
            }

            control.setVoidCallable( MockControl.ONE_OR_MORE );
        }
    }
}
/*
 * Copyright 2004,2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.dataservices.core.test.sql;

import org.apache.axiom.om.OMElement;
import org.wso2.carbon.dataservices.core.test.DataServiceBaseTestCase;
import org.wso2.carbon.dataservices.core.test.util.TestUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Class to represent stored procedure test cases.
 * Each helper calls one data-service operation on a deployed service endpoint
 * ({@code baseEpr + serviceName}) and validates the response against an XSD,
 * failing the JUnit test on any exception.
 */
public abstract class AbstractStoredProcedureServiceTest extends DataServiceBaseTestCase {

    // Full endpoint reference of the service under test (baseEpr + serviceName).
    private String epr = null;

    public AbstractStoredProcedureServiceTest(String testName, String serviceName) {
        super(testName);
        this.epr = this.baseEpr + serviceName;
    }

    /**
     * Test with a stored procedure call with no params.
     * Validates the result structure against the customer XSD.
     */
    protected void storedProcNoParams() {
        TestUtils.showMessage(this.epr + " - storedProcNoParams");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_noparam_op", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.CUSTOMER_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a stored procedure call with params.
     * Also checks that the returned customerNumber echoes the request parameter.
     */
    protected void storedProcWithParams() {
        TestUtils.showMessage(this.epr + " - storedProcWithParams");
        Map<String, String> params = new HashMap<String, String>();
        params.put("customerNumber", "103");
        params.put("contactLastName", "Schmitt");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_withparam_op", params);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.CUSTOMER_XSD_PATH));
            String val = TestUtils.getFirstValue(result,
                    "/Customers/Customer/customerNumber",
                    TestUtils.DEFAULT_DS_WS_NAMESPACE);
            assertTrue(params.get("customerNumber").equals(val));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call - 1.
     */
    protected void storedProcNested1() {
        TestUtils.showMessage(this.epr + " - storedProcNested1");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_op1", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call - 1 - For DateTime
     */
    protected void storedProcNested1ForDateTime(){
        TestUtils.showMessage(this.epr + " - storedProcNested1ForDateTime");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_for_date_time_op1", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_WITH_DATE_TIME_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call - 2.
     */
    protected void storedProcNested2() {
        TestUtils.showMessage(this.epr + " - storedProcNested2");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_op2", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call -2 - For DateTime
     */
    protected void storedProcNested2ForDateTime(){
        TestUtils.showMessage(this.epr + " - storedProcNested2ForDateTime");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_for_date_time_op2", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_WITH_DATE_TIME_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call - 3.
     */
    protected void storedProcNested3() {
        TestUtils.showMessage(this.epr + " - storedProcNested3");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_op3", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a nested stored procedure call -3 - For DateTime
     */
    protected void storedProcNested3ForDateTime(){
        TestUtils.showMessage(this.epr + " - storedProcNested3ForDateTime");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_procedure_nested_for_date_time_op3", null);
            assertTrue(TestUtils.validateResultStructure(result,
                    TestUtils.PAYMENT_INFO_NESTED_WITH_DATE_TIME_XSD_PATH));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a result set in config of the stored procedure
     * but does not return anything. i.e Insert query
     * Inserts a row (params include XML-special characters), then verifies via a
     * count operation that exactly one matching row exists.
     */
    protected void storedProcWithWrongResultSet() {
        TestUtils.showMessage(this.epr + " - storedProcWithWrongResultSet");
        Map<String, String> params = new HashMap<String, String>();
        params.put("customerNumber", "5005");
        params.put("customerName", "Will<&''\"\"&>Smith");
        params.put("contactLastName", "&Silva");
        params.put("contactFirstName", "Kelvin");
        try {
            TestUtils.checkForService(this.epr);
            TestUtils.callOperation(this.epr,
                    "stored_procedure_with_wrong_result_set", params);
            Map<String, String> params1 = new HashMap<String, String>();
            params1.put("customerNumber", "5005");
            OMElement result = TestUtils.callOperation(this.epr,
                    "select_op_count", params1);
            String val = TestUtils.getFirstValue(result,
                    "/Customers/CustomerCount/customerCount",
                    TestUtils.DEFAULT_DS_WS_NAMESPACE);
            assertTrue(Integer.parseInt(val) == 1);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a stored function call with no params.
     * Asserts the returned average credit limit is positive.
     */
    protected void storedFuncNoParams() {
        TestUtils.showMessage(this.epr + " - storedFuncNoParams");
        try {
            TestUtils.checkForService(this.epr);
            OMElement result = TestUtils.callOperation(this.epr,
                    "stored_func_noparam_op", null);
            String val = TestUtils.getFirstValue(result,
                    "/CreditLimit/AverageCreditLimit/value",
                    TestUtils.DEFAULT_DS_WS_NAMESPACE);
            assertTrue(Double.parseDouble(val) > 0.0);
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Test with a stored function call with params.
*/ protected void storedFuncWithParams() { TestUtils.showMessage(this.epr + " - storedFuncWithParams"); Map<String, String> params = new HashMap<String, String>(); params.put("customerNumber", "128"); try { TestUtils.checkForService(this.epr); OMElement result = TestUtils.callOperation(this.epr, "stored_func_withparam_op", params); String val = TestUtils.getFirstValue(result, "/Customer/Phone/value", TestUtils.DEFAULT_DS_WS_NAMESPACE); assertTrue(val.equals("+49 69 66 90 2555")); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Test with a stored function call with params. */ protected void storedProcWithFaulty() { TestUtils.showMessage(this.epr + " - storedProcWithFaulty"); Map<String, String> params = new HashMap<String, String>(); params.put("customerNumber", "103"); params.put("contactLastName", "Scott"); try { TestUtils.checkForService(this.epr); TestUtils.callUpdateOperation(this.epr, "stored_procedure_withfaulty_op", params); OMElement result = TestUtils.callOperation(this.epr, "stored_procedure_withparam_op", params); OMElement ss = result.getFirstElement(); assertTrue(ss==null); } catch (Exception e) { e.printStackTrace(); } } }
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.examples.rocktour.persistence; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.time.DayOfWeek; import java.time.LocalDate; import java.time.temporal.ChronoUnit; import java.time.temporal.TemporalAdjusters; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Objects; import java.util.TreeSet; import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.util.CellRangeAddress; import org.apache.poi.xssf.usermodel.XSSFCell; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.optaplanner.examples.common.persistence.AbstractXlsxSolutionFileIO; import org.optaplanner.examples.rocktour.app.RockTourApp; import org.optaplanner.examples.rocktour.domain.RockBus; import org.optaplanner.examples.rocktour.domain.RockLocation; import org.optaplanner.examples.rocktour.domain.RockShow; import org.optaplanner.examples.rocktour.domain.RockTourConstraintConfiguration; import org.optaplanner.examples.rocktour.domain.RockTourSolution; import static java.util.stream.Collectors.*; 
import static org.optaplanner.examples.rocktour.domain.RockTourConstraintConfiguration.*;

/**
 * XLSX file IO for the rock tour example: reads a {@link RockTourSolution}
 * from a workbook and writes one back, sheet by sheet.
 * <p>
 * IMPORTANT: the reader and writer walk the spreadsheet with a sequential
 * cursor (nextRow/nextCell/readHeaderCell, inherited from the abstract
 * reader/writer base classes). The order of these calls IS the file format —
 * reordering any of them silently corrupts reading or writing.
 */
public class RockTourXlsxFileIO extends AbstractXlsxSolutionFileIO<RockTourSolution> {

    /**
     * Reads a solution from the given .xlsx file.
     *
     * @param inputSolutionFile the workbook to read
     * @return the parsed solution, never null
     * @throws IllegalStateException wrapping any IO or parsing failure, with the file path in the message
     */
    @Override
    public RockTourSolution read(File inputSolutionFile) {
        try (InputStream in = new BufferedInputStream(new FileInputStream(inputSolutionFile))) {
            XSSFWorkbook workbook = new XSSFWorkbook(in);
            return new RockTourXlsxReader(workbook).read();
        } catch (IOException | RuntimeException e) {
            throw new IllegalStateException("Failed reading inputSolutionFile ("
                    + inputSolutionFile + ").", e);
        }
    }

    /**
     * Sequential reader for the four sheets: Configuration, Bus, Shows and
     * (optionally) Driving time.
     */
    private static class RockTourXlsxReader extends AbstractXlsxReader<RockTourSolution> {

        public RockTourXlsxReader(XSSFWorkbook workbook) {
            super(workbook, RockTourApp.SOLVER_CONFIG);
        }

        /** Reads every sheet in order and assembles the solution. */
        @Override
        public RockTourSolution read() {
            solution = new RockTourSolution();
            readConfiguration();
            readBus();
            readShowList();
            readDrivingTime();
            return solution;
        }

        /** Reads the "Configuration" sheet: tour name plus all constraint weights. */
        private void readConfiguration() {
            nextSheet("Configuration");
            nextRow();
            readHeaderCell("Tour name");
            solution.setTourName(nextStringCell().getStringCellValue());
            // The tour name is reused elsewhere (e.g. as a file name), so it must be "safe".
            if (!VALID_NAME_PATTERN.matcher(solution.getTourName()).matches()) {
                throw new IllegalStateException(currentPosition() + ": The tour name ("
                        + solution.getTourName() + ") must match to the regular expression ("
                        + VALID_NAME_PATTERN + ").");
            }
            RockTourConstraintConfiguration constraintConfiguration = new RockTourConstraintConfiguration();
            // Parameter lines: one labeled row each, read in this fixed order.
            readLongConstraintParameterLine(EARLY_LATE_BREAK_DRIVING_SECONDS,
                    constraintConfiguration::setEarlyLateBreakDrivingSecondsBudget,
                    "Maximum driving time in seconds between 2 shows on the same day.");
            readLongConstraintParameterLine(NIGHT_DRIVING_SECONDS,
                    constraintConfiguration::setNightDrivingSecondsBudget,
                    "Maximum driving time in seconds per night between 2 shows.");
            readLongConstraintParameterLine(HOS_WEEK_DRIVING_SECONDS_BUDGET,
                    constraintConfiguration::setHosWeekDrivingSecondsBudget,
                    "Maximum driving time in seconds since last weekend rest.");
            readIntConstraintParameterLine(HOS_WEEK_CONSECUTIVE_DRIVING_DAYS_BUDGET,
                    constraintConfiguration::setHosWeekConsecutiveDrivingDaysBudget,
                    "Maximum driving days since last weekend rest.");
            readIntConstraintParameterLine(HOS_WEEK_REST_DAYS,
                    constraintConfiguration::setHosWeekRestDays,
                    "Minimum weekend rest in days (actually in full night sleeps: 2 days guarantees only 32 hours).");
            readScoreConstraintHeaders();
            constraintConfiguration.setId(0L);
            // Score weights: one labeled row each, same fixed order as the writer.
            constraintConfiguration.setRequiredShow(readScoreConstraintLine(REQUIRED_SHOW,
                    "Penalty per required show that isn't assigned."));
            constraintConfiguration.setUnassignedShow(readScoreConstraintLine(UNASSIGNED_SHOW,
                    "Penalty per show that isn't assigned."));
            constraintConfiguration.setRevenueOpportunity(readScoreConstraintLine(REVENUE_OPPORTUNITY,
                    "Reward per revenue opportunity."));
            constraintConfiguration.setDrivingTimeToShowPerSecond(readScoreConstraintLine(DRIVING_TIME_TO_SHOW_PER_SECOND,
                    "Driving time cost per second, excluding after the last show."));
            constraintConfiguration.setDrivingTimeToBusArrivalPerSecond(readScoreConstraintLine(DRIVING_TIME_TO_BUS_ARRIVAL_PER_SECOND,
                    "Driving time cost per second from the last show to the bus arrival location."));
            constraintConfiguration.setDelayShowCostPerDay(readScoreConstraintLine(DELAY_SHOW_COST_PER_DAY,
                    "Cost per day for each day that a show is assigned later in the schedule."));
            constraintConfiguration.setShortenDrivingTimePerMillisecondSquared(readScoreConstraintLine(SHORTEN_DRIVING_TIME_PER_MILLISECOND_SQUARED,
                    "Avoid long driving times: Penalty per millisecond of continuous driving time squared."));
            solution.setConstraintConfiguration(constraintConfiguration);
        }

        /** Reads the "Bus" sheet: start and end location/date of the tour bus. */
        private void readBus() {
            nextSheet("Bus");
            RockBus bus = new RockBus();
            bus.setId(0L);
            nextRow();
            readHeaderCell("");
            readHeaderCell("City name");
            readHeaderCell("Latitude");
            readHeaderCell("Longitude");
            readHeaderCell("Date");
            nextRow();
            readHeaderCell("Bus start");
            String startCityName = nextStringCell().getStringCellValue();
            double startLatitude = nextNumericCell().getNumericCellValue();
            double startLongitude = nextNumericCell().getNumericCellValue();
            bus.setStartLocation(new RockLocation(startCityName, startLatitude, startLongitude));
            bus.setStartDate(LocalDate.parse(nextStringCell().getStringCellValue(), DAY_FORMATTER));
            nextRow();
            readHeaderCell("Bus end");
            String endCityName = nextStringCell().getStringCellValue();
            double endLatitude = nextNumericCell().getNumericCellValue();
            double endLongitude = nextNumericCell().getNumericCellValue();
            bus.setEndLocation(new RockLocation(endCityName, endLatitude, endLongitude));
            bus.setEndDate(LocalDate.parse(nextStringCell().getStringCellValue(), DAY_FORMATTER));
            solution.setBus(bus);
        }

        /**
         * Reads the "Shows" sheet: one row per show (venue, location, duration,
         * revenue, required flag) followed by one availability cell per tour day.
         * An availability cell is "available" unless it carries the unavailable
         * background color; its text content must be empty.
         */
        private void readShowList() {
            nextSheet("Shows");
            LocalDate startDate = solution.getBus().getStartDate();
            LocalDate endDate = solution.getBus().getEndDate();
            // Two decorative header rows: 7 blank cells, then "Availability" / month names.
            nextRow(false);
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("Availability");
            nextRow(false);
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            readHeaderCell("");
            for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                // A month label appears on the first tour day and on every 1st of a month.
                if (date.equals(startDate) || date.getDayOfMonth() == 1) {
                    readHeaderCell(MONTH_FORMATTER.format(date));
                } else {
                    readHeaderCell("");
                }
            }
            nextRow(false);
            readHeaderCell("Venue name");
            readHeaderCell("City name");
            readHeaderCell("Latitude");
            readHeaderCell("Longitude");
            readHeaderCell("Duration (in days)");
            readHeaderCell("Revenue opportunity");
            readHeaderCell("Required");
            for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                readHeaderCell(Integer.toString(date.getDayOfMonth()));
            }
            List<RockShow> showList = new ArrayList<>(currentSheet.getLastRowNum() - 1);
            long id = 0L;
            while (nextRow()) {
                RockShow show = new RockShow();
                show.setId(id);
                show.setVenueName(nextStringCell().getStringCellValue());
                String cityName = nextStringCell().getStringCellValue();
                double latitude = nextNumericCell().getNumericCellValue();
                double longitude = nextNumericCell().getNumericCellValue();
                show.setLocation(new RockLocation(cityName, latitude, longitude));
                double duration = nextNumericCell().getNumericCellValue();
                // Durations are stored internally in half-days; the sheet value must be a multiple of 0.5.
                int durationInHalfDay = (int) (duration * 2.0);
                if (((double) durationInHalfDay) != duration * 2.0) {
                    throw new IllegalStateException(currentPosition() + ": The duration (" + duration
                            + ") should be a multiple of 0.5.");
                }
                if (durationInHalfDay < 1) {
                    throw new IllegalStateException(currentPosition() + ": The duration (" + duration
                            + ") should be at least 0.5.");
                }
                show.setDurationInHalfDay(durationInHalfDay);
                double revenueOpportunityDouble = nextNumericCell().getNumericCellValue();
                if (revenueOpportunityDouble != (double) (int) revenueOpportunityDouble) {
                    throw new IllegalStateException(currentPosition() + ": The show (" + show.getVenueName()
                            + ")'s revenue opportunity (" + revenueOpportunityDouble
                            + ") must be an integer number.");
                }
                show.setRevenueOpportunity((int) revenueOpportunityDouble);
                show.setRequired(nextBooleanCell().getBooleanCellValue());
                NavigableSet<LocalDate> availableDateSet = new TreeSet<>();
                for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                    XSSFCell cell = nextStringCell();
                    // Availability is encoded purely via cell background color, not text.
                    if (!Objects.equals(extractColor(cell, UNAVAILABLE_COLOR), UNAVAILABLE_COLOR)) {
                        availableDateSet.add(date);
                    }
                    if (!cell.getStringCellValue().isEmpty()) {
                        throw new IllegalStateException(currentPosition() + ": The cell ("
                                + cell.getStringCellValue() + ") should be empty.");
                    }
                }
                if (availableDateSet.isEmpty()) {
                    throw new IllegalStateException(currentPosition() + ": The show (" + show.getVenueName()
                            + ")'s has no available date: all dates are unavailable.");
                }
                show.setAvailableDateSet(availableDateSet);
                id++;
                showList.add(show);
            }
            solution.setShowList(showList);
        }

        /**
         * Reads the optional "Driving time" sheet: a symmetric-looking matrix
         * keyed by (latitude, longitude) pairs. If the sheet is absent, driving
         * times are generated from air distances instead.
         */
        private void readDrivingTime() {
            // Group all distinct locations (bus endpoints + show venues) by lat/long pair,
            // in a deterministic order that matches the writer's column/row order.
            Map<Pair<Double, Double>, List<RockLocation>> latLongToLocationMap = Stream.concat(
                    Stream.of(solution.getBus().getStartLocation(), solution.getBus().getEndLocation()),
                    solution.getShowList().stream().map(RockShow::getLocation))
                    .distinct()
                    .sorted(Comparator.comparing(RockLocation::getLatitude).thenComparing(RockLocation::getLongitude).thenComparing(RockLocation::getCityName))
                    .collect(groupingBy(location -> Pair.of(location.getLatitude(), location.getLongitude()),
                            LinkedHashMap::new, toList()));
            if (!hasSheet("Driving time")) {
                // No matrix sheet: fall back to air distance as the driving time.
                latLongToLocationMap.forEach((fromLatLong, fromLocationList) -> {
                    for (RockLocation fromLocation : fromLocationList) {
                        fromLocation.setDrivingSecondsMap(new LinkedHashMap<>(fromLocationList.size()));
                    }
                    latLongToLocationMap.forEach((toLatLong, toLocationList) -> {
                        long drivingTime = 0L;
                        for (RockLocation fromLocation : fromLocationList) {
                            for (RockLocation toLocation : toLocationList) {
                                // TODO use haversine air distance and convert to average seconds for truck
                                drivingTime = fromLocation.getAirDistanceTo(toLocation);
                                fromLocation.getDrivingSecondsMap().put(toLocation, drivingTime);
                            }
                        }
                    });
                });
                return;
            }
            nextSheet("Driving time");
            nextRow();
            readHeaderCell("Driving time in seconds. Delete this sheet to generate it from air distances.");
            nextRow();
            readHeaderCell("Latitude");
            readHeaderCell("");
            for (Pair<Double, Double> latLong : latLongToLocationMap.keySet()) {
                readHeaderCell(latLong.getLeft());
            }
            nextRow();
            readHeaderCell("");
            readHeaderCell("Longitude");
            for (Pair<Double, Double> latLong : latLongToLocationMap.keySet()) {
                readHeaderCell(latLong.getRight());
            }
            latLongToLocationMap.forEach((fromLatLong, fromLocationList) -> {
                nextRow();
                readHeaderCell(fromLatLong.getLeft());
                readHeaderCell(fromLatLong.getRight());
                for (RockLocation fromLocation : fromLocationList) {
                    fromLocation.setDrivingSecondsMap(new LinkedHashMap<>(fromLocationList.size()));
                }
                latLongToLocationMap.forEach((toLatLong, toLocationList) -> {
                    double drivingTimeDouble = nextNumericCell().getNumericCellValue();
                    long drivingTime = (long) drivingTimeDouble;
                    if (drivingTimeDouble != (double) drivingTime) {
                        throw new IllegalStateException(currentPosition() + ": The driving time ("
                                + drivingTimeDouble + ") should be an integer number.");
                    }
                    // One matrix cell covers every location pair sharing the same lat/long pairs.
                    for (RockLocation fromLocation : fromLocationList) {
                        for (RockLocation toLocation : toLocationList) {
                            fromLocation.getDrivingSecondsMap().put(toLocation, drivingTime);
                        }
                    }
                });
            });
        }

    }

    /**
     * Writes the solution to the given .xlsx file.
     *
     * @param solution           the solution to write
     * @param outputSolutionFile destination file, overwritten if it exists
     * @throws IllegalStateException wrapping any IO failure
     */
    @Override
    public void write(RockTourSolution solution, File outputSolutionFile) {
        try (FileOutputStream out = new FileOutputStream(outputSolutionFile)) {
            Workbook workbook = new RockTourXlsxWriter(solution).write();
            workbook.write(out);
        } catch (IOException | RuntimeException e) {
            throw new IllegalStateException("Failed writing outputSolutionFile ("
                    + outputSolutionFile + ") for solution (" + solution + ").", e);
        }
    }

    /**
     * Sequential writer: emits the same sheets the reader expects, plus the
     * read-only "Stops" and score views.
     */
    private static class RockTourXlsxWriter extends AbstractXlsxWriter<RockTourSolution> {

        public RockTourXlsxWriter(RockTourSolution solution) {
            super(solution, RockTourApp.SOLVER_CONFIG);
        }

        /** Builds the whole workbook; sheet order must mirror the reader. */
        @Override
        public Workbook write() {
            writeSetup();
            writeConfiguration();
            writeBus();
            writeShowList();
            writeDrivingTime();
            writeStopsView();
            writeScoreView(justificationList -> justificationList.stream()
                    .filter(o -> o instanceof RockShow).map(o -> ((RockShow) o).getVenueName())
                    .collect(joining(", ")));
            return workbook;
        }

        /** Writes the "Configuration" sheet; mirrors readConfiguration() exactly. */
        private void writeConfiguration() {
            nextSheet("Configuration", 1, 8, false);
            nextRow();
            nextHeaderCell("Tour name");
            nextCell().setCellValue(solution.getTourName());
            RockTourConstraintConfiguration constraintConfiguration = solution.getConstraintConfiguration();
            writeLongConstraintParameterLine(EARLY_LATE_BREAK_DRIVING_SECONDS,
                    constraintConfiguration::getEarlyLateBreakDrivingSecondsBudget,
                    "Maximum driving time in seconds between 2 shows on the same day.");
            writeLongConstraintParameterLine(NIGHT_DRIVING_SECONDS,
                    constraintConfiguration::getNightDrivingSecondsBudget,
                    "Maximum driving time in seconds per night between 2 shows.");
            writeLongConstraintParameterLine(HOS_WEEK_DRIVING_SECONDS_BUDGET,
                    constraintConfiguration::getHosWeekDrivingSecondsBudget,
                    "Maximum driving time in seconds since last weekend rest.");
            writeIntConstraintParameterLine(HOS_WEEK_CONSECUTIVE_DRIVING_DAYS_BUDGET,
                    constraintConfiguration::getHosWeekConsecutiveDrivingDaysBudget,
                    "Maximum driving days since last weekend rest.");
            writeIntConstraintParameterLine(HOS_WEEK_REST_DAYS,
                    constraintConfiguration::getHosWeekRestDays,
                    "Minimum weekend rest in days (actually in full night sleeps: 2 days guarantees only 32 hours).");
            nextRow();
            writeScoreConstraintHeaders();
            writeScoreConstraintLine(REQUIRED_SHOW, constraintConfiguration.getRequiredShow(),
                    "Penalty per required show that isn't assigned.");
            writeScoreConstraintLine(UNASSIGNED_SHOW, constraintConfiguration.getUnassignedShow(),
                    "Penalty per show that isn't assigned.");
            writeScoreConstraintLine(REVENUE_OPPORTUNITY, constraintConfiguration.getRevenueOpportunity(),
                    "Reward per revenue opportunity.");
            writeScoreConstraintLine(DRIVING_TIME_TO_SHOW_PER_SECOND, constraintConfiguration.getDrivingTimeToShowPerSecond(),
                    "Driving time cost per second, excluding after the last show.");
            writeScoreConstraintLine(DRIVING_TIME_TO_BUS_ARRIVAL_PER_SECOND, constraintConfiguration.getDrivingTimeToBusArrivalPerSecond(),
                    "Driving time cost per second from the last show to the bus arrival location.");
            writeScoreConstraintLine(DELAY_SHOW_COST_PER_DAY, constraintConfiguration.getDelayShowCostPerDay(),
                    "Cost per day for each day that a show is assigned later in the schedule.");
            writeScoreConstraintLine(SHORTEN_DRIVING_TIME_PER_MILLISECOND_SQUARED, constraintConfiguration.getShortenDrivingTimePerMillisecondSquared(),
                    "Avoid long driving times: Penalty per millisecond of continuous driving time squared.");
            autoSizeColumnsWithHeader();
        }

        /** Writes the "Bus" sheet; mirrors readBus() exactly. */
        private void writeBus() {
            nextSheet("Bus", 1, 0, false);
            nextRow();
            nextHeaderCell("");
            nextHeaderCell("City name");
            nextHeaderCell("Latitude");
            nextHeaderCell("Longitude");
            nextHeaderCell("Date");
            RockBus bus = solution.getBus();
            nextRow();
            nextHeaderCell("Bus start");
            nextCell().setCellValue(bus.getStartLocation().getCityName());
            nextCell().setCellValue(bus.getStartLocation().getLatitude());
            nextCell().setCellValue(bus.getStartLocation().getLongitude());
            nextCell().setCellValue(DAY_FORMATTER.format(bus.getStartDate()));
            nextRow();
            nextHeaderCell("Bus end");
            nextCell().setCellValue(bus.getEndLocation().getCityName());
            nextCell().setCellValue(bus.getEndLocation().getLatitude());
            nextCell().setCellValue(bus.getEndLocation().getLongitude());
            nextCell().setCellValue(DAY_FORMATTER.format(bus.getEndDate()));
            autoSizeColumnsWithHeader();
        }

        /**
         * Writes the "Shows" sheet; mirrors readShowList() exactly, adding the
         * merged "Availability" and month-name header regions and the
         * unavailable-day cell styling.
         */
        private void writeShowList() {
            nextSheet("Shows", 1, 3, false);
            LocalDate startDate = solution.getBus().getStartDate();
            LocalDate endDate = solution.getBus().getEndDate();
            nextRow();
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("Availability");
            // Merge "Availability" across the whole day-column range.
            currentSheet.addMergedRegion(new CellRangeAddress(currentRowNumber, currentRowNumber,
                    currentColumnNumber, currentColumnNumber + (int) ChronoUnit.DAYS.between(startDate, endDate) - 1));
            nextRow();
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            nextHeaderCell("");
            for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                if (date.equals(startDate) || date.getDayOfMonth() == 1) {
                    nextHeaderCell(MONTH_FORMATTER.format(date));
                    LocalDate startNextMonthDate = date.with(TemporalAdjusters.firstDayOfNextMonth());
                    if (endDate.compareTo(startNextMonthDate) < 0) {
                        startNextMonthDate = endDate;
                    }
                    // Merge the month label across that month's remaining day columns.
                    currentSheet.addMergedRegion(new CellRangeAddress(currentRowNumber, currentRowNumber,
                            currentColumnNumber, currentColumnNumber + (int) ChronoUnit.DAYS.between(date, startNextMonthDate) - 1));
                } else {
                    nextCell();
                }
            }
            nextRow();
            nextHeaderCell("Venue name");
            nextHeaderCell("City name");
            nextHeaderCell("Latitude");
            nextHeaderCell("Longitude");
            nextHeaderCell("Duration (in days)");
            nextHeaderCell("Revenue opportunity");
            nextHeaderCell("Required");
            for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                nextHeaderCell(Integer.toString(date.getDayOfMonth()));
            }
            for (RockShow show : solution.getShowList()) {
                nextRow();
                nextCell().setCellValue(show.getVenueName());
                nextCell().setCellValue(show.getLocation().getCityName());
                nextCell().setCellValue(show.getLocation().getLatitude());
                nextCell().setCellValue(show.getLocation().getLongitude());
                nextCell().setCellValue(show.getDurationInHalfDay() * 0.5);
                nextCell().setCellValue(show.getRevenueOpportunity());
                nextCell().setCellValue(show.isRequired());
                for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                    // Availability is encoded only by cell style (the reader checks color).
                    if (show.getAvailableDateSet().contains(date)) {
                        nextCell();
                    } else {
                        nextCell(unavailableStyle);
                    }
                }
            }
            autoSizeColumnsWithHeader();
        }

        /** Writes the "Driving time" matrix sheet; mirrors readDrivingTime() exactly. */
        private void writeDrivingTime() {
            nextSheet("Driving time", 2, 3, false);
            nextRow();
            nextHeaderCell("Driving time in seconds. Delete this sheet to generate it from air distances.");
            currentSheet.addMergedRegion(new CellRangeAddress(currentRowNumber, currentRowNumber,
                    currentColumnNumber, currentColumnNumber + 10));
            // Same deterministic grouping/ordering as the reader, so rows and columns line up.
            Map<Pair<Double, Double>, List<RockLocation>> latLongToLocationMap = Stream.concat(
                    Stream.of(solution.getBus().getStartLocation(), solution.getBus().getEndLocation()),
                    solution.getShowList().stream().map(RockShow::getLocation))
                    .distinct()
                    .sorted(Comparator.comparing(RockLocation::getLatitude).thenComparing(RockLocation::getLongitude).thenComparing(RockLocation::getCityName))
                    .collect(groupingBy(location -> Pair.of(location.getLatitude(), location.getLongitude()),
                            LinkedHashMap::new, toList()));
            nextRow();
            nextHeaderCell("Latitude");
            nextHeaderCell("");
            for (Pair<Double, Double> latLong : latLongToLocationMap.keySet()) {
                nextHeaderCell(latLong.getLeft());
            }
            nextRow();
            nextHeaderCell("");
            nextHeaderCell("Longitude");
            for (Pair<Double, Double> latLong : latLongToLocationMap.keySet()) {
                nextHeaderCell(latLong.getRight());
            }
            latLongToLocationMap.forEach((fromLatLong, fromLocationList) -> {
                nextRow();
                nextHeaderCell(fromLatLong.getLeft());
                nextHeaderCell(fromLatLong.getRight());
                latLongToLocationMap.forEach((toLatLong, toLocationList) -> {
                    long drivingTime = fromLocationList.get(0).getDrivingTimeTo(toLocationList.get(0));
                    // Sanity check: every location pair sharing these lat/longs must agree,
                    // because only one matrix cell is written for the whole group.
                    for (RockLocation fromLocation : fromLocationList) {
                        for (RockLocation toLocation : toLocationList) {
                            if (fromLocation.getDrivingTimeTo(toLocation) != drivingTime) {
                                throw new IllegalStateException("The driving time (" + drivingTime
                                        + ") from (" + fromLocationList.get(0) + ") to (" + toLocationList.get(0)
                                        + ") is not the driving time (" + fromLocation.getDrivingTimeTo(toLocation)
                                        + ") from (" + fromLocation + ") to (" + toLocation + ").");
                            }
                        }
                    }
                    nextCell().setCellValue(drivingTime);
                });
            });
            autoSizeColumnsWithHeader();
        }

        /**
         * Writes the read-only "Stops" view: one row per tour day (weekend days
         * styled as unavailable), the weekly driving-time total on Sundays, a
         * trailing section of unassigned shows, and the total revenue
         * opportunity loss.
         */
        private void writeStopsView() {
            nextSheet("Stops", 2, 1, true);
            nextRow();
            nextHeaderCell("Date");
            nextHeaderCell("Venue name");
            nextHeaderCell("City name");
            nextHeaderCell("Driving time");
            nextHeaderCell("Driving time per week");
            nextHeaderCell("Latitude");
            nextHeaderCell("Longitude");
            nextHeaderCell("Duration (in days)");
            nextHeaderCell("Revenue opportunity");
            nextHeaderCell("Required");
            nextHeaderCell("Available dates size");
            LocalDate startDate = solution.getBus().getStartDate();
            LocalDate endDate = solution.getBus().getEndDate();
            Map<LocalDate, List<RockShow>> dateToShowListMap = solution.getShowList().stream()
                    .filter(show -> show.getDate() != null)
                    .collect(groupingBy(RockShow::getDate));
            long drivingTimeWeekTotal = 0L;
            for (LocalDate date = startDate; date.compareTo(endDate) < 0; date = date.plusDays(1)) {
                List<RockShow> showList = dateToShowListMap.computeIfAbsent(date, k -> Collections.emptyList());
                showList.sort(Comparator.comparing(RockShow::getTimeOfDay).thenComparing(RockShow::getVenueName));
                nextRow();
                if (date.getDayOfWeek() == DayOfWeek.SATURDAY || date.getDayOfWeek() == DayOfWeek.SUNDAY) {
                    nextCell(unavailableStyle).setCellValue(DAY_FORMATTER.format(date));
                } else {
                    nextHeaderCell(DAY_FORMATTER.format(date));
                }
                if (!showList.isEmpty()) {
                    boolean first = true;
                    for (RockShow show : showList) {
                        // Additional shows on the same day each get their own row with an empty date cell.
                        if (!first) {
                            nextRow();
                            nextCell();
                        }
                        nextCell().setCellValue(show.getVenueName());
                        nextCell().setCellValue(show.getLocation().getCityName());
                        long drivingTime = show.getDrivingTimeFromPreviousStandstill();
                        drivingTimeWeekTotal += drivingTime;
                        nextCell().setCellValue(toHoursAndMinutes(drivingTime));
                        // Weekly total is flushed every Sunday.
                        if (date.getDayOfWeek() == DayOfWeek.SUNDAY) {
                            nextCell().setCellValue(toHoursAndMinutes(drivingTimeWeekTotal));
                            drivingTimeWeekTotal = 0;
                        } else {
                            nextCell();
                        }
                        nextCell().setCellValue(show.getLocation().getLatitude());
                        nextCell().setCellValue(show.getLocation().getLongitude());
                        nextCell().setCellValue(show.getDurationInHalfDay() * 0.5);
                        nextCell().setCellValue(show.getRevenueOpportunity());
                        nextCell().setCellValue(show.isRequired());
                        nextCell().setCellValue(show.getAvailableDateSet().size());
                        first = false;
                    }
                } else {
                    nextCell();
                    nextCell();
                    nextCell();
                    if (date.getDayOfWeek() == DayOfWeek.SUNDAY) {
                        nextCell().setCellValue(toHoursAndMinutes(drivingTimeWeekTotal));
                        drivingTimeWeekTotal = 0;
                    }
                }
            }
            nextRow();
            nextRow();
            long revenueOpportunityLoss = 0L;
            for (RockShow show : solution.getShowList().stream()
                    .filter(show -> show.getDate() == null)
                    .sorted(Comparator.comparing(RockShow::getVenueName))
                    .collect(toList())) {
                nextRow();
                nextHeaderCell("Unassigned");
                nextCell().setCellValue(show.getVenueName());
                nextCell().setCellValue(show.getLocation().getCityName());
                nextCell().setCellValue("0");
                nextCell().setCellValue(show.getLocation().getLatitude());
                nextCell().setCellValue(show.getLocation().getLongitude());
                nextCell().setCellValue(show.getDurationInHalfDay() * 0.5);
                nextCell().setCellValue(show.getRevenueOpportunity());
                revenueOpportunityLoss += show.getRevenueOpportunity();
                nextCell().setCellValue(show.isRequired());
                nextCell().setCellValue(show.getAvailableDateSet().size());
            }
            nextRow();
            nextHeaderCell("Total revenue opportunity loss");
            nextCell();
            nextCell();
            nextCell();
            nextCell();
            nextCell();
            nextCell();
            nextCell().setCellValue(revenueOpportunityLoss);
            autoSizeColumnsWithHeader();
        }

        /** Formats a duration in seconds as "H hours M minutes" (seconds remainder dropped). */
        private static String toHoursAndMinutes(long drivingTimeWeekTotal) {
            return (drivingTimeWeekTotal / 3600L) + " hours " + (drivingTimeWeekTotal % 3600 / 60) + " minutes";
        }

    }

}
/*========================================================================= * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * one or more patents listed at http://www.pivotal.io/patents. *========================================================================= */ package com.gemstone.gemfire.internal.cache.ha; import java.util.Iterator; import java.util.Properties; import com.gemstone.gemfire.cache.AttributesFactory; import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.CacheFactory; import com.gemstone.gemfire.cache.DataPolicy; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.RegionAttributes; import com.gemstone.gemfire.cache.Scope; import com.gemstone.gemfire.cache.client.internal.PoolImpl; import com.gemstone.gemfire.cache.util.BridgeServer; import com.gemstone.gemfire.cache30.BridgeTestCase; import com.gemstone.gemfire.distributed.DistributedSystem; import com.gemstone.gemfire.internal.AvailablePort; import dunit.DistributedTestCase; import dunit.Host; import dunit.VM; /** * This is Dunit test for bug 36109. This test has a cache-client having a primary * and a secondary cache-server as its endpoint. Primary does some operations * and is stopped, the client fails over to secondary and does some operations * and it is verified that the 'invalidates' stats at the client is same as the * total number of operations done by both primary and secondary. The bug was * appearing because invalidate stats was part of Endpoint which used to get * closed during fail over , with the failed endpoint getting closed. This bug * has been fixed by moving the invalidate stat to be part of our implementation. 
* * @author Dinesh Patel * */ public class StatsBugDUnitDisabledTest extends DistributedTestCase { /** primary cache server */ VM primary = null; /** secondary cache server */ VM secondary = null; /** the cache client */ VM client1 = null; /** the cache */ private static Cache cache = null; /** port for the primary cache server */ private static int PORT1; /** port for the secondary cache server */ private static int PORT2; /** name of the test region */ private static final String REGION_NAME = "StatsBugDUnitTest_Region"; /** brige-writer instance( used to get connection proxy handle) */ private static PoolImpl pool = null; /** total number of cache servers */ private static final int TOTAL_SERVERS = 2; /** number of puts done by each server */ private static final int PUTS_PER_SERVER = 10; /** prefix added to the keys of events generated on primary */ private static final String primaryPrefix = "primary_"; /** prefix added to the keys of events generated on secondary */ private static final String secondaryPrefix = "secondary_"; /** * Constructor * * @param name - * name for this test instance */ public StatsBugDUnitDisabledTest(String name) { super(name); } /** * Creates the primary and the secondary cache servers * * @throws Exception - * thrown if any problem occurs in initializing the test */ public void setUp() throws Exception { disconnectAllFromDS(); super.setUp(); final Host host = Host.getHost(0); primary = host.getVM(0); secondary = host.getVM(1); client1 = host.getVM(2); PORT1 = ((Integer)primary.invoke(StatsBugDUnitDisabledTest.class, "createServerCache")).intValue(); PORT2 = ((Integer)secondary.invoke(StatsBugDUnitDisabledTest.class, "createServerCache")).intValue(); } /** * Create the cache * * @param props - * properties for DS * @return the cache instance * @throws Exception - * thrown if any problem occurs in cache creation */ private Cache createCache(Properties props) throws Exception { DistributedSystem ds = getSystem(props); ds.disconnect(); 
ds = getSystem(props); Cache cache = null; cache = CacheFactory.create(ds); if (cache == null) { throw new Exception("CacheFactory.create() returned null "); } return cache; } /** * close the cache instances in server and client during tearDown * * @throws Exception * thrown if any problem occurs in closing cache */ public void tearDown2() throws Exception { super.tearDown2(); // close client client1.invoke(StatsBugDUnitDisabledTest.class, "closeCache"); // close server primary.invoke(StatsBugDUnitDisabledTest.class, "closeCache"); secondary.invoke(StatsBugDUnitDisabledTest.class, "closeCache"); } /** * This test does the following:<br> * 1)Create and populate the client<br> * 2)Do some operations from the primary cache-server<br> * 3)Stop the primary cache-server<br> * 4)Wait some time to allow client to failover to secondary and do some * operations from secondary<br> * 5)Verify that the invalidates stats at the client accounts for the * operations done by both, primary and secondary. 
* * @throws Exception - * thrown if any problem occurs in test execution */ public void testBug36109() throws Exception { getLogWriter().info("testBug36109 : BEGIN"); client1.invoke(StatsBugDUnitDisabledTest.class, "createClientCacheForInvalidates", new Object[] { getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) }); client1.invoke(StatsBugDUnitDisabledTest.class, "prepopulateClient"); primary.invoke(StatsBugDUnitDisabledTest.class, "doEntryOperations", new Object[] { primaryPrefix }); pause(3000); primary.invoke(StatsBugDUnitDisabledTest.class, "stopServer"); try { Thread.sleep(5000); } catch (InterruptedException ignore) { fail("interrupted"); } secondary.invoke(StatsBugDUnitDisabledTest.class, "doEntryOperations", new Object[] { secondaryPrefix }); try { Thread.sleep(5000); } catch (InterruptedException ignore) { fail("interrupted"); } client1.invoke(StatsBugDUnitDisabledTest.class, "verifyNumInvalidates"); getLogWriter().info("testBug36109 : END"); } /** * Creates and starts the cache-server * * @return - the port on which cache-server is running * @throws Exception - * thrown if any problem occurs in cache/server creation */ public static Integer createServerCache() throws Exception { StatsBugDUnitDisabledTest test = new StatsBugDUnitDisabledTest("temp"); Properties props = new Properties(); cache = test.createCache(props); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK); factory.setDataPolicy(DataPolicy.REPLICATE); RegionAttributes attrs = factory.create(); cache.createRegion(REGION_NAME, attrs); BridgeServer server = cache.addBridgeServer(); int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET); server.setPort(port); server.setNotifyBySubscription(false); server.setSocketBufferSize(32768); server.start(); getLogWriter().info("Server started at PORT = " + port); return new Integer(port); } /** * Initializes the cache client * * @param port1 - * port for the primary 
cache-server * @param port2-port * for the secondary cache-server * @throws Exception-thrown * if any problem occurs in initializing the client */ public static void createClientCache(String host, Integer port1, Integer port2) throws Exception { StatsBugDUnitDisabledTest test = new StatsBugDUnitDisabledTest("temp"); cache = test.createCache(createProperties1()); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK); pool = (PoolImpl)BridgeTestCase.configureConnectionPool(factory, host, new int[] {port1.intValue(),port2.intValue()}, true, -1, 3, null); RegionAttributes attrs = factory.create(); Region region = cache.createRegion(REGION_NAME, attrs); region.registerInterest("ALL_KEYS"); getLogWriter().info("Client cache created"); } /** * Initializes the cache client * * @param port1 - * port for the primary cache-server * @param port2-port * for the secondary cache-server * @throws Exception-thrown * if any problem occurs in initializing the client */ public static void createClientCacheForInvalidates(String host, Integer port1, Integer port2) throws Exception { StatsBugDUnitDisabledTest test = new StatsBugDUnitDisabledTest("temp"); cache = test.createCache(createProperties1()); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK); pool = (PoolImpl)BridgeTestCase.configureConnectionPool(factory, host, new int[] {port1.intValue(),port2.intValue()}, true, -1, 3, null); RegionAttributes attrs = factory.create(); Region region = cache.createRegion(REGION_NAME, attrs); region.registerInterest("ALL_KEYS", false, false); getLogWriter().info("Client cache created"); } /** * Verify that the invalidates stats at the client accounts for the operations * done by both, primary and secondary. 
* */ public static void verifyNumInvalidates() { long invalidatesRecordedByStats = pool.getInvalidateCount(); getLogWriter().info( "invalidatesRecordedByStats = " + invalidatesRecordedByStats); int expectedInvalidates = TOTAL_SERVERS * PUTS_PER_SERVER; getLogWriter().info("expectedInvalidates = " + expectedInvalidates); if (invalidatesRecordedByStats != expectedInvalidates) { fail("Invalidates received by client(" + invalidatesRecordedByStats + ") does not match with the number of operations(" + expectedInvalidates + ") done at server"); } } /** * Stops the cache server * */ public static void stopServer() { try { Iterator iter = cache.getBridgeServers().iterator(); if (iter.hasNext()) { BridgeServer server = (BridgeServer)iter.next(); server.stop(); } } catch (Exception e) { fail("failed while stopServer()" + e); } } /** * create properties for a loner VM */ private static Properties createProperties1() { Properties props = new Properties(); props.setProperty("mcast-port", "0"); props.setProperty("locators", ""); return props; } /** * Do PUT operations * * @param keyPrefix - * string prefix for the keys for all the entries do be done * @throws Exception - * thrown if any exception occurs in doing PUTs */ public static void doEntryOperations(String keyPrefix) throws Exception { Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME); for (int i = 0; i < PUTS_PER_SERVER; i++) { r1.put(keyPrefix + i, keyPrefix + "val-" + i); } } /** * Prepopulate the client with the entries that will be done by cache-servers * * @throws Exception */ public static void prepopulateClient() throws Exception { doEntryOperations(primaryPrefix); doEntryOperations(secondaryPrefix); } /** * Close the cache * */ public static void closeCache() { if (cache != null && !cache.isClosed()) { cache.close(); cache.getDistributedSystem().disconnect(); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager;

import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.ExecutionTypeRequest;
import org.apache.hadoop.yarn.api.records.ResourceSizing;
import org.apache.hadoop.yarn.api.records.SchedulingRequest;
import org.apache.hadoop.yarn.api.records.UpdateContainerRequest;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
import org.apache.hadoop.yarn.api.resource.PlacementConstraints;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.util.Records;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test helper that impersonates an ApplicationMaster talking to the
 * ResourceManager over {@link ApplicationMasterProtocol}: it registers the
 * attempt, builds resource/scheduling requests, drives allocate heartbeats
 * and finally unregisters the attempt. All RPC calls are made as the
 * attempt's UGI carrying the AMRM token fetched from the RMContext.
 */
public class MockAM {

  private static final Logger LOG = LoggerFactory.getLogger(MockAM.class);

  // Last response id echoed to/from the RM; the AM protocol uses it to
  // detect duplicate or out-of-order heartbeats.
  private volatile int responseId = 0;
  private final ApplicationAttemptId attemptId;
  private RMContext context;
  private ApplicationMasterProtocol amRMProtocol;
  // Cached UGI for register/unregister; allocate() builds a fresh one per
  // call so a rolled AMRM token is always picked up.
  private UserGroupInformation ugi;
  private volatile AllocateResponse lastResponse;
  // Placement constraints keyed by allocation-tag set; sent at registration.
  private Map<Set<String>, PlacementConstraint> placementConstraints =
      new HashMap<>();
  // SchedulingRequests queued up for the next allocate call (then cleared).
  private List<SchedulingRequest> schedulingRequests = new ArrayList<>();

  // Requests/releases accumulated via addRequests()/addContainerToBeReleased()
  // and flushed by schedule().
  private final List<ResourceRequest> requests =
      new ArrayList<ResourceRequest>();
  private final List<ContainerId> releases = new ArrayList<ContainerId>();

  public MockAM(RMContext context, ApplicationMasterProtocol amRMProtocol,
      ApplicationAttemptId attemptId) {
    this.context = context;
    this.amRMProtocol = amRMProtocol;
    this.attemptId = attemptId;
  }

  /** Repoint this mock AM at a different RM (e.g. after an RM restart). */
  public void setAMRMProtocol(ApplicationMasterProtocol amRMProtocol,
      RMContext context) {
    this.context = context;
    this.amRMProtocol = amRMProtocol;
  }

  /**
   * Wait until an attempt has reached a specified state.
   * The timeout is 40 seconds.
   * @param finalState the attempt state waited
   * @throws InterruptedException
   *         if interrupted while waiting for the state transition
   */
  private void waitForState(RMAppAttemptState finalState)
      throws InterruptedException {
    RMApp app = context.getRMApps().get(attemptId.getApplicationId());
    RMAppAttempt attempt = app.getRMAppAttempt(attemptId);
    MockRM.waitForState(attempt, finalState);
  }

  /** Register the attempt, waiting for it to reach LAUNCHED first. */
  public RegisterApplicationMasterResponse registerAppAttempt()
      throws Exception {
    return registerAppAttempt(true);
  }

  /** Queue a placement constraint to be sent with the registration request. */
  public void addPlacementConstraint(Set<String> tags,
      PlacementConstraint constraint) {
    placementConstraints.put(tags, constraint);
  }

  /** Queue SchedulingRequests for the next allocate call. */
  public MockAM addSchedulingRequest(List<SchedulingRequest> reqs) {
    schedulingRequests.addAll(reqs);
    return this;
  }

  /**
   * Register the attempt with the RM, optionally waiting for LAUNCHED first.
   * Resets the response id and runs the RPC as the attempt's UGI carrying
   * the AMRM token.
   */
  public RegisterApplicationMasterResponse registerAppAttempt(boolean wait)
      throws Exception {
    if (wait) {
      waitForState(RMAppAttemptState.LAUNCHED);
    }
    // A fresh registration always starts the heartbeat sequence at 0.
    responseId = 0;
    final RegisterApplicationMasterRequest req =
        Records.newRecord(RegisterApplicationMasterRequest.class);
    req.setHost("");
    req.setRpcPort(1);
    req.setTrackingUrl("");
    if (!placementConstraints.isEmpty()) {
      req.setPlacementConstraints(this.placementConstraints);
    }
    if (ugi == null) {
      ugi = UserGroupInformation.createRemoteUser(
          attemptId.toString());
      Token<AMRMTokenIdentifier> token =
          context.getRMApps().get(attemptId.getApplicationId())
              .getRMAppAttempt(attemptId).getAMRMToken();
      ugi.addTokenIdentifier(token.decodeIdentifier());
    }
    try {
      return ugi
          .doAs(
              new PrivilegedExceptionAction<RegisterApplicationMasterResponse>() {
                @Override
                public RegisterApplicationMasterResponse run()
                    throws Exception {
                  return amRMProtocol.registerApplicationMaster(req);
                }
              });
    } catch (UndeclaredThrowableException e) {
      // doAs wraps checked exceptions; unwrap to the real cause.
      throw (Exception) e.getCause();
    }
  }

  /**
   * Force both this mock and the ApplicationMasterService to a specific
   * last-response id (used to simulate resync scenarios).
   */
  public boolean setApplicationLastResponseId(int newLastResponseId) {
    ApplicationMasterService applicationMasterService =
        (ApplicationMasterService) amRMProtocol;
    responseId = newLastResponseId;
    return applicationMasterService.setAttemptLastResponseId(attemptId,
        newLastResponseId);
  }

  public int getResponseId() {
    return responseId;
  }

  /** Accumulate host/rack/ANY requests to be sent by the next schedule(). */
  public void addRequests(String[] hosts, int memory, int priority,
      int containers) throws Exception {
    addRequests(hosts, memory, priority, containers, 0L);
  }

  public void addRequests(String[] hosts, int memory, int priority,
      int containers, long allocationRequestId) throws Exception {
    requests.addAll(
        createReq(hosts, memory, priority, containers, allocationRequestId));
  }

  /** Flush the accumulated requests/releases in a single allocate call. */
  public AllocateResponse schedule() throws Exception {
    AllocateResponse response = allocate(requests, releases);
    requests.clear();
    releases.clear();
    return response;
  }

  public void addContainerToBeReleased(ContainerId containerId) {
    releases.add(containerId);
  }

  public AllocateResponse allocate(
      String host, int memory, int numContainers,
      List<ContainerId> releases) throws Exception {
    return allocate(host, memory, numContainers, releases, null);
  }

  public AllocateResponse allocate(
      String host, int memory, int numContainers,
      List<ContainerId> releases, String labelExpression) throws Exception {
    return allocate(host, memory, numContainers, 1, releases, labelExpression);
  }

  public AllocateResponse allocate(
      String host, int memory, int numContainers, int priority,
      List<ContainerId> releases, String labelExpression) throws Exception {
    List<ResourceRequest> reqs =
        createReq(new String[] { host }, memory, priority, numContainers,
            labelExpression, -1);
    return allocate(reqs, releases);
  }

  public AllocateResponse allocate(
      String host, Resource cap, int numContainers,
      List<ContainerId> rels, String labelExpression) throws Exception {
    List<ResourceRequest> reqs = new ArrayList<>();
    ResourceRequest oneReq =
        createResourceReq(host, cap, numContainers, labelExpression);
    reqs.add(oneReq);
    return allocate(reqs, rels);
  }

  public List<ResourceRequest> createReq(String[] hosts, int memory,
      int priority, int containers, long allocationRequestId)
      throws Exception {
    return createReq(hosts, memory, priority, containers, null,
        allocationRequestId);
  }

  /**
   * Build the standard request triple for each host: a host-local request,
   * a rack-local request ("/default-rack") and one off-switch (ANY) request.
   */
  public List<ResourceRequest> createReq(String[] hosts, int memory,
      int priority, int containers, String labelExpression,
      long allocationRequestId) throws Exception {
    List<ResourceRequest> reqs = new ArrayList<ResourceRequest>();
    if (hosts != null) {
      for (String host : hosts) {
        // only add host/rack request when asked host isn't ANY
        if (!host.equals(ResourceRequest.ANY)) {
          ResourceRequest hostReq =
              createResourceReq(host, memory, priority, containers,
                  labelExpression);
          hostReq.setAllocationRequestId(allocationRequestId);
          reqs.add(hostReq);
          ResourceRequest rackReq =
              createResourceReq("/default-rack", memory, priority, containers,
                  labelExpression);
          rackReq.setAllocationRequestId(allocationRequestId);
          reqs.add(rackReq);
        }
      }
    }
    ResourceRequest offRackReq = createResourceReq(ResourceRequest.ANY, memory,
        priority, containers, labelExpression);
    offRackReq.setAllocationRequestId(allocationRequestId);
    reqs.add(offRackReq);
    return reqs;
  }

  public ResourceRequest createResourceReq(String resource, int memory,
      int priority, int containers) throws Exception {
    return createResourceReq(resource, memory, priority, containers, null);
  }

  public ResourceRequest createResourceReq(String resource, int memory,
      int priority, int containers, String labelExpression) throws Exception {
    return createResourceReq(resource, memory, priority, containers,
        labelExpression, ExecutionTypeRequest.newInstance());
  }

  /** Build a single ResourceRequest record for the given name/size/count. */
  public ResourceRequest createResourceReq(String resource, int memory,
      int priority, int containers, String labelExpression,
      ExecutionTypeRequest executionTypeRequest) throws Exception {
    ResourceRequest req = Records.newRecord(ResourceRequest.class);
    req.setResourceName(resource);
    req.setNumContainers(containers);
    Priority pri = Records.newRecord(Priority.class);
    pri.setPriority(priority);
    req.setPriority(pri);
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemorySize(memory);
    req.setCapability(capability);
    if (labelExpression != null) {
      req.setNodeLabelExpression(labelExpression);
    }
    req.setExecutionTypeRequest(executionTypeRequest);
    return req;
  }

  /** Variant taking a full Resource capability; priority is fixed at 1. */
  public ResourceRequest createResourceReq(String host, Resource cap,
      int containers, String labelExpression) throws Exception {
    ResourceRequest req = Records.newRecord(ResourceRequest.class);
    req.setResourceName(host);
    req.setNumContainers(containers);
    Priority pri = Records.newRecord(Priority.class);
    pri.setPriority(1);
    req.setPriority(pri);
    req.setCapability(cap);
    if (labelExpression != null) {
      req.setNodeLabelExpression(labelExpression);
    }
    req.setExecutionTypeRequest(ExecutionTypeRequest.newInstance());
    return req;
  }

  public AllocateResponse allocate(
      List<ResourceRequest> resourceRequest, List<ContainerId> releases)
      throws Exception {
    final AllocateRequest req =
        AllocateRequest.newInstance(0, 0F, resourceRequest, releases, null);
    // Piggy-back any queued SchedulingRequests onto this heartbeat, then
    // clear the queue so they are sent exactly once.
    if (!schedulingRequests.isEmpty()) {
      req.setSchedulingRequests(schedulingRequests);
      schedulingRequests.clear();
    }
    return allocate(req);
  }

  public AllocateResponse allocate(List<ResourceRequest> resourceRequest,
      List<SchedulingRequest> newSchedulingRequests,
      List<ContainerId> releases)
      throws Exception {
    final AllocateRequest req =
        AllocateRequest.newInstance(0, 0F, resourceRequest, releases, null);
    if (newSchedulingRequests != null) {
      addSchedulingRequest(newSchedulingRequests);
    }
    if (!schedulingRequests.isEmpty()) {
      req.setSchedulingRequests(schedulingRequests);
      schedulingRequests.clear();
    }
    return allocate(req);
  }

  /** Anti-affinity within the app's own allocation-tag namespace. */
  public AllocateResponse allocateIntraAppAntiAffinity(
      ResourceSizing resourceSizing, Priority priority, long allocationId,
      Set<String> allocationTags, String... targetTags) throws Exception {
    return allocateAppAntiAffinity(resourceSizing, priority, allocationId,
        null, allocationTags, targetTags);
  }

  /**
   * Send one SchedulingRequest with a NODE-scoped anti-affinity constraint
   * against the given target tags (optionally in another app's namespace).
   */
  public AllocateResponse allocateAppAntiAffinity(
      ResourceSizing resourceSizing, Priority priority, long allocationId,
      String namespace, Set<String> allocationTags, String... targetTags)
      throws Exception {
    return this.allocate(null,
        Arrays.asList(SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(allocationId).priority(priority)
            .allocationTags(allocationTags).placementConstraintExpression(
                PlacementConstraints
                    .targetNotIn(PlacementConstraints.NODE,
                        PlacementConstraints.PlacementTargets
                            .allocationTagWithNamespace(namespace,
                                targetTags))
                    .build())
            .resourceSizing(resourceSizing).build()), null);
  }

  /** Intra-app anti-affinity restricted to a specific node partition. */
  public AllocateResponse allocateIntraAppAntiAffinity(
      String nodePartition, ResourceSizing resourceSizing, Priority priority,
      long allocationId, String... tags) throws Exception {
    return this.allocate(null,
        Arrays.asList(SchedulingRequest.newBuilder().executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED))
            .allocationRequestId(allocationId).priority(priority)
            .placementConstraintExpression(PlacementConstraints
                .targetNotIn(PlacementConstraints.NODE,
                    PlacementConstraints.PlacementTargets
                        .allocationTag(tags),
                    PlacementConstraints.PlacementTargets
                        .nodePartition(nodePartition)).build())
            .resourceSizing(resourceSizing).build()), null);
  }

  public AllocateResponse sendContainerResizingRequest(
      List<UpdateContainerRequest> updateRequests) throws Exception {
    final AllocateRequest req = AllocateRequest.newInstance(0, 0F, null, null,
        updateRequests, null);
    return allocate(req);
  }

  public AllocateResponse sendContainerUpdateRequest(
      List<UpdateContainerRequest> updateRequests) throws Exception {
    final AllocateRequest req = AllocateRequest.newInstance(0, 0F, null, null,
        updateRequests, null);
    return allocate(req);
  }

  /**
   * Lowest-level allocate: builds a fresh UGI carrying the CURRENT AMRM
   * token (so token rolls are picked up) and performs the heartbeat.
   */
  public AllocateResponse allocate(AllocateRequest allocateRequest)
      throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser(attemptId.toString());
    Token<AMRMTokenIdentifier> token =
        context.getRMApps().get(attemptId.getApplicationId())
            .getRMAppAttempt(attemptId).getAMRMToken();
    ugi.addTokenIdentifier(token.decodeIdentifier());
    lastResponse = doAllocateAs(ugi, allocateRequest);
    return lastResponse;
  }

  /**
   * Perform the allocate RPC as the given UGI, stamping the outgoing
   * request with our response id and recording the id the RM returns.
   */
  public AllocateResponse doAllocateAs(UserGroupInformation ugi,
      final AllocateRequest req) throws Exception {
    req.setResponseId(responseId);
    try {
      AllocateResponse response =
          ugi.doAs(new PrivilegedExceptionAction<AllocateResponse>() {
            @Override
            public AllocateResponse run() throws Exception {
              return amRMProtocol.allocate(req);
            }
          });
      responseId = response.getResponseId();
      return response;
    } catch (UndeclaredThrowableException e) {
      // doAs wraps checked exceptions; unwrap to the real cause.
      throw (Exception) e.getCause();
    }
  }

  /** An empty allocate call, used purely as a heartbeat. */
  public AllocateResponse doHeartbeat() throws Exception {
    return allocate(null, null);
  }

  public void unregisterAppAttempt() throws Exception {
    waitForState(RMAppAttemptState.RUNNING);
    unregisterAppAttempt(true);
  }

  public void unregisterAppAttempt(boolean waitForStateRunning)
      throws Exception {
    final FinishApplicationMasterRequest req =
        FinishApplicationMasterRequest.newInstance(
            FinalApplicationStatus.SUCCEEDED, "", "");
    unregisterAppAttempt(req, waitForStateRunning);
  }

  /**
   * Unregister the attempt with the given finish request, optionally
   * waiting for the attempt to reach RUNNING first.
   */
  public void unregisterAppAttempt(final FinishApplicationMasterRequest req,
      boolean waitForStateRunning) throws Exception {
    if (waitForStateRunning) {
      waitForState(RMAppAttemptState.RUNNING);
    }
    if (ugi == null) {
      ugi = UserGroupInformation.createRemoteUser(attemptId.toString());
      Token<AMRMTokenIdentifier> token =
          context.getRMApps()
              .get(attemptId.getApplicationId())
              .getRMAppAttempt(attemptId).getAMRMToken();
      ugi.addTokenIdentifier(token.decodeIdentifier());
    }
    try {
      ugi.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          amRMProtocol.finishApplicationMaster(req);
          return null;
        }
      });
    } catch (UndeclaredThrowableException e) {
      // doAs wraps checked exceptions; unwrap to the real cause.
      throw (Exception) e.getCause();
    }
  }

  public ApplicationAttemptId getApplicationAttemptId() {
    return this.attemptId;
  }

  public List<Container> allocateAndWaitForContainers(int nContainer,
      int memory, MockNM nm) throws Exception {
    return allocateAndWaitForContainers("ANY", nContainer, memory, nm);
  }

  /**
   * Request containers and drive NM heartbeats until at least nContainer
   * containers have been allocated. NOTE(review): loops without a timeout —
   * presumably the surrounding test harness enforces one; a scheduler that
   * never satisfies the request would spin here.
   */
  public List<Container> allocateAndWaitForContainers(String host,
      int nContainer, int memory, MockNM nm) throws Exception {
    // AM request for containers
    allocate(host, memory, nContainer, null);
    // kick the scheduler
    nm.nodeHeartbeat(true);
    List<Container> conts =
        allocate(new ArrayList<ResourceRequest>(), null)
            .getAllocatedContainers();
    while (conts.size() < nContainer) {
      nm.nodeHeartbeat(true);
      conts.addAll(allocate(new ArrayList<ResourceRequest>(),
          new ArrayList<ContainerId>()).getAllocatedContainers());
      Thread.sleep(500);
    }
    return conts;
  }
}
package com.krishagni.catissueplus.core.biospecimen.repository;

import java.util.List;

import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.krishagni.catissueplus.core.common.access.SiteCpPair;
import com.krishagni.catissueplus.core.common.events.AbstractListCriteria;

/**
 * Fluent criteria object describing the filters used when listing specimens.
 * Each filter follows the same pattern: a no-arg accessor returning the
 * current value and a single-arg fluent setter returning {@code this} so
 * calls can be chained. Jackson annotations allow the criteria to be bound
 * directly from request payloads (with a few legacy aliases).
 */
public class SpecimenListCriteria extends AbstractListCriteria<SpecimenListCriteria> {

	//
	// Collection protocol / participant scoping
	//
	private Long cpId;

	private String cpShortTitle;

	private String[] lineages;

	private String[] collectionStatuses;

	private List<SiteCpPair> siteCps;

	//
	// Specimen identification
	//
	private List<String> labels;

	private List<String> barcodes;

	private Long specimenListId;

	private boolean useMrnSites;

	private String storageLocationSite;

	private Long cprId;

	private String ppid;

	private Long visitId;

	private List<String> visitNames;

	private Long ancestorId;

	private String anatomicSite;

	private String type;

	//
	// Container / availability filters
	//
	private String container;

	private Long containerId;

	private Long ancestorContainerId;

	private boolean available;

	private boolean noQty;

	private Long reservedForDp;

	private boolean minimalInfo;

	private boolean includeOnlyTbr;

	@Override
	public SpecimenListCriteria self() {
		return this;
	}

	public Long cpId() {
		return this.cpId;
	}

	@JsonProperty("cpId")
	public SpecimenListCriteria cpId(Long cpId) {
		this.cpId = cpId;
		return self();
	}

	public String cpShortTitle() {
		return this.cpShortTitle;
	}

	@JsonProperty("cpShortTitle")
	public SpecimenListCriteria cpShortTitle(String cpShortTitle) {
		this.cpShortTitle = cpShortTitle;
		return self();
	}

	public String[] lineages() {
		return this.lineages;
	}

	@JsonProperty("lineages")
	@JsonAlias({"lineage"})
	public SpecimenListCriteria lineages(String[] lineages) {
		this.lineages = lineages;
		return self();
	}

	public String[] collectionStatuses() {
		return this.collectionStatuses;
	}

	@JsonProperty("collectionStatuses")
	@JsonAlias({"collectionStatus"})
	public SpecimenListCriteria collectionStatuses(String[] collectionStatuses) {
		this.collectionStatuses = collectionStatuses;
		return self();
	}

	public List<SiteCpPair> siteCps() {
		return this.siteCps;
	}

	public SpecimenListCriteria siteCps(List<SiteCpPair> siteCps) {
		this.siteCps = siteCps;
		return self();
	}

	public List<String> labels() {
		return this.labels;
	}

	@JsonProperty("labels")
	@JsonAlias({"label"})
	public SpecimenListCriteria labels(List<String> labels) {
		this.labels = labels;
		return self();
	}

	public List<String> barcodes() {
		return this.barcodes;
	}

	@JsonProperty("barcodes")
	@JsonAlias({"barcode"})
	public SpecimenListCriteria barcodes(List<String> barcodes) {
		this.barcodes = barcodes;
		return self();
	}

	public Long specimenListId() {
		return this.specimenListId;
	}

	@JsonProperty("listId")
	@JsonAlias({"specimenListId"})
	public SpecimenListCriteria specimenListId(Long specimenListId) {
		this.specimenListId = specimenListId;
		return self();
	}

	public boolean useMrnSites() {
		return this.useMrnSites;
	}

	public SpecimenListCriteria useMrnSites(boolean useMrnSites) {
		this.useMrnSites = useMrnSites;
		return self();
	}

	public String storageLocationSite() {
		return this.storageLocationSite;
	}

	@JsonProperty("storageLocationSite")
	@JsonAlias({"locationSite"})
	public SpecimenListCriteria storageLocationSite(String storageLocationSite) {
		this.storageLocationSite = storageLocationSite;
		return self();
	}

	public Long cprId() {
		return this.cprId;
	}

	@JsonProperty("cprId")
	public SpecimenListCriteria cprId(Long cprId) {
		this.cprId = cprId;
		return self();
	}

	public String ppid() {
		return this.ppid;
	}

	@JsonProperty("ppid")
	public SpecimenListCriteria ppid(String ppid) {
		this.ppid = ppid;
		return self();
	}

	public Long visitId() {
		return this.visitId;
	}

	@JsonProperty("visitId")
	public SpecimenListCriteria visitId(Long visitId) {
		this.visitId = visitId;
		return self();
	}

	public List<String> visitNames() {
		return this.visitNames;
	}

	@JsonProperty("visitNames")
	public SpecimenListCriteria visitNames(List<String> visitNames) {
		this.visitNames = visitNames;
		return self();
	}

	public Long ancestorId() {
		return this.ancestorId;
	}

	@JsonProperty("ancestorId")
	public SpecimenListCriteria ancestorId(Long ancestorId) {
		this.ancestorId = ancestorId;
		return self();
	}

	public String anatomicSite() {
		return this.anatomicSite;
	}

	@JsonProperty("anatomicSite")
	public SpecimenListCriteria anatomicSite(String anatomicSite) {
		this.anatomicSite = anatomicSite;
		return self();
	}

	public String type() {
		return this.type;
	}

	@JsonProperty("type")
	public SpecimenListCriteria type(String type) {
		this.type = type;
		return self();
	}

	public String container() {
		return this.container;
	}

	@JsonProperty("container")
	public SpecimenListCriteria container(String container) {
		this.container = container;
		return self();
	}

	public Long containerId() {
		return this.containerId;
	}

	@JsonProperty("containerId")
	public SpecimenListCriteria containerId(Long containerId) {
		this.containerId = containerId;
		return self();
	}

	public Long ancestorContainerId() {
		return this.ancestorContainerId;
	}

	@JsonProperty("ancestorContainerId")
	public SpecimenListCriteria ancestorContainerId(Long ancestorContainerId) {
		this.ancestorContainerId = ancestorContainerId;
		return self();
	}

	public boolean available() {
		return this.available;
	}

	@JsonProperty("available")
	public SpecimenListCriteria available(boolean available) {
		this.available = available;
		return self();
	}

	public boolean noQty() {
		return this.noQty;
	}

	@JsonProperty("noQty")
	public SpecimenListCriteria noQty(boolean noQty) {
		this.noQty = noQty;
		return self();
	}

	public Long reservedForDp() {
		return this.reservedForDp;
	}

	@JsonProperty("reservedForDp")
	public SpecimenListCriteria reservedForDp(Long reservedForDp) {
		this.reservedForDp = reservedForDp;
		return self();
	}

	public boolean minimalInfo() {
		return this.minimalInfo;
	}

	@JsonProperty("minimalInfo")
	public SpecimenListCriteria minimalInfo(boolean minimalInfo) {
		this.minimalInfo = minimalInfo;
		return self();
	}

	public boolean includeOnlyTbr() {
		return this.includeOnlyTbr;
	}

	@JsonProperty("includeOnlyTbr")
	public SpecimenListCriteria includeOnlyTbr(boolean includeOnlyTbr) {
		this.includeOnlyTbr = includeOnlyTbr;
		return self();
	}
}
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.facet.terms.strings;

import com.google.common.collect.ImmutableList;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.terms.InternalTermsFacet;
import org.elasticsearch.search.facet.terms.TermsFacet;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
 * Terms facet over string terms: holds the per-term counts for one shard,
 * reduces multiple shard-level facets into one, and (de)serializes itself
 * via the "tTerms" stream type.
 */
public class InternalStringTermsFacet extends InternalTermsFacet {

    private static final String STREAM_TYPE = "tTerms";

    public static void registerStream() {
        Streams.registerStream(STREAM, STREAM_TYPE);
    }

    static Stream STREAM = new Stream() {
        @Override
        public Facet readFacet(String type, StreamInput in) throws IOException {
            return readTermsFacet(in);
        }
    };

    @Override
    public String streamType() {
        return STREAM_TYPE;
    }

    /**
     * A single (term, count) pair. Ordering is by term, then count, then
     * object identity so that distinct-but-equal entries are not collapsed
     * by ordered sets (e.g. the BoundedTreeSet used in reduce()).
     */
    public static class StringEntry implements Entry {

        private String term;
        private int count;

        public StringEntry(String term, int count) {
            this.term = term;
            this.count = count;
        }

        public String term() {
            return term;
        }

        public String getTerm() {
            return term;
        }

        @Override
        public Number termAsNumber() {
            // NOTE(review): throws NumberFormatException for non-numeric
            // terms — callers are expected to use this only on numeric terms.
            return Double.parseDouble(term);
        }

        @Override
        public Number getTermAsNumber() {
            return termAsNumber();
        }

        public int count() {
            return count;
        }

        public int getCount() {
            return count();
        }

        @Override
        public int compareTo(Entry o) {
            int i = term.compareTo(o.term());
            if (i == 0) {
                // FIX: compare via Integer.compare instead of int
                // subtraction; `count - o.count()` can overflow when the
                // operands are far apart, inverting the sign and violating
                // the compareTo contract.
                i = Integer.compare(count, o.count());
                if (i == 0) {
                    // tie-break on identity so equal entries stay distinct
                    i = Integer.compare(System.identityHashCode(this),
                            System.identityHashCode(o));
                }
            }
            return i;
        }
    }

    private String name;

    // Maximum number of entries to keep after reduce().
    int requiredSize;

    // Number of docs with no value for the facet field.
    long missing;

    // Total count across all terms (including terms not in `entries`).
    long total;

    Collection<StringEntry> entries = ImmutableList.of();

    ComparatorType comparatorType;

    InternalStringTermsFacet() {
    }

    public InternalStringTermsFacet(String name, ComparatorType comparatorType,
                                    int requiredSize, Collection<StringEntry> entries,
                                    long missing, long total) {
        this.name = name;
        this.comparatorType = comparatorType;
        this.requiredSize = requiredSize;
        this.entries = entries;
        this.missing = missing;
        this.total = total;
    }

    @Override
    public String name() {
        return this.name;
    }

    @Override
    public String getName() {
        return this.name;
    }

    @Override
    public String type() {
        return TYPE;
    }

    @Override
    public String getType() {
        return type();
    }

    @Override
    public List<StringEntry> entries() {
        // Materialize lazily into a List the first time it is needed.
        if (!(entries instanceof List)) {
            entries = ImmutableList.copyOf(entries);
        }
        return (List<StringEntry>) entries;
    }

    @Override
    public List<StringEntry> getEntries() {
        return entries();
    }

    @SuppressWarnings({"unchecked"})
    @Override
    public Iterator<Entry> iterator() {
        return (Iterator) entries.iterator();
    }

    @Override
    public long missingCount() {
        return this.missing;
    }

    @Override
    public long getMissingCount() {
        return missingCount();
    }

    @Override
    public long totalCount() {
        return this.total;
    }

    @Override
    public long getTotalCount() {
        return totalCount();
    }

    @Override
    public long otherCount() {
        // "other" = total minus the counts of the retained top entries.
        long other = total;
        for (Entry entry : entries) {
            other -= entry.count();
        }
        return other;
    }

    @Override
    public long getOtherCount() {
        return otherCount();
    }

    /**
     * Merge shard-level facets: sum per-term counts into a recycled map,
     * then keep the top `requiredSize` entries in comparator order. The
     * first facet is mutated in place and returned.
     */
    @Override
    public Facet reduce(String name, List<Facet> facets) {
        if (facets.size() == 1) {
            return facets.get(0);
        }
        InternalStringTermsFacet first = (InternalStringTermsFacet) facets.get(0);
        TObjectIntHashMap<String> aggregated = CacheRecycler.popObjectIntMap();
        long missing = 0;
        long total = 0;
        for (Facet facet : facets) {
            InternalStringTermsFacet mFacet = (InternalStringTermsFacet) facet;
            missing += mFacet.missingCount();
            total += mFacet.totalCount();
            for (InternalStringTermsFacet.StringEntry entry : mFacet.entries) {
                aggregated.adjustOrPutValue(entry.term(), entry.count(), entry.count());
            }
        }

        BoundedTreeSet<StringEntry> ordered =
                new BoundedTreeSet<StringEntry>(first.comparatorType.comparator(), first.requiredSize);
        for (TObjectIntIterator<String> it = aggregated.iterator(); it.hasNext(); ) {
            it.advance();
            ordered.add(new StringEntry(it.key(), it.value()));
        }
        first.entries = ordered;
        first.missing = missing;
        first.total = total;

        // Return the map to the recycler pool once we are done with it.
        CacheRecycler.pushObjectIntMap(aggregated);
        return first;
    }

    static final class Fields {
        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
        static final XContentBuilderString MISSING = new XContentBuilderString("missing");
        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
        static final XContentBuilderString OTHER = new XContentBuilderString("other");
        static final XContentBuilderString TERMS = new XContentBuilderString("terms");
        static final XContentBuilderString TERM = new XContentBuilderString("term");
        static final XContentBuilderString COUNT = new XContentBuilderString("count");
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name);
        builder.field(Fields._TYPE, TermsFacet.TYPE);
        builder.field(Fields.MISSING, missing);
        builder.field(Fields.TOTAL, total);
        builder.field(Fields.OTHER, otherCount());
        builder.startArray(Fields.TERMS);
        for (Entry entry : entries) {
            builder.startObject();
            builder.field(Fields.TERM, entry.term());
            builder.field(Fields.COUNT, entry.count());
            builder.endObject();
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }

    public static InternalStringTermsFacet readTermsFacet(StreamInput in) throws IOException {
        InternalStringTermsFacet facet = new InternalStringTermsFacet();
        facet.readFrom(in);
        return facet;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        name = in.readUTF();
        comparatorType = ComparatorType.fromId(in.readByte());
        requiredSize = in.readVInt();
        missing = in.readVLong();
        total = in.readVLong();

        int size = in.readVInt();
        entries = new ArrayList<StringEntry>(size);
        for (int i = 0; i < size; i++) {
            entries.add(new StringEntry(in.readUTF(), in.readVInt()));
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeUTF(name);
        out.writeByte(comparatorType.id());
        out.writeVInt(requiredSize);
        out.writeVLong(missing);
        out.writeVLong(total);

        out.writeVInt(entries.size());
        for (Entry entry : entries) {
            out.writeUTF(entry.term());
            out.writeVInt(entry.count());
        }
    }
}
package org.axonframework.springcloud.commandhandling;

import com.google.common.collect.ImmutableList;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.commandhandling.GenericCommandMessage;
import org.axonframework.commandhandling.distributed.ConsistentHash;
import org.axonframework.commandhandling.distributed.Member;
import org.axonframework.commandhandling.distributed.RoutingStrategy;
import org.axonframework.commandhandling.distributed.SimpleMember;
import org.axonframework.commandhandling.distributed.commandfilter.CommandNameFilter;
import org.axonframework.common.ReflectionUtils;
import org.axonframework.serialization.SerializedObject;
import org.axonframework.serialization.SerializedType;
import org.axonframework.serialization.Serializer;
import org.axonframework.serialization.SimpleSerializedObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.cloud.client.discovery.event.HeartbeatEvent;

import java.lang.reflect.Field;
import java.net.URI;
import java.util.Collections;
import java.util.HashMap;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code SpringCloudCommandRouter}. The router's private
 * {@code atomicConsistentHash} field is read/written reflectively to inspect
 * and seed the consistent-hash ring without exposing it on the public API.
 */
@RunWith(MockitoJUnitRunner.class)
public class SpringCloudCommandRouterTest {

    private static final int LOAD_FACTOR = 1;
    private static final CommandMessage<Object> TEST_COMMAND = GenericCommandMessage.asCommandMessage("testCommand");
    private static final String ROUTING_KEY = "routingKey";
    private static final String SERVICE_INSTANCE_ID = "SERVICEID";
    private static final URI SERVICE_INSTANCE_URI = URI.create("endpoint");
    private static final CommandNameFilter COMMAND_NAME_FILTER = new CommandNameFilter(String.class.getName());
    private static final String SERIALIZED_COMMAND_FILTER = "dummyCommandFilterData";
    private static final String SERIALIZED_COMMAND_FILTER_CLASS_NAME = String.class.getName();

    // Class under test; Mockito injects the @Mock fields below into it.
    @InjectMocks
    private SpringCloudCommandRouter testSubject;
    @Mock
    private DiscoveryClient discoveryClient;
    @Mock
    private RoutingStrategy routingStrategy;
    @Mock
    private Serializer serializer;
    // Handle on the router's private consistent-hash field (resolved in setUp).
    private Field atomicConsistentHashField;
    @Mock
    private SerializedObject<String> serializedObject;
    // Mutable metadata map returned by the mocked local ServiceInstance; tests
    // assert on (or pre-populate) its contents.
    private HashMap<String, String> serviceInstanceMetadata;
    @Mock
    private ServiceInstance serviceInstance;
    private SimpleSerializedObject<String> expectedSerializedObject;

    @Before
    public void setUp() throws Exception {
        String atomicConsistentHashFieldName = "atomicConsistentHash";
        atomicConsistentHashField = SpringCloudCommandRouter.class.getDeclaredField(atomicConsistentHashFieldName);
        // Stub the serialized form of the command filter.
        SerializedType serializedType = mock(SerializedType.class);
        when(serializedType.getName()).thenReturn(SERIALIZED_COMMAND_FILTER_CLASS_NAME);
        when(serializedObject.getType()).thenReturn(serializedType);
        when(serializedObject.getData()).thenReturn(SERIALIZED_COMMAND_FILTER);
        // One discoverable service instance, backed by a real (mutable) map.
        serviceInstanceMetadata = new HashMap<>();
        when(serviceInstance.getServiceId()).thenReturn(SERVICE_INSTANCE_ID);
        when(serviceInstance.getUri()).thenReturn(SERVICE_INSTANCE_URI);
        when(serviceInstance.getMetadata()).thenReturn(serviceInstanceMetadata);
        // The serialized object the router is expected to hand to the
        // serializer when deserializing filter metadata.
        expectedSerializedObject = new SimpleSerializedObject<>(
                SERIALIZED_COMMAND_FILTER, String.class, SERIALIZED_COMMAND_FILTER_CLASS_NAME, null);
        when(discoveryClient.getLocalServiceInstance()).thenReturn(serviceInstance);
        when(discoveryClient.getServices()).thenReturn(Collections.singletonList(SERVICE_INSTANCE_ID));
        when(discoveryClient.getInstances(SERVICE_INSTANCE_ID)).thenReturn(Collections.singletonList(serviceInstance));
        when(routingStrategy.getRoutingKey(any())).thenReturn(ROUTING_KEY);
        when(serializer.serialize(COMMAND_NAME_FILTER, String.class)).thenReturn(serializedObject);
        when(serializer.deserialize(serializedObject)).thenReturn(COMMAND_NAME_FILTER);
    }

    // With an empty consistent hash, no destination can be resolved.
    @Test
    public void testFindDestinationReturnsEmptyOptionalMemberForCommandMessage() throws Exception {
        Optional<Member> result = testSubject.findDestination(TEST_COMMAND);

        assertFalse(result.isPresent());

        verify(routingStrategy).getRoutingKey(TEST_COMMAND);
    }

    // Seeding the ring with one member (via reflection) makes that member the
    // destination for any command.
    @Test
    public void testFindDestinationReturnsMemberForCommandMessage() throws Exception {
        SimpleMember<URI> testMember = new SimpleMember<>(SERVICE_INSTANCE_ID, SERVICE_INSTANCE_URI, null);
        AtomicReference<ConsistentHash> testAtomicConsistentHash =
                new AtomicReference<>(new ConsistentHash().with(testMember, LOAD_FACTOR, commandMessage -> true));
        ReflectionUtils.setFieldValue(atomicConsistentHashField, testSubject, testAtomicConsistentHash);

        Optional<Member> resultOptional = testSubject.findDestination(TEST_COMMAND);

        assertTrue(resultOptional.isPresent());
        Member resultMember = resultOptional.orElseThrow(IllegalStateException::new);

        assertMember(SERVICE_INSTANCE_ID, SERVICE_INSTANCE_URI, resultMember);

        verify(routingStrategy).getRoutingKey(TEST_COMMAND);
    }

    // updateMembership must publish load factor and serialized filter into the
    // local instance's metadata map.
    @Test
    public void testUpdateMembershipUpdatesLocalServiceInstance() throws Exception {
        testSubject.updateMembership(LOAD_FACTOR, COMMAND_NAME_FILTER);

        assertEquals(serviceInstanceMetadata.get("loadFactor"), Integer.toString(LOAD_FACTOR));
        assertEquals(serviceInstanceMetadata.get("serializedCommandFilter"), SERIALIZED_COMMAND_FILTER);
        assertEquals(serviceInstanceMetadata.get("serializedCommandFilterClassName"), SERIALIZED_COMMAND_FILTER_CLASS_NAME);

        verify(discoveryClient).getLocalServiceInstance();
        verify(serializer).serialize(COMMAND_NAME_FILTER, String.class);
        verify(serializer).deserialize(expectedSerializedObject);
    }

    // updateMembership must also add the local instance to the consistent hash.
    @Test
    public void testUpdateMemberShipUpdatesConsistentHash() throws Exception {
        testSubject.updateMembership(LOAD_FACTOR, COMMAND_NAME_FILTER);

        AtomicReference<ConsistentHash> resultAtomicConsistentHash =
                ReflectionUtils.getFieldValue(atomicConsistentHashField, testSubject);

        Set<Member> resultMemberSet = resultAtomicConsistentHash.get().getMembers();
        assertFalse(resultMemberSet.isEmpty());

        assertMember(SERVICE_INSTANCE_ID, SERVICE_INSTANCE_URI, resultMemberSet.iterator().next());

        verify(discoveryClient).getLocalServiceInstance();
        verify(serializer).serialize(COMMAND_NAME_FILTER, String.class);
        verify(serializer).deserialize(expectedSerializedObject);
    }

    // A heartbeat event triggers re-discovery; instances that carry router
    // metadata are added to the ring.
    @Test
    public void testUpdateMembershipsOnHeartbeatEventUpdatesConsistentHash() throws Exception {
        serviceInstanceMetadata.put("loadFactor", Integer.toString(LOAD_FACTOR));
        serviceInstanceMetadata.put("serializedCommandFilter", SERIALIZED_COMMAND_FILTER);
        serviceInstanceMetadata.put("serializedCommandFilterClassName", SERIALIZED_COMMAND_FILTER_CLASS_NAME);

        testSubject.updateMemberships(mock(HeartbeatEvent.class));

        AtomicReference<ConsistentHash> resultAtomicConsistentHash =
                ReflectionUtils.getFieldValue(atomicConsistentHashField, testSubject);

        Set<Member> resultMemberSet = resultAtomicConsistentHash.get().getMembers();
        assertFalse(resultMemberSet.isEmpty());

        assertMember(SERVICE_INSTANCE_ID, SERVICE_INSTANCE_URI, resultMemberSet.iterator().next());

        verify(discoveryClient).getServices();
        verify(discoveryClient).getInstances(SERVICE_INSTANCE_ID);
        verify(serializer).deserialize(expectedSerializedObject);
    }

    // Instances without the router-specific metadata keys must be filtered out
    // of the ring: only the one properly-annotated instance remains.
    @Test
    public void testUpdateMembershipsOnHeartbeatEventFiltersInstancesWithoutCommandRouterSpecificMetadata() throws Exception {
        int expectedMemberSetSize = 1;
        String expectedServiceInstanceId = "nonCommandRouterServiceInstance";

        serviceInstanceMetadata.put("loadFactor", Integer.toString(LOAD_FACTOR));
        serviceInstanceMetadata.put("serializedCommandFilter", SERIALIZED_COMMAND_FILTER);
        serviceInstanceMetadata.put("serializedCommandFilterClassName", SERIALIZED_COMMAND_FILTER_CLASS_NAME);

        ServiceInstance nonCommandRouterServiceInstance = mock(ServiceInstance.class);
        when(nonCommandRouterServiceInstance.getServiceId()).thenReturn(expectedServiceInstanceId);

        when(discoveryClient.getServices())
                .thenReturn(ImmutableList.of(SERVICE_INSTANCE_ID, expectedServiceInstanceId));
        when(discoveryClient.getInstances(SERVICE_INSTANCE_ID))
                .thenReturn(ImmutableList.of(serviceInstance, nonCommandRouterServiceInstance));

        testSubject.updateMemberships(mock(HeartbeatEvent.class));

        AtomicReference<ConsistentHash> resultAtomicConsistentHash =
                ReflectionUtils.getFieldValue(atomicConsistentHashField, testSubject);

        Set<Member> resultMemberSet = resultAtomicConsistentHash.get().getMembers();
        assertEquals(expectedMemberSetSize, resultMemberSet.size());

        verify(discoveryClient).getServices();
        verify(discoveryClient).getInstances(SERVICE_INSTANCE_ID);
        verify(discoveryClient).getInstances(expectedServiceInstanceId);
        verify(serializer).deserialize(expectedSerializedObject);
    }

    // Shared assertion: the member must be a ConsistentHashMember with the
    // expected name, segment count (= load factor) and URI endpoint.
    private void assertMember(String expectedMemberName, URI expectedEndpoint, Member resultMember) {
        assertEquals(resultMember.getClass(), ConsistentHash.ConsistentHashMember.class);
        ConsistentHash.ConsistentHashMember result = (ConsistentHash.ConsistentHashMember) resultMember;
        assertEquals(result.name(), expectedMemberName);
        assertEquals(result.segmentCount(), LOAD_FACTOR);

        Optional<URI> connectionEndpointOptional = result.getConnectionEndpoint(URI.class);
        assertTrue(connectionEndpointOptional.isPresent());
        URI resultEndpoint = connectionEndpointOptional.orElseThrow(IllegalStateException::new);
        assertEquals(resultEndpoint, expectedEndpoint);
    }
}
/*
 * Copyright 2015 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.realm.entities;

import java.util.Date;

import io.realm.RealmObject;
import io.realm.annotations.Required;

// Always follow below order and put comments like below to make NullTypes Related cases
// 1 String
// 2 Bytes
// 3 Boolean
// 4 Byte
// 5 Short
// 6 Integer
// 7 Long
// 8 Float
// 9 Double
// 10 Date
// 11 Object

/**
 * Test entity without a primary key that exposes every supported field type in
 * both a {@code @Required} (non-null) and a nullable variant, plus a nullable
 * object link. The {@code FIELD_*} constants mirror the field names for use in
 * queries/assertions; previously the Bytes and Object constants were missing
 * even though the fields exist — they are added here for consistency.
 */
public class NoPrimaryKeyNullTypes extends RealmObject {
    public static String FIELD_STRING_NOT_NULL = "fieldStringNotNull";
    public static String FIELD_STRING_NULL = "fieldStringNull";
    public static String FIELD_BYTES_NOT_NULL = "fieldBytesNotNull";
    public static String FIELD_BYTES_NULL = "fieldBytesNull";
    public static String FIELD_BOOLEAN_NOT_NULL = "fieldBooleanNotNull";
    public static String FIELD_BOOLEAN_NULL = "fieldBooleanNull";
    public static String FIELD_BYTE_NOT_NULL = "fieldByteNotNull";
    public static String FIELD_BYTE_NULL = "fieldByteNull";
    public static String FIELD_SHORT_NOT_NULL = "fieldShortNotNull";
    public static String FIELD_SHORT_NULL = "fieldShortNull";
    public static String FIELD_INTEGER_NOT_NULL = "fieldIntegerNotNull";
    public static String FIELD_INTEGER_NULL = "fieldIntegerNull";
    public static String FIELD_LONG_NOT_NULL = "fieldLongNotNull";
    public static String FIELD_LONG_NULL = "fieldLongNull";
    public static String FIELD_FLOAT_NOT_NULL = "fieldFloatNotNull";
    public static String FIELD_FLOAT_NULL = "fieldFloatNull";
    public static String FIELD_DOUBLE_NOT_NULL = "fieldDoubleNotNull";
    public static String FIELD_DOUBLE_NULL = "fieldDoubleNull";
    public static String FIELD_DATE_NOT_NULL = "fieldDateNotNull";
    public static String FIELD_DATE_NULL = "fieldDateNull";
    public static String FIELD_OBJECT_NULL = "fieldObjectNull";

    @Required
    private String fieldStringNotNull = "";
    private String fieldStringNull;

    @Required
    private byte[] fieldBytesNotNull = new byte[0];
    private byte[] fieldBytesNull;

    @Required
    private Boolean fieldBooleanNotNull = false;
    private Boolean fieldBooleanNull;

    @Required
    private Byte fieldByteNotNull = 0;
    private Byte fieldByteNull;

    @Required
    private Short fieldShortNotNull = 0;
    private Short fieldShortNull;

    @Required
    private Integer fieldIntegerNotNull = 0;
    private Integer fieldIntegerNull;

    @Required
    private Long fieldLongNotNull = 0L;
    private Long fieldLongNull;

    @Required
    private Float fieldFloatNotNull = 0F;
    private Float fieldFloatNull;

    @Required
    private Double fieldDoubleNotNull = 0D;
    private Double fieldDoubleNull;

    @Required
    private Date fieldDateNotNull = new Date(0);
    private Date fieldDateNull;

    // Object links cannot be @Required in Realm; only a nullable variant exists.
    private NoPrimaryKeyNullTypes fieldObjectNull;

    public String getFieldStringNotNull() {
        return fieldStringNotNull;
    }

    public void setFieldStringNotNull(String fieldStringNotNull) {
        this.fieldStringNotNull = fieldStringNotNull;
    }

    public String getFieldStringNull() {
        return fieldStringNull;
    }

    public void setFieldStringNull(String fieldStringNull) {
        this.fieldStringNull = fieldStringNull;
    }

    public byte[] getFieldBytesNull() {
        return fieldBytesNull;
    }

    public void setFieldBytesNull(byte[] fieldBytesNull) {
        this.fieldBytesNull = fieldBytesNull;
    }

    public byte[] getFieldBytesNotNull() {
        return fieldBytesNotNull;
    }

    public void setFieldBytesNotNull(byte[] fieldBytesNotNull) {
        this.fieldBytesNotNull = fieldBytesNotNull;
    }

    public Boolean getFieldBooleanNotNull() {
        return fieldBooleanNotNull;
    }

    public void setFieldBooleanNotNull(Boolean fieldBooleanNotNull) {
        this.fieldBooleanNotNull = fieldBooleanNotNull;
    }

    public Boolean getFieldBooleanNull() {
        return fieldBooleanNull;
    }

    public void setFieldBooleanNull(Boolean fieldBooleanNull) {
        this.fieldBooleanNull = fieldBooleanNull;
    }

    public Byte getFieldByteNotNull() {
        return fieldByteNotNull;
    }

    public void setFieldByteNotNull(Byte fieldByteNotNull) {
        this.fieldByteNotNull = fieldByteNotNull;
    }

    public Byte getFieldByteNull() {
        return fieldByteNull;
    }

    public void setFieldByteNull(Byte fieldByteNull) {
        this.fieldByteNull = fieldByteNull;
    }

    public Short getFieldShortNotNull() {
        return fieldShortNotNull;
    }

    public void setFieldShortNotNull(Short fieldShortNotNull) {
        this.fieldShortNotNull = fieldShortNotNull;
    }

    public Short getFieldShortNull() {
        return fieldShortNull;
    }

    public void setFieldShortNull(Short fieldShortNull) {
        this.fieldShortNull = fieldShortNull;
    }

    public Integer getFieldIntegerNotNull() {
        return fieldIntegerNotNull;
    }

    public void setFieldIntegerNotNull(Integer fieldIntegerNotNull) {
        this.fieldIntegerNotNull = fieldIntegerNotNull;
    }

    public Integer getFieldIntegerNull() {
        return fieldIntegerNull;
    }

    public void setFieldIntegerNull(Integer fieldIntegerNull) {
        this.fieldIntegerNull = fieldIntegerNull;
    }

    public Long getFieldLongNotNull() {
        return fieldLongNotNull;
    }

    public void setFieldLongNotNull(Long fieldLongNotNull) {
        this.fieldLongNotNull = fieldLongNotNull;
    }

    public Long getFieldLongNull() {
        return fieldLongNull;
    }

    public void setFieldLongNull(Long fieldLongNull) {
        this.fieldLongNull = fieldLongNull;
    }

    public Float getFieldFloatNotNull() {
        return fieldFloatNotNull;
    }

    public void setFieldFloatNotNull(Float fieldFloatNotNull) {
        this.fieldFloatNotNull = fieldFloatNotNull;
    }

    public Float getFieldFloatNull() {
        return fieldFloatNull;
    }

    public void setFieldFloatNull(Float fieldFloatNull) {
        this.fieldFloatNull = fieldFloatNull;
    }

    public Double getFieldDoubleNotNull() {
        return fieldDoubleNotNull;
    }

    public void setFieldDoubleNotNull(Double fieldDoubleNotNull) {
        this.fieldDoubleNotNull = fieldDoubleNotNull;
    }

    public Double getFieldDoubleNull() {
        return fieldDoubleNull;
    }

    public void setFieldDoubleNull(Double fieldDoubleNull) {
        this.fieldDoubleNull = fieldDoubleNull;
    }

    public Date getFieldDateNotNull() {
        return fieldDateNotNull;
    }

    public void setFieldDateNotNull(Date fieldDateNotNull) {
        this.fieldDateNotNull = fieldDateNotNull;
    }

    public Date getFieldDateNull() {
        return fieldDateNull;
    }

    public void setFieldDateNull(Date fieldDateNull) {
        this.fieldDateNull = fieldDateNull;
    }

    public NoPrimaryKeyNullTypes getFieldObjectNull() {
        return fieldObjectNull;
    }

    public void setFieldObjectNull(NoPrimaryKeyNullTypes fieldObjectNull) {
        this.fieldObjectNull = fieldObjectNull;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cloudera.sqoop.manager;

import com.cloudera.sqoop.ConnFactory;
import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.testutil.CommonArgs;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.cloudera.sqoop.testutil.ExportJobTestCase;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
import org.junit.Before;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

/**
 * Manual export tests against a live SQL Server instance.
 * Please see instructions in SQLServerManagerImportManualTest.
 */
public class SQLServerManagerExportManualTest extends ExportJobTestCase {

  public static final Log LOG = LogFactory.getLog(
      SQLServerManagerExportManualTest.class.getName());

  static final String HOST_URL = System.getProperty(
      "sqoop.test.sqlserver.connectstring.host_url",
      "jdbc:sqlserver://sqlserverhost:1433");

  static final String DATABASE_NAME = "SQOOPTEST";
  static final String DATABASE_USER = "SQOOPUSER";
  static final String DATABASE_PASSWORD = "PASSWORD";

  static final String SCHEMA_DBO = "dbo";
  static final String DBO_TABLE_NAME = "EMPLOYEES_MSSQL";
  static final String DBO_BINARY_TABLE_NAME = "BINARYTYPE_MSSQL";
  static final String SCHEMA_SCH = "sch";
  static final String SCH_TABLE_NAME = "PRIVATE_TABLE";

  static final String CONNECT_STRING = HOST_URL
      + ";databaseName=" + DATABASE_NAME;

  static final String CONNECTOR_FACTORY = System.getProperty(
      "sqoop.test.msserver.connector.factory",
      ConnFactory.DEFAULT_FACTORY_CLASS_NAMES);

  // instance variables populated during setUp, used during tests
  private SQLServerManager manager;
  private Configuration conf = new Configuration();
  private Connection conn = null;

  @Override
  protected Configuration getConf() {
    return conf;
  }

  @Override
  protected boolean useHsqldbTestServer() {
    return false;
  }

  @Before
  public void setUp() {
    super.setUp();

    SqoopOptions options = new SqoopOptions(CONNECT_STRING, DBO_TABLE_NAME);
    options.setUsername(DATABASE_USER);
    options.setPassword(DATABASE_PASSWORD);

    manager = new SQLServerManager(options);

    createTableAndPopulateData(SCHEMA_DBO, DBO_TABLE_NAME);
    createTableAndPopulateData(SCHEMA_SCH, SCH_TABLE_NAME);

    // To test with Microsoft SQL server connector, copy the connector jar to
    // sqoop.thirdparty.lib.dir and set sqoop.test.msserver.connector.factory
    // to com.microsoft.sqoop.SqlServer.MSSQLServerManagerFactory. By default,
    // the built-in SQL server connector is used.
    conf.setStrings(ConnFactory.FACTORY_CLASS_NAMES_KEY, CONNECTOR_FACTORY);
  }

  /**
   * Recreates the employees table under the given schema.
   * NOTE(review): despite the name, this method only creates the table; no
   * rows are inserted here — the data comes from the export runs.
   */
  public void createTableAndPopulateData(String schema, String table) {
    recreateTable(schema, table, " ("
        + "id INT NOT NULL, "
        + "name VARCHAR(24) NOT NULL, "
        + "salary FLOAT, "
        + "dept VARCHAR(32), "
        + "PRIMARY KEY (id))");
  }

  /**
   * Recreates a table with BINARY/VARBINARY columns under the given schema,
   * used by the binary-type export test.
   */
  public void createSQLServerBinaryTypeTable(String schema, String table) {
    recreateTable(schema, table, " ("
        + "id INT PRIMARY KEY, "
        + "b1 BINARY(10), "
        + "b2 VARBINARY(10))");
  }

  /**
   * Shared setup path for both table variants: ensure the schema exists,
   * drop any previous table, then create it with the given column DDL.
   * Extracted to remove the near-duplicate bodies of the two public
   * create methods above.
   */
  private void recreateTable(String schema, String table, String columnsDdl) {
    String fulltableName = manager.escapeObjectName(schema)
        + "." + manager.escapeObjectName(table);
    ensureSchema(schema);
    dropTableIfExists(fulltableName);
    createTable(fulltableName, columnsDdl);
  }

  /** Creates the schema; failure is expected (and logged) if it exists. */
  private void ensureSchema(String schema) {
    Statement stmt = null;
    try {
      conn = manager.getConnection();
      stmt = conn.createStatement();
      stmt.execute("CREATE SCHEMA " + schema);
      conn.commit();
    } catch (SQLException sqlE) {
      // Best-effort: schema probably exists already.
      LOG.info("Can't create schema: " + sqlE.getMessage());
    } finally {
      try {
        if (null != stmt) {
          stmt.close();
        }
      } catch (Exception ex) {
        LOG.warn("Exception while closing stmt", ex);
      }
    }
  }

  /** Drops the table; failure is expected (and logged) if it is absent. */
  private void dropTableIfExists(String fulltableName) {
    Statement stmt = null;
    try {
      conn = manager.getConnection();
      stmt = conn.createStatement();
      stmt.execute("DROP TABLE " + fulltableName);
      conn.commit();
    } catch (SQLException sqlE) {
      // Best-effort: table may not exist yet.
      LOG.info("Table was not dropped: " + sqlE.getMessage());
    } finally {
      try {
        if (null != stmt) {
          stmt.close();
        }
      } catch (Exception ex) {
        LOG.warn("Exception while closing stmt", ex);
      }
    }
  }

  /** Creates the table; a failure here fails the test setup. */
  private void createTable(String fulltableName, String columnsDdl) {
    Statement stmt = null;
    try {
      conn = manager.getConnection();
      conn.setAutoCommit(false);
      stmt = conn.createStatement();
      stmt.executeUpdate("CREATE TABLE " + fulltableName + columnsDdl);
      conn.commit();
    } catch (SQLException sqlE) {
      LOG.error("Encountered SQL Exception: ", sqlE);
      sqlE.printStackTrace();
      fail("SQLException when running test setUp(): " + sqlE);
    } finally {
      try {
        if (null != stmt) {
          stmt.close();
        }
      } catch (Exception ex) {
        LOG.warn("Exception while closing connection/stmt", ex);
      }
    }
  }

  @After
  public void tearDown() {
    super.tearDown();
    try {
      // Guard against an NPE when setUp failed before a connection was made.
      if (null != conn) {
        conn.close();
      }
      manager.close();
    } catch (SQLException sqlE) {
      LOG.error("Got SQLException: " + sqlE.toString());
      fail("Got SQLException: " + sqlE.toString());
    }
  }

  /** Builds the common Sqoop export argument list for the given table. */
  private String [] getArgv(String tableName, String... extraArgs) {
    ArrayList<String> args = new ArrayList<String>();

    CommonArgs.addHadoopFlags(args);

    args.add("--table");
    args.add(tableName);
    args.add("--export-dir");
    args.add(getWarehouseDir());
    args.add("--fields-terminated-by");
    args.add(",");
    args.add("--lines-terminated-by");
    args.add("\\n");
    args.add("--connect");
    args.add(CONNECT_STRING);
    args.add("--username");
    args.add(DATABASE_USER);
    args.add("--password");
    args.add(DATABASE_PASSWORD);
    args.add("-m");
    args.add("1");

    for (String arg : extraArgs) {
      args.add(arg);
    }

    return args.toArray(new String[0]);
  }

  /** Writes the given lines to a file inside the warehouse dir. */
  protected void createTestFile(String filename, String[] lines)
      throws IOException {
    new File(getWarehouseDir()).mkdirs();
    File file = new File(getWarehouseDir() + "/" + filename);
    Writer output = new BufferedWriter(new FileWriter(file));
    for (String line : lines) {
      output.write(line);
      output.write("\n");
    }
    output.close();
  }

  public void testExport() throws IOException, SQLException {
    createTestFile("inputFile", new String[] {
        "2,Bob,400,sales",
        "3,Fred,15,marketing",
    });

    runExport(getArgv(DBO_TABLE_NAME));

    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
  }

  public void testExportCustomSchema() throws IOException, SQLException {
    createTestFile("inputFile", new String[] {
        "2,Bob,400,sales",
        "3,Fred,15,marketing",
    });

    // "--" separates Sqoop args from connector-specific extra args.
    String[] extra = new String[] {"--",
        "--schema",
        SCHEMA_SCH,
    };

    runExport(getArgv(SCH_TABLE_NAME, extra));

    assertRowCount(
        2,
        escapeObjectName(SCHEMA_SCH) + "." + escapeObjectName(SCH_TABLE_NAME),
        conn
    );
  }

  public void testExportTableHints() throws IOException, SQLException {
    createTestFile("inputFile", new String[] {
        "2,Bob,400,sales",
        "3,Fred,15,marketing",
    });

    String []extra = new String[] {"--", "--table-hints",
        "ROWLOCK",
    };
    runExport(getArgv(DBO_TABLE_NAME, extra));
    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
  }

  public void testExportTableHintsMultiple() throws IOException, SQLException {
    createTestFile("inputFile", new String[] {
        "2,Bob,400,sales",
        "3,Fred,15,marketing",
    });

    String []extra = new String[] {"--", "--table-hints",
        "ROWLOCK,NOWAIT",
    };
    runExport(getArgv(DBO_TABLE_NAME, extra));
    assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
  }

  public void testSQLServerBinaryType() throws IOException, SQLException {
    createSQLServerBinaryTypeTable(SCHEMA_DBO, DBO_BINARY_TABLE_NAME);
    createTestFile("inputFile", new String[] {
        "1,73 65 63 72 65 74 00 00 00 00,73 65 63 72 65 74"
    });
    String[] expectedContent = {"73656372657400000000",
        "736563726574"};
    runExport(getArgv(DBO_BINARY_TABLE_NAME));
    assertRowCount(1, escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
    checkSQLBinaryTableContent(expectedContent,
        escapeObjectName(DBO_BINARY_TABLE_NAME), conn);
  }

  /** Make sure mixed update/insert export work correctly. */
  public void testUpsertTextExport() throws IOException, SQLException {
    createTestFile("inputFile", new String[] {
        "2,Bob,400,sales",
        "3,Fred,15,marketing",
    });
    // first time will be insert.
    runExport(getArgv(SCH_TABLE_NAME,
        "--update-key", "id",
        "--update-mode", "allowinsert"));
    // second time will be update.
    runExport(getArgv(SCH_TABLE_NAME,
        "--update-key", "id",
        "--update-mode", "allowinsert"));
    assertRowCount(2, escapeObjectName(SCH_TABLE_NAME), conn);
  }

  /** Asserts the hex renderings of the first row's binary columns. */
  public static void checkSQLBinaryTableContent(String[] expected,
      String tableName, Connection connection) {
    Statement stmt = null;
    ResultSet rs = null;
    try {
      stmt = connection.createStatement();
      rs = stmt.executeQuery("SELECT TOP 1 [b1], [b2] FROM " + tableName);
      rs.next();
      assertEquals(expected[0], rs.getString("b1"));
      assertEquals(expected[1], rs.getString("b2"));
    } catch (SQLException e) {
      LOG.error("Can't verify table content", e);
      fail();
    } finally {
      try {
        connection.commit();
        if (stmt != null) {
          stmt.close();
        }
        if (rs != null) {
          rs.close();
        }
      } catch (SQLException ex) {
        LOG.info("Ignored exception in finally block.");
      }
    }
  }

  /** Asserts the row count of the given table. */
  public static void assertRowCount(long expected,
      String tableName, Connection connection) {
    Statement stmt = null;
    ResultSet rs = null;
    try {
      stmt = connection.createStatement();
      rs = stmt.executeQuery("SELECT count(*) FROM " + tableName);
      rs.next();
      assertEquals(expected, rs.getLong(1));
    } catch (SQLException e) {
      LOG.error("Can't verify number of rows", e);
      fail();
    } finally {
      try {
        connection.commit();
        if (stmt != null) {
          stmt.close();
        }
        if (rs != null) {
          rs.close();
        }
      } catch (SQLException ex) {
        LOG.info("Ignored exception in finally block.");
      }
    }
  }

  /** Wraps an identifier in SQL Server bracket quoting. */
  public String escapeObjectName(String objectName) {
    return "[" + objectName + "]";
  }
}