gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright (c) 2013-2017 Cinchapi Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cinchapi.concourse.server.storage;

import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel.MapMode;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.StampedLock;

import javax.annotation.concurrent.ThreadSafe;

import com.cinchapi.concourse.server.io.FileSystem;
import com.cinchapi.concourse.util.Integers;
import com.cinchapi.concourse.util.LongBitSet;

import com.google.common.collect.Lists;

/**
 * The {@link Inventory} is a persistent collection of longs that represents a
 * listing of all the records that exist within an environment (e.g. have data
 * or a history of data).
 * <p>
 * On disk, the inventory is a flat sequence of 8-byte longs appended to a
 * memory-mapped file; in memory it is mirrored by a {@link LongBitSet} for
 * fast membership checks. Writes are buffered in {@link #dirty} until
 * {@link #sync()} flushes them.
 * </p>
 *
 * @author Jeff Nelson
 */
@ThreadSafe
public class Inventory {

    /**
     * Return a new {@link Inventory} that is persisted to the
     * {@code backingStore} in calls to {@link #sync()}.
     *
     * @param backingStore path of the file that persists the inventory
     * @return the Inventory
     */
    public static Inventory create(String backingStore) {
        return new Inventory(backingStore);
    }

    /**
     * The amount of memory and disk space allocated each time we need to extend
     * the backing store. This value is rounded up to the nearest power of two
     * for the most efficient I/O.
     */
    private static final int MEMORY_MAPPING_SIZE = 8 * 20000;

    /**
     * The location where the inventory is stored on disk.
     */
    private final String backingStore;

    /**
     * The bitset that contains the read-efficient version of the data in the
     * inventory. Record ids are used directly as bit indices.
     */
    private final LongBitSet bitSet;

    /**
     * A memory mapped buffer that is used to handle writes to the backing
     * store. Replaced (remapped) by {@link #map0(long, int)} whenever the
     * mapped region must grow.
     */
    private MappedByteBuffer content;

    /**
     * A collection of dirty writes that have not been synced to disk yet.
     * Cleared after each successful {@link #sync()}.
     */
    protected final transient List<Long> dirty = Lists
            .newArrayListWithExpectedSize(1); // visible for testing

    /**
     * Concurrency control for readers/writers over {@link #bitSet},
     * {@link #dirty} and {@link #content}.
     */
    private final StampedLock lock = new StampedLock();

    /**
     * Construct a new instance. If the {@code backingStore} has data then it
     * will be read into memory.
     * <p>
     * The file is first mapped read-only and scanned long-by-long. Two
     * consecutive zero longs are interpreted as the end of real data (the
     * remainder of the pre-allocated region is zero filler), at which point
     * the position is rewound by 16 bytes so subsequent writes overwrite the
     * filler. Afterwards the file is remapped read-write at that position.
     * </p>
     * <p>
     * NOTE(review): record id 0 therefore cannot be reliably round-tripped,
     * and a file whose final 8 bytes are a lone zero would trigger a second
     * {@code getLong()} past the data — presumably the pre-allocation in
     * {@link #map0(long, int)} guarantees trailing slack; confirm.
     * </p>
     *
     * @param backingStore path of the persistent file
     */
    private Inventory(String backingStore) {
        this.backingStore = backingStore;
        this.bitSet = LongBitSet.create();
        this.content = FileSystem.map(backingStore, MapMode.READ_ONLY, 0,
                FileSystem.getFileSize(backingStore));
        while (content.position() < content.capacity()) {
            long record = content.getLong();
            if(record == 0 && content.getLong() == 0) {
                // Two consecutive zero longs mean we have read past the real
                // data into zero filler; rewind past both and stop.
                content.position(content.position() - 16);
                break;
            }
            else {
                bitSet.set(record);
            }
        }
        // Remap read-write at the end of the real data so future syncs append.
        map0(content.position(), MEMORY_MAPPING_SIZE);
    }

    /**
     * Add an {@code record} to the inventory.
     * <p>
     * The record is only queued in {@link #dirty} (for the next
     * {@link #sync()}) if it was not already present in the bitset.
     * </p>
     *
     * @param record the record id to add
     */
    public void add(long record) {
        long stamp = lock.writeLock();
        try {
            // NOTE(review): assumes LongBitSet#set returns true only when the
            // bit actually changed — confirm against LongBitSet.
            if(bitSet.set(record)) {
                dirty.add(record);
            }
        }
        finally {
            lock.unlockWrite(stamp);
        }
    }

    /**
     * Return {@code true} if the {@code record} exists within the inventory.
     * <p>
     * Uses an optimistic read first and falls back to a full read lock only
     * if a concurrent write invalidated the optimistic stamp.
     * </p>
     *
     * @param record the record id to check
     * @return {@code true} if the record is contained
     */
    public boolean contains(long record) {
        long stamp = lock.tryOptimisticRead();
        boolean result = bitSet.get(record);
        if(lock.validate(stamp)) {
            return result;
        }
        else {
            stamp = lock.readLock();
            try {
                return bitSet.get(record);
            }
            finally {
                lock.unlockRead(stamp);
            }
        }
    }

    /**
     * Return the set of all records that have ever had data.
     * <p>
     * NOTE(review): this read is not guarded by {@link #lock}, unlike
     * {@link #contains(long)}, and the unchecked cast assumes
     * {@code LongBitSet#toIterable()} actually returns a {@code Set} —
     * confirm both against LongBitSet.
     * </p>
     *
     * @return {@code Set<Long>} of all record ids in the inventory
     */
    public Set<Long> getAll() {
        return (Set<Long>) bitSet.toIterable();
    }

    /**
     * Perform an fsync and flush any dirty writes to disk.
     */
    public void sync() {
        long stamp = lock.writeLock();
        try {
            if(!dirty.isEmpty()) {
                Iterator<Long> it = dirty.iterator();
                while (it.hasNext()) {
                    if(content.remaining() < 8) {
                        // Out of mapped space: remap at the current position
                        // with a doubled length (rounded up to a power of two
                        // inside map0).
                        map0(content.position(), content.position() * 2);
                    }
                    content.putLong(it.next());
                }
                content.force(); // fsync the mapped region
                dirty.clear();
            }
        }
        finally {
            lock.unlockWrite(stamp);
        }
    }

    /**
     * Map the {@link #backingStore} in memory for the specified
     * {@code position} for at least {@code length} bytes.
     * <p>
     * The previous mapping is explicitly unmapped first; the new length is
     * rounded up to the next power of two.
     * </p>
     *
     * @param position byte offset in the backing store where the mapping starts
     * @param length minimum number of bytes to map
     */
    private void map0(long position, int length) {
        FileSystem.unmap(content);
        content = FileSystem.map(backingStore, MapMode.READ_WRITE, position,
                Integers.nextPowerOfTwo(length));
    }
}
package com.github.phenomics.ontolib.io.obo.hpo; import com.github.phenomics.ontolib.base.OntoLibRuntimeException; import com.github.phenomics.ontolib.formats.hpo.HpoRelationQualifier; import com.github.phenomics.ontolib.formats.hpo.HpoTerm; import com.github.phenomics.ontolib.formats.hpo.HpoTermRelation; import com.github.phenomics.ontolib.io.obo.DbXref; import com.github.phenomics.ontolib.io.obo.OboImmutableOntologyLoader; import com.github.phenomics.ontolib.io.obo.OboOntologyEntryFactory; import com.github.phenomics.ontolib.io.obo.Stanza; import com.github.phenomics.ontolib.io.obo.StanzaEntry; import com.github.phenomics.ontolib.io.obo.StanzaEntryAltId; import com.github.phenomics.ontolib.io.obo.StanzaEntryComment; import com.github.phenomics.ontolib.io.obo.StanzaEntryCreatedBy; import com.github.phenomics.ontolib.io.obo.StanzaEntryCreationDate; import com.github.phenomics.ontolib.io.obo.StanzaEntryDef; import com.github.phenomics.ontolib.io.obo.StanzaEntryDisjointFrom; import com.github.phenomics.ontolib.io.obo.StanzaEntryId; import com.github.phenomics.ontolib.io.obo.StanzaEntryIntersectionOf; import com.github.phenomics.ontolib.io.obo.StanzaEntryIsA; import com.github.phenomics.ontolib.io.obo.StanzaEntryIsObsolete; import com.github.phenomics.ontolib.io.obo.StanzaEntryName; import com.github.phenomics.ontolib.io.obo.StanzaEntryRelationship; import com.github.phenomics.ontolib.io.obo.StanzaEntrySubset; import com.github.phenomics.ontolib.io.obo.StanzaEntrySynonym; import com.github.phenomics.ontolib.io.obo.StanzaEntryType; import com.github.phenomics.ontolib.io.obo.StanzaEntryUnionOf; import com.github.phenomics.ontolib.io.obo.StanzaEntryXref; import com.github.phenomics.ontolib.io.obo.TrailingModifier.KeyValue; import com.github.phenomics.ontolib.ontology.data.Dbxref; import com.github.phenomics.ontolib.ontology.data.ImmutableDbxref; import com.github.phenomics.ontolib.ontology.data.ImmutableTermId; import 
com.github.phenomics.ontolib.ontology.data.ImmutableTermSynonym; import com.github.phenomics.ontolib.ontology.data.ImmutableTermXref; import com.github.phenomics.ontolib.ontology.data.TermId; import com.github.phenomics.ontolib.ontology.data.TermSynonym; import com.github.phenomics.ontolib.ontology.data.TermSynonymScope; import com.github.phenomics.ontolib.ontology.data.TermXref; import com.google.common.collect.Lists; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.stream.Collectors; /** * Factory class for constructing {@link HpoTerm} and {@link HpoTermRelation} objects from * {@link Stanza} objects for usage in {@link OboOntologyEntryFactory}. * * @author <a href="mailto:manuel.holtgrewe@bihealth.de">Manuel Holtgrewe</a> */ class HpoOboFactory implements OboOntologyEntryFactory<HpoTerm, HpoTermRelation> { /** * Mapping from string representation of term Id to {@link TermId}. * * <p> * All occuring termIds must be previously registered into this map before calling any of this * object's functions. This happens in {@link OboImmutableOntologyLoader}. * </p> */ private SortedMap<String, ImmutableTermId> termIds = null; /** Id of next relation. 
*/ private int nextRelationId = 1; @Override public void setTermIds(SortedMap<String, ImmutableTermId> termIds) { this.termIds = termIds; } @Override public HpoTerm constructTerm(Stanza stanza) { final TermId id = termIds.get(this.<StanzaEntryId>getCardinalityOneEntry(stanza, StanzaEntryType.ID).getId()); final String name = this.<StanzaEntryName>getCardinalityOneEntry(stanza, StanzaEntryType.NAME).getName(); final List<TermId> altTermIds; final List<StanzaEntry> altEntryList = stanza.getEntryByType().get(StanzaEntryType.ALT_ID); if (altEntryList == null) { altTermIds = Lists.newArrayList(); } else { altTermIds = altEntryList.stream().map(e -> termIds.get(((StanzaEntryAltId) e).getAltId())) .collect(Collectors.toList()); } final StanzaEntryDef defEntry = this.<StanzaEntryDef>getCardinalityZeroOrOneEntry(stanza, StanzaEntryType.DEF); final String definition = (defEntry == null) ? null : defEntry.getText(); final StanzaEntryComment commentEntry = this.<StanzaEntryComment>getCardinalityZeroOrOneEntry(stanza, StanzaEntryType.COMMENT); final String comment = (commentEntry == null) ? 
null : commentEntry.getText(); final List<String> subsets; final List<StanzaEntry> subsetEntryList = stanza.getEntryByType().get(StanzaEntryType.SUBSET); if (subsetEntryList == null) { subsets = Lists.newArrayList(); } else { subsets = subsetEntryList.stream().map(e -> ((StanzaEntrySubset) e).getName()) .collect(Collectors.toList()); } final List<TermSynonym> synonyms; final List<StanzaEntry> synonymEntryList = stanza.getEntryByType().get(StanzaEntryType.SYNONYM); if (synonymEntryList == null) { synonyms = Lists.newArrayList(); } else { synonyms = synonymEntryList.stream().map(e -> { final StanzaEntrySynonym s = (StanzaEntrySynonym) e; final String value = s.getText(); final TermSynonymScope scope = s.getTermSynonymScope(); final String synonymTypeName = s.getSynonymTypeName(); final List<TermXref> termXrefs = s.getDbXrefList().getDbXrefs().stream() .map(xref -> new ImmutableTermXref(termIds.get(xref.getName()), xref.getDescription())) .collect(Collectors.toList()); return new ImmutableTermSynonym(value, scope, synonymTypeName, termXrefs); }).collect(Collectors.toList()); } final StanzaEntryIsObsolete isObsoleteEntry = this.< StanzaEntryIsObsolete>getCardinalityZeroOrOneEntry(stanza, StanzaEntryType.IS_OBSOLETE); final boolean obsolete = (isObsoleteEntry == null) ? false : isObsoleteEntry.getValue(); final StanzaEntryCreatedBy createdByEntry = this.<StanzaEntryCreatedBy>getCardinalityZeroOrOneEntry(stanza, StanzaEntryType.CREATED_BY); final String createdBy = (createdByEntry == null) ? null : createdByEntry.getCreator(); final StanzaEntryCreationDate creationDateEntry = this.< StanzaEntryCreationDate>getCardinalityZeroOrOneEntry(stanza, StanzaEntryType.CREATION_DATE); final String creationDateStr = (creationDateEntry == null) ? 
null : creationDateEntry.getValue(); final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); Date creationDate = null; if (creationDateStr != null) { try { creationDate = format.parse(creationDateStr); } catch (ParseException e) { throw new OntoLibRuntimeException("Problem parsing date string " + creationDateStr, e); } } final List<StanzaEntry> entryList = stanza.getEntryByType().get(StanzaEntryType.XREF); final List<Dbxref> dbxrefList = new ArrayList<>(); if (entryList != null) { final List<StanzaEntryXref> xrefs = entryList.stream().map(entry -> (StanzaEntryXref) entry).collect(Collectors.toList()); for (StanzaEntryXref xref : xrefs) { final DbXref dbXref = xref.getDbXref(); final Map<String, String> trailingModifiers; if (dbXref.getTrailingModifier() != null) { trailingModifiers = new HashMap<>(); for (KeyValue kv : dbXref.getTrailingModifier().getKeyValue()) { trailingModifiers.put(kv.getKey(), kv.getValue()); } } else { trailingModifiers = null; } dbxrefList .add(new ImmutableDbxref(dbXref.getName(), dbXref.getDescription(), trailingModifiers)); } } return new HpoTerm(id, altTermIds, name, definition, comment, subsets, synonyms, obsolete, createdBy, creationDate, dbxrefList); } /** * Extract cardinality one entry (=tag) of type <code>type</code> from <code>stanza</code>. * * @param stanza {@link Stanza} to get {@link StanzaEntry} from. * @param type {@link StanzaEntryType} to use. * @return Resulting {@link StanzaEntry}, properly cast. 
*/ @SuppressWarnings("unchecked") protected <E extends StanzaEntry> E getCardinalityOneEntry(Stanza stanza, StanzaEntryType type) { final List<StanzaEntry> typeEntries = stanza.getEntryByType().get(type); if (typeEntries == null) { throw new OntoLibRuntimeException( type + " tag must have cardinality 1 but was null (" + stanza + ")"); } else if (typeEntries.size() != 1) { throw new OntoLibRuntimeException(type + " tag must have cardinality 1 but was " + typeEntries.size() + " (" + stanza + ")"); } return (E) typeEntries.get(0); } /** * Extract cardinality zero or one entry (=tag) of type <code>type</code> from * <code>stanza</code>. * * @param stanza {@link Stanza} to get {@link StanzaEntry} from. * @param type {@link StanzaEntryType} to use. * @return Resulting {@link StanzaEntry}, properly cast, or <code>null</code>. */ @SuppressWarnings("unchecked") protected <E extends StanzaEntry> E getCardinalityZeroOrOneEntry(Stanza stanza, StanzaEntryType type) { final List<StanzaEntry> typeEntries = stanza.getEntryByType().get(type); if (typeEntries == null) { return null; } else if (typeEntries.size() != 1) { throw new RuntimeException(type + " tag must have cardinality <= 1 but was " + typeEntries.size() + " (" + stanza + ")"); } else { return (E) typeEntries.get(0); } } @Override public HpoTermRelation constructTermRelation(Stanza stanza, StanzaEntryIsA stanzaEntry) { final TermId sourceId = termIds.get(this.<StanzaEntryId>getCardinalityOneEntry(stanza, StanzaEntryType.ID).getId()); final TermId destId = termIds.get(stanzaEntry.getId()); return new HpoTermRelation(sourceId, destId, nextRelationId++, HpoRelationQualifier.IS_A); } @Override public HpoTermRelation constructTermRelation(Stanza stanza, StanzaEntryDisjointFrom stanzaEntry) { throw new UnsupportedOperationException(); } @Override public HpoTermRelation constructTermRelation(Stanza stanza, StanzaEntryUnionOf stanzaEntry) { throw new UnsupportedOperationException(); } @Override public HpoTermRelation 
constructTermRelation(Stanza stanza, StanzaEntryIntersectionOf stanzaEntry) { throw new UnsupportedOperationException(); } @Override public HpoTermRelation constructTermRelation(Stanza stanza, StanzaEntryRelationship stanzaEntry) { throw new UnsupportedOperationException(); } }
package ru.shutoff.cgstarter;

import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.speech.RecognizerIntent;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;

import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.ParseException;

import org.apache.commons.lang3.StringUtils;

import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Vector;

/**
 * Activity that turns a speech-recognition result into a destination address.
 * <p>
 * Candidate phrases from {@link RecognizerIntent#EXTRA_RESULTS} are matched
 * against local bookmarks, then geocoded via Google's geocode / place-search
 * web APIs in a chain of fallback requests (exact geocode, nearby places,
 * wide-radius places). The scored, distance-sorted candidates are listed and
 * tapping one starts navigation and finishes the activity.
 * </p>
 */
public class SearchActivity extends GpsActivity {
    PlaceholderFragment fragment;

    /**
     * Return {@code true} when at least one installed activity can handle a
     * speech-recognition intent (i.e. voice search is available).
     *
     * @param context context used to query the package manager
     */
    static boolean isVoiceSearch(Context context) {
        PackageManager pm = context.getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        return activities.size() > 0;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        supportRequestWindowFeature(Window.FEATURE_NO_TITLE);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        if (savedInstanceState == null) {
            fragment = new PlaceholderFragment();
            // Seed the fragment with the last known fix (may be null); the
            // search starts once a location is available.
            fragment.location = getLastBestLocation();
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, fragment)
                    .commit();
        }
        // Default result until the user actually picks an address.
        setResult(RESULT_CANCELED);
    }

    /**
     * GPS callback from {@code GpsActivity}: push the new fix into the
     * fragment and kick off the search once a location exists.
     */
    @Override
    public void locationChanged() {
        if (fragment != null) {
            fragment.location = getLastBestLocation();
            if (fragment.location != null)
                fragment.startSearch();
        }
    }

    /**
     * Fragment that owns the candidate list, the scoring state and the
     * geocoding request chain.
     */
    public static class PlaceholderFragment extends Fragment {
        // Accumulated candidate addresses (bookmarks + geocoder results).
        Vector<Address> addr_list;
        // Recognized phrases still to be geocoded.
        Vector<Phrase> phrases;
        // Index of the phrase currently being processed by the request chain.
        int phrase;
        ListView results;
        View progress;
        // Size of addr_list at the last UI refresh; used to skip no-op updates.
        int prev_size;
        // Current device location (set by the activity); may be null.
        Location location;
        // Guards startSearch() against running twice.
        boolean started;

        @Override
        public View onCreateView(final LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.list, container, false);
            progress = rootView.findViewById(R.id.progress);
            progress.setVisibility(View.GONE);
            results = (ListView) rootView.findViewById(R.id.list);
            results.setVisibility(View.GONE);
            phrases = new Vector<Phrase>();
            addr_list = new Vector<Address>();
            if (location != null)
                startSearch();
            // Fallback: if no GPS fix arrives within 2s, search anyway
            // (startSearch is idempotent via the `started` flag).
            CountDownTimer timer = new CountDownTimer(2000, 2000) {
                @Override
                public void onTick(long millisUntilFinished) {
                }

                @Override
                public void onFinish() {
                    startSearch();
                }
            };
            timer.start();
            return rootView;
        }

        /**
         * Score the recognizer phrases against bookmarks and start the
         * geocoding chain. Runs at most once per fragment instance.
         * <p>
         * NOTE(review): `res` may be null if the launching intent has no
         * EXTRA_RESULTS, which would NPE at res.size() — presumably this
         * activity is only ever started from a recognizer result; confirm.
         * </p>
         */
        void startSearch() {
            if (started)
                return;
            started = true;
            progress.setVisibility(View.VISIBLE);
            Intent data = getActivity().getIntent();
            ArrayList<String> res = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            float[] scopes = data.getFloatArrayExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES);
            if (scopes == null)
                scopes = new float[0];
            phrases = new Vector<Phrase>();
            Bookmarks.Point[] points = Bookmarks.get(getActivity());
            for (int i = 0; i < res.size(); i++) {
                String r = res.get(i);
                // Phrases beyond the confidence array get a tiny default score.
                float scope = (i < scopes.length) ? scopes[i] : 0.01f;
                // Fuzzy-match the phrase against every bookmark; good matches
                // become (or boost) candidate addresses immediately.
                for (Bookmarks.Point p : points) {
                    float ratio = compare(p.name, r) * 10;
                    if (ratio > 4) {
                        int n = 0;
                        for (n = 0; n < addr_list.size(); n++) {
                            Address addr = addr_list.get(n);
                            if ((addr.lat == p.lat) && (addr.lon == p.lng)) {
                                // Same coordinates already listed: boost its score.
                                addr.scope += scope * ratio;
                                break;
                            }
                        }
                        if (n >= addr_list.size()) {
                            Address address = new Address();
                            address.name = p.name;
                            address.address = "";
                            address.lat = p.lat;
                            address.lon = p.lng;
                            address.scope = scope * ratio;
                            addr_list.add(address);
                        }
                    }
                }
                if (scope == 0)
                    continue;
                Phrase phrase = new Phrase();
                phrase.phrase = r;
                phrase.scope = scope;
                phrases.add(phrase);
            }
            updateResults();
            phrase = 0;
            if (phrases.size() == 0) {
                if (getActivity() != null)
                    getActivity().finish();
                return;
            }
            // Kick off the geocoding chain for the first phrase.
            new Request();
        }

        /**
         * Re-score candidates by distance, sort by score (descending) and
         * refresh or create the list adapter. No-op if addr_list did not grow.
         */
        void updateResults() {
            if (prev_size == addr_list.size())
                return;
            prev_size = addr_list.size();
            if (addr_list.size() == 0)
                return;
            if (location != null) {
                for (Address addr : addr_list) {
                    if (addr.distance != 0)
                        continue;
                    addr.distance = OnExitService.calc_distance(location.getLatitude(), location.getLongitude(), addr.lat, addr.lon);
                    // Penalize far-away candidates (logarithmically).
                    addr.scope /= Math.log(200 + addr.distance);
                }
                // Highest score first.
                Collections.sort(addr_list, new Comparator<Address>() {
                    @Override
                    public int compare(Address lhs, Address rhs) {
                        if (lhs.scope < rhs.scope)
                            return 1;
                        if (lhs.scope > rhs.scope)
                            return -1;
                        return 0;
                    }
                });
            }
            if (results.getVisibility() == View.VISIBLE) {
                BaseAdapter adapter = (BaseAdapter) results.getAdapter();
                adapter.notifyDataSetChanged();
                return;
            }
            results.setAdapter(new BaseAdapter() {
                @Override
                public int getCount() {
                    return addr_list.size();
                }

                @Override
                public Object getItem(int position) {
                    return addr_list.get(position);
                }

                @Override
                public long getItemId(int position) {
                    return position;
                }

                @Override
                public View getView(int position, View convertView, ViewGroup parent) {
                    View v = convertView;
                    if (v == null) {
                        LayoutInflater inflater = (LayoutInflater) getActivity().getSystemService(LAYOUT_INFLATER_SERVICE);
                        v = inflater.inflate(R.layout.addr_item, null);
                    }
                    Address addr = addr_list.get(position);
                    TextView tv = (TextView) v.findViewById(R.id.addr);
                    tv.setText(addr.address);
                    tv = (TextView) v.findViewById(R.id.name);
                    tv.setText(addr.name);
                    tv = (TextView) v.findViewById(R.id.dist);
                    if (addr.distance < 100) {
                        // Hide distances under 100 m.
                        tv.setText("");
                    } else {
                        DecimalFormat df = new DecimalFormat("#.#");
                        tv.setText(df.format(addr.distance / 1000) + getString(R.string.km));
                    }
                    return v;
                }
            });
            progress.setVisibility(View.GONE);
            results.setVisibility(View.VISIBLE);
            results.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                @Override
                public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                    Address addr = addr_list.get(i);
                    // Restart the navigator with the selected destination.
                    if (OnExitService.isRunCG(getActivity()))
                        CarMonitor.killCG(getActivity());
                    CarMonitor.startCG(getActivity(), addr.lat + "|" + addr.lon, null, addr);
                    getActivity().setResult(RESULT_OK);
                    getActivity().finish();
                }
            });
        }

        /**
         * Word-level fuzzy similarity of two strings (case-insensitive),
         * based on normalized Levenshtein distance. Returns a score that
         * grows with the number of closely matching word pairs, divided by
         * the longer word count.
         *
         * @param s1 first string (e.g. bookmark name)
         * @param s2 second string (e.g. recognized phrase)
         */
        float compare(String s1, String s2) {
            String[] w1 = s1.toUpperCase().split(" ");
            String[] w2 = s2.toUpperCase().split(" ");
            float res = 0;
            for (String w : w1) {
                if (w.equals(""))
                    continue;
                for (String s : w2) {
                    if (s.equals(""))
                        continue;
                    int lfd = StringUtils.getLevenshteinDistance(w, s);
                    float ratio = ((float) lfd) / Math.min(s.length(), w.length());
                    if (ratio < 0.5)
                        res += 1 - ratio * 2;
                }
            }
            return res / Math.max(w1.length, w2.length);
        }

        /**
         * First stage of the request chain: geocode each phrase. When all
         * phrases are done and nothing is within 10 km, fall back to
         * {@link NearRequest}.
         */
        class Request extends LocationRequest {
            Request() {
                Phrase p = phrases.get(phrase);
                exec(p.phrase);
            }

            @Override
            Location getLocation() {
                return location;
            }

            @Override
            void showError(String error) {
            }

            @Override
            void result(Vector<Address> result) {
                if (result.size() > 0) {
                    float scope = phrases.get(phrase).scope;
                    for (Address addr : result) {
                        addr.scope = scope;
                        addr_list.add(addr);
                    }
                    updateResults();
                }
                if (++phrase >= phrases.size()) {
                    // All phrases geocoded: count candidates within 10 km.
                    int near_count = 0;
                    Location location = getLocation();
                    if (location != null) {
                        for (Address addr : addr_list) {
                            if (addr.distance == 0)
                                addr.distance = OnExitService.calc_distance(addr.lat, addr.lon, location.getLatitude(), location.getLongitude());
                            if (addr.distance < 10000)
                                near_count++;
                        }
                    } else {
                        near_count = addr_list.size();
                    }
                    if (near_count == 0) {
                        // Nothing nearby: retry all phrases as place searches.
                        phrase = 0;
                        new NearRequest();
                        return;
                    }
                    BaseAdapter adapter = (BaseAdapter) results.getAdapter();
                    adapter.notifyDataSetChanged();
                    return;
                }
                // Continue with the next phrase.
                new Request();
            }
        }

        /**
         * Second stage: place search with a 1 km radius. Falls back to
         * {@link LongRequest} if still nothing is within 10 km.
         */
        class NearRequest extends PlaceRequest {
            NearRequest() {
                Phrase p = phrases.get(phrase);
                exec(p.phrase, 1000);
            }

            @Override
            Location getLocation() {
                return location;
            }

            @Override
            void showError(String error) {
            }

            @Override
            void result(Vector<Address> result) {
                if (result.size() > 0) {
                    float scope = phrases.get(phrase).scope;
                    for (Address addr : result) {
                        addr.scope = scope;
                        addr_list.add(addr);
                    }
                    updateResults();
                }
                if (++phrase >= phrases.size()) {
                    int near_count = 0;
                    Location location = getLocation();
                    if (location != null) {
                        for (Address addr : addr_list) {
                            if (addr.distance == 0)
                                addr.distance = OnExitService.calc_distance(addr.lat, addr.lon, location.getLatitude(), location.getLongitude());
                            if (addr.distance < 10000)
                                near_count++;
                        }
                    } else {
                        near_count = addr_list.size();
                    }
                    if (near_count == 0) {
                        // Still nothing nearby: widen the search radius.
                        phrase = 0;
                        new LongRequest();
                        return;
                    }
                    BaseAdapter adapter = (BaseAdapter) results.getAdapter();
                    adapter.notifyDataSetChanged();
                    return;
                }
                new NearRequest();
            }
        }

        /**
         * Final stage: place search with a 50 km radius. If nothing is found
         * at all, the activity is finished.
         */
        class LongRequest extends PlaceRequest {
            LongRequest() {
                Phrase p = phrases.get(phrase);
                exec(p.phrase, 50000);
            }

            @Override
            Location getLocation() {
                return location;
            }

            @Override
            void showError(String error) {
            }

            @Override
            void result(Vector<Address> result) {
                if (result.size() > 0) {
                    float scope = phrases.get(phrase).scope;
                    for (Address addr : result) {
                        addr.scope = scope;
                        addr_list.add(addr);
                    }
                    updateResults();
                }
                if (++phrase >= phrases.size()) {
                    if (addr_list.size() == 0) {
                        // Exhausted every fallback with no candidates: give up.
                        getActivity().finish();
                        return;
                    }
                    BaseAdapter adapter = (BaseAdapter) results.getAdapter();
                    adapter.notifyDataSetChanged();
                    return;
                }
                new LongRequest();
            }
        }
    }

    /** A recognized phrase together with its confidence score. */
    static class Phrase {
        String phrase;
        float scope;
    }

    /** A candidate destination: display strings, coordinates and ranking state. */
    static class Address {
        String address;
        String name;
        double lat;
        double lon;
        // Meters from the current fix; 0 means "not computed yet".
        double distance;
        // Ranking score (higher = better match).
        float scope;
    }

    /**
     * Async HTTP call to the Google Places text-search API.
     * <p>
     * NOTE(review): the "%1" in the URL looks like a placeholder presumably
     * substituted with the query by HttpTask.execute(url, addr) — confirm.
     * The API key is hard-coded in source; consider moving it to build
     * config / resources.
     * </p>
     */
    static abstract class PlaceRequest extends HttpTask {
        String error;

        abstract Location getLocation();

        abstract void showError(String error);

        abstract void result(Vector<Address> result);

        /**
         * Fire a place text search for {@code addr}, biased to the current
         * location within {@code radius} meters when a fix is available.
         */
        void exec(String addr, int radius) {
            String url = "https://maps.googleapis.com/maps/api/place/textsearch/json?query=%1&sensor=true";
            Location location = getLocation();
            if (location != null) {
                double lat = location.getLatitude();
                double lon = location.getLongitude();
                url += "&location=" + lat + "," + lon + "&radius=" + radius;
            }
            url += "&key=AIzaSyBljQKazFWpl9nyGHp-lu8ati7QjMbwzsU";
            url += "&language=" + Locale.getDefault().getLanguage();
            execute(url, addr);
        }

        /**
         * Parse the JSON "results" array into {@link Address} objects and
         * forward them to {@link #result(Vector)}.
         */
        void result(String result) {
            JsonArray res = Json.parse(result).asObject().get("results").asArray();
            Vector<Address> r = new Vector<Address>();
            for (int i = 0; i < res.size(); i++) {
                JsonObject o = res.get(i).asObject();
                Address addr = new Address();
                addr.address = o.get("formatted_address").asString();
                try {
                    addr.name = o.get("name").asString();
                } catch (Exception ex) {
                    // ignore — "name" is optional in the API response
                }
                JsonObject geo = o.get("geometry").asObject().get("location").asObject();
                addr.lat = geo.get("lat").asDouble();
                addr.lon = geo.get("lng").asDouble();
                r.add(addr);
            }
            result(r);
        }

        void error(String error) {
            showError(error);
        }
    }

    /**
     * Async HTTP call to the Google Geocoding API, bounded to ±1.5 degrees
     * around the current fix when available.
     * <p>
     * NOTE(review): this endpoint is plain http (not https) and uses a "$1"
     * placeholder unlike PlaceRequest's "%1" — presumably both are handled
     * by HttpTask.execute; confirm.
     * </p>
     */
    static public abstract class LocationRequest extends PlaceRequest {
        void exec(String addr) {
            String url = "http://maps.googleapis.com/maps/api/geocode/json?address=$1&sensor=true";
            Location location = getLocation();
            if (location != null) {
                double lat = location.getLatitude();
                double lon = location.getLongitude();
                url += "&bounds=" + (lat - 1.5) + "," + (lon - 1.5) + Uri.encode("|") + (lat + 1.5) + "," + (lon + 1.5);
            }
            url += "&language=" + Locale.getDefault().getLanguage();
            execute(url, addr);
        }

        /**
         * Parse geocoder results (no "name" field) into {@link Address}
         * objects and forward them to {@link #result(Vector)}.
         */
        void result(String data) throws ParseException {
            JsonArray res = Json.parse(data).asObject().get("results").asArray();
            Vector<Address> r = new Vector<Address>();
            for (int i = 0; i < res.size(); i++) {
                JsonObject o = res.get(i).asObject();
                Address addr = new Address();
                addr.address = o.get("formatted_address").asString();
                JsonObject geo = o.get("geometry").asObject().get("location").asObject();
                addr.lat = geo.get("lat").asDouble();
                addr.lon = geo.get("lng").asDouble();
                r.add(addr);
            }
            result(r);
        }
    }
}
/* ============================================================= * SmallSQL : a free Java DBMS library for the Java(tm) platform * ============================================================= * * (C) Copyright 2004-2011, by Volker Berlin. * * Project Info: http://www.smallsql.de/ * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Java is a trademark or registered trademark of Sun Microsystems, Inc. * in the United States and other countries.] * * --------------- * Database.java * --------------- * Author: Volker Berlin * */ package smallsql.database; import java.util.*; import java.io.*; import java.nio.channels.FileChannel; import java.sql.*; import smallsql.database.language.Language; /** * There are only one instance of this class per database. It will be share between all connections to this database and * all threads. That the access must be thread safe. * * Here are save mainly table definitions and locks. */ final class Database{ static private HashMap databases = new HashMap(); private final TableViewMap tableViews = new TableViewMap(); private final String name; private final boolean readonly; private final File directory; private final FileChannel master; private final WeakHashMap connections = new WeakHashMap(); /** * Get a instance of the Database Class. 
If the Database with the given name is not open * then it will be open. * @param name the name of the database * @param con a reference holder to this database, if all connection close that have a reference * then the database can be unload. * @param create if the database not exist then create it */ static Database getDatabase(String name, SSConnection con, boolean create) throws SQLException{ if(name == null){ return null; } if(name.startsWith("file:")){ name = name.substring(5); } File file; try{ file = new File(name).getCanonicalFile(); }catch(Throwable th){ throw SmallSQLException.createFromException( th ); } String dbKey = file.getName() + ";readonly=" + con.isReadOnly(); synchronized(databases){ Database db = (Database)databases.get(dbKey); if(db == null){ if(create && !file.isDirectory()){ CommandCreateDatabase command = new CommandCreateDatabase(con.log, name); command.execute(con, null); } db = new Database( name, file, con.isReadOnly() ); databases.put(dbKey, db); } db.connections.put(con, null); return db; } } private static Database getDatabase(SSConnection con, String name) throws SQLException{ return name == null ? 
con.getDatabase(false) : getDatabase( name, con, false ); } /** * Create a instance of a Database * @param name is used for getCatalog() * @param canonicalFile the directory that is already canonical * @param readonly open database in read only mode * @throws SQLException If can't open */ private Database( String name, File canonicalFile, boolean readonly ) throws SQLException{ try{ this.name = name; this.readonly = readonly; directory = canonicalFile; if(!directory.isDirectory()){ throw SmallSQLException.create(Language.DB_NONEXISTENT, name); } File file = new File( directory, Utils.MASTER_FILENAME); if(!file.exists()) throw SmallSQLException.create(Language.DB_NOT_DIRECTORY, name); master = Utils.openRaFile( file, readonly ); }catch(Exception e){ throw SmallSQLException.createFromException(e); } } String getName(){ return name; } boolean isReadOnly(){ return readonly; } /** * Remove a connection from this database. */ static final void closeConnection(SSConnection con) throws SQLException{ synchronized(databases){ Iterator iterator = databases.values().iterator(); while(iterator.hasNext()){ Database database = (Database)iterator.next(); WeakHashMap connections = database.connections; connections.remove(con); if(connections.size() == 0){ try { iterator.remove(); database.close(); } catch (Exception e) { throw SmallSQLException.createFromException(e); } } } } } /** * Close all tables and views of this Database. */ private final void close() throws Exception{ synchronized(tableViews){ Iterator iterator = tableViews.values().iterator(); while(iterator.hasNext()){ TableView tableView = (TableView)iterator.next(); tableView.close(); iterator.remove(); } } master.close(); } static TableView getTableView(SSConnection con, String catalog, String tableName) throws SQLException{ return getDatabase( con, catalog).getTableView( con, tableName); } /** * Return a TableView object. If the TableView object is not loaded then it load it. 
* @param con * @param tableName * @return ever a valid TableView object and never null. * @throws SQLException if the table or view does not exists */ TableView getTableView(SSConnection con, String tableName) throws SQLException{ synchronized(tableViews){ TableView tableView = tableViews.get(tableName); if(tableView == null){ // FIXME it should block only one table and not all tables, loading of the table should outside of the global synchronized tableView = TableView.load(con, this, tableName); tableViews.put( tableName, tableView); } return tableView; } } static void dropTable(SSConnection con, String catalog, String tableName) throws Exception{ getDatabase( con, catalog).dropTable( con, tableName); } void dropTable(SSConnection con, String tableName) throws Exception{ synchronized(tableViews){ Table table = (Table)tableViews.get( tableName ); if(table != null){ tableViews.remove( tableName ); table.drop(con); }else{ Table.drop( this, tableName ); } } } /** * Remove a table or view from the cache of open objects. 
* @param tableViewName the name of the object */ void removeTableView(String tableViewName){ synchronized(tableViews){ tableViews.remove( tableViewName ); } } void replaceTable( Table oldTable, Table newTable) throws Exception{ synchronized(tableViews){ tableViews.remove( oldTable.name ); tableViews.remove( newTable.name ); oldTable.close(); newTable.close(); File oldFile = oldTable.getFile(this); File newFile = newTable.getFile(this); File tmpFile = new File(Utils.createTableViewFileName( this, "#" + System.currentTimeMillis() + this.hashCode() )); if( !oldFile.renameTo(tmpFile) ){ throw SmallSQLException.create(Language.TABLE_CANT_RENAME, oldTable.name); } if( !newFile.renameTo(oldFile) ){ tmpFile.renameTo(oldFile); //restore the old table throw SmallSQLException.create(Language.TABLE_CANT_RENAME, oldTable.name); } tmpFile.delete(); } } static void dropView(SSConnection con, String catalog, String tableName) throws Exception{ getDatabase( con, catalog).dropView(tableName); } void dropView(String viewName) throws Exception{ synchronized(tableViews){ Object view = tableViews.remove( viewName ); if(view != null && !(view instanceof View)) throw SmallSQLException.create(Language.VIEWDROP_NOT_VIEW, viewName); View.drop( this, viewName ); } } private void checkForeignKeys( SSConnection con, ForeignKeys foreignKeys ) throws SQLException{ for(int i=0; i<foreignKeys.size(); i++){ ForeignKey foreignKey = foreignKeys.get(i); TableView pkTable = getTableView(con, foreignKey.pkTable); if(!(pkTable instanceof Table)){ throw SmallSQLException.create(Language.FK_NOT_TABLE, foreignKey.pkTable); } } } /** * @param con current Connections * @param name the name of the new Table * @param columns the column descriptions of the table * @param indexes the indexes of the new table * @param foreignKeys * @throws Exception */ void createTable(SSConnection con, String name, Columns columns, IndexDescriptions indexes, ForeignKeys foreignKeys) throws Exception{ checkForeignKeys( con, 
foreignKeys ); // createFile() can run only one Thread success (it is atomic) // Thats the create of the Table does not need in the Synchronized. Table table = new Table( this, con, name, columns, indexes, foreignKeys); synchronized(tableViews){ tableViews.put( name, table); } } /** * It is used to create temp Table for ALTER TABLE and co. */ Table createTable(SSConnection con, String tableName, Columns columns, IndexDescriptions oldIndexes, IndexDescriptions newIndexes, ForeignKeys foreignKeys) throws Exception{ checkForeignKeys( con, foreignKeys ); Table table = new Table( this, con, tableName, columns, oldIndexes, newIndexes, foreignKeys); synchronized(tableViews){ tableViews.put( tableName, table); } return table; } void createView(SSConnection con, String viewName, String sql) throws Exception{ // createFile() can run only one Thread success (it is atomic) // Thats the create of the View does not need in the Synchronized. new View( this, con, viewName, sql); } /** * Create a list of all available Databases from the point of the current * Database or current working directory * @param database - current database * @return */ static Object[][] getCatalogs(Database database){ List catalogs = new ArrayList(); File baseDir = (database != null) ? 
database.directory.getParentFile() : new File("."); File dirs[] = baseDir.listFiles(); if(dirs != null) for(int i=0; i<dirs.length; i++){ if(dirs[i].isDirectory()){ if(new File(dirs[i], Utils.MASTER_FILENAME).exists()){ Object[] catalog = new Object[1]; catalog[0] = dirs[i].getPath(); catalogs.add(catalog); } } } Object[][] result = new Object[catalogs.size()][]; catalogs.toArray(result); return result; } Strings getTables(String tablePattern){ Strings list = new Strings(); File dirs[] = directory.listFiles(); if(dirs != null) if(tablePattern == null) tablePattern = "%"; tablePattern += Utils.TABLE_VIEW_EXTENTION; for(int i=0; i<dirs.length; i++){ String name = dirs[i].getName(); if(Utils.like(name, tablePattern)){ list.add(name.substring( 0, name.length()-Utils.TABLE_VIEW_EXTENTION.length() )); } } return list; } Object[][] getColumns( SSConnection con, String tablePattern, String colPattern) throws Exception{ List rows = new ArrayList(); Strings tables = getTables(tablePattern); for(int i=0; i<tables.size(); i++){ String tableName = tables.get(i); try{ TableView tab = getTableView( con, tableName); Columns cols = tab.columns; for(int c=0; c<cols.size(); c++){ Column col = cols.get(c); Object[] row = new Object[18]; row[0] = getName(); //TABLE_CAT //TABLE_SCHEM row[2] = tableName; //TABLE_NAME row[3] = col.getName(); //COLUMN_NAME row[4] = Utils.getShort( SQLTokenizer.getSQLDataType( col.getDataType() )); //DATA_TYPE row[5] = SQLTokenizer.getKeyWord( col.getDataType() ); //TYPE_NAME row[6] = Utils.getInteger(col.getColumnSize());//COLUMN_SIZE //BUFFER_LENGTH row[8] = Utils.getInteger(col.getScale());//DECIMAL_DIGITS row[9] = Utils.getInteger(10); //NUM_PREC_RADIX row[10]= Utils.getInteger(col.isNullable() ? 
DatabaseMetaData.columnNullable : DatabaseMetaData.columnNoNulls); //NULLABLE //REMARKS row[12]= col.getDefaultDefinition(); //COLUMN_DEF //SQL_DATA_TYPE //SQL_DATETIME_SUB row[15]= row[6]; //CHAR_OCTET_LENGTH row[16]= Utils.getInteger(i); //ORDINAL_POSITION row[17]= col.isNullable() ? "YES" : "NO"; //IS_NULLABLE rows.add(row); } }catch(Exception e){ //invalid Tables and View will not show } } Object[][] result = new Object[rows.size()][]; rows.toArray(result); return result; } Object[][] getReferenceKeys(SSConnection con, String pkTable, String fkTable) throws SQLException{ List rows = new ArrayList(); Strings tables = (pkTable != null) ? getTables(pkTable) : getTables(fkTable); for(int t=0; t<tables.size(); t++){ String tableName = tables.get(t); TableView tab = getTableView( con, tableName); if(!(tab instanceof Table)) continue; ForeignKeys references = ((Table)tab).references; for(int i=0; i<references.size(); i++){ ForeignKey foreignKey = references.get(i); IndexDescription pk = foreignKey.pk; IndexDescription fk = foreignKey.fk; if((pkTable == null || pkTable.equals(foreignKey.pkTable)) && (fkTable == null || fkTable.equals(foreignKey.fkTable))){ Strings columnsPk = pk.getColumns(); Strings columnsFk = fk.getColumns(); for(int c=0; c<columnsPk.size(); c++){ Object[] row = new Object[14]; row[0] = getName(); //PKTABLE_CAT //PKTABLE_SCHEM row[2] = foreignKey.pkTable; //PKTABLE_NAME row[3] = columnsPk.get(c); //PKCOLUMN_NAME row[4] = getName(); //FKTABLE_CAT //FKTABLE_SCHEM row[6] = foreignKey.fkTable; //FKTABLE_NAME row[7] = columnsFk.get(c); //FKCOLUMN_NAME row[8] = Utils.getShort(c+1); //KEY_SEQ row[9] = Utils.getShort(foreignKey.updateRule);//UPDATE_RULE row[10]= Utils.getShort(foreignKey.deleteRule); //DELETE_RULE row[11]= fk.getName(); //FK_NAME row[12]= pk.getName(); //PK_NAME row[13]= Utils.getShort(DatabaseMetaData.importedKeyNotDeferrable); //DEFERRABILITY rows.add(row); } } } } Object[][] result = new Object[rows.size()][]; rows.toArray(result); 
return result; } Object[][] getBestRowIdentifier(SSConnection con, String table) throws SQLException{ List rows = new ArrayList(); Strings tables = getTables(table); for(int t=0; t<tables.size(); t++){ String tableName = tables.get(t); TableView tab = getTableView( con, tableName); if(!(tab instanceof Table)) continue; IndexDescriptions indexes = ((Table)tab).indexes; for(int i=0; i<indexes.size(); i++){ IndexDescription index = indexes.get(i); if(index.isUnique()){ Strings columns = index.getColumns(); for(int c=0; c<columns.size(); c++){ String columnName = columns.get(c); Column column = tab.findColumn(columnName); Object[] row = new Object[8]; row[0] = Utils.getShort(DatabaseMetaData.bestRowSession);//SCOPE row[1] = columnName; //COLUMN_NAME final int dataType = column.getDataType(); row[2] = Utils.getInteger(dataType);//DATA_TYPE row[3] = SQLTokenizer.getKeyWord(dataType);//TYPE_NAME row[4] = Utils.getInteger(column.getPrecision()); //COLUMN_SIZE //BUFFER_LENGTH row[6] = Utils.getShort(column.getScale()); //DECIMAL_DIGITS row[7] = Utils.getShort(DatabaseMetaData.bestRowNotPseudo);//PSEUDO_COLUMN rows.add(row); } } } } Object[][] result = new Object[rows.size()][]; rows.toArray(result); return result; } Object[][] getPrimaryKeys(SSConnection con, String table) throws SQLException{ List rows = new ArrayList(); Strings tables = getTables(table); for(int t=0; t<tables.size(); t++){ String tableName = tables.get(t); TableView tab = getTableView( con, tableName); if(!(tab instanceof Table)) continue; IndexDescriptions indexes = ((Table)tab).indexes; for(int i=0; i<indexes.size(); i++){ IndexDescription index = indexes.get(i); if(index.isPrimary()){ Strings columns = index.getColumns(); for(int c=0; c<columns.size(); c++){ Object[] row = new Object[6]; row[0] = getName(); //TABLE_CAT //TABLE_SCHEM row[2] = tableName; //TABLE_NAME row[3] = columns.get(c); //COLUMN_NAME row[4] = Utils.getShort(c+1); //KEY_SEQ row[5] = index.getName(); //PK_NAME rows.add(row); } } } } 
Object[][] result = new Object[rows.size()][]; rows.toArray(result); return result; } Object[][] getIndexInfo( SSConnection con, String table, boolean unique) throws SQLException { List rows = new ArrayList(); Strings tables = getTables(table); Short type = Utils.getShort( DatabaseMetaData.tableIndexOther ); for(int t=0; t<tables.size(); t++){ String tableName = tables.get(t); TableView tab = getTableView( con, tableName); if(!(tab instanceof Table)) continue; IndexDescriptions indexes = ((Table)tab).indexes; for(int i=0; i<indexes.size(); i++){ IndexDescription index = indexes.get(i); Strings columns = index.getColumns(); for(int c=0; c<columns.size(); c++){ Object[] row = new Object[13]; row[0] = getName(); //TABLE_CAT //TABLE_SCHEM row[2] = tableName; //TABLE_NAME row[3] = Boolean.valueOf(!index.isUnique());//NON_UNIQUE //INDEX_QUALIFIER row[5] = index.getName(); //INDEX_NAME row[6] = type; //TYPE row[7] = Utils.getShort(c+1); //ORDINAL_POSITION row[8] = columns.get(c); //COLUMN_NAME //ASC_OR_DESC //CARDINALITY //PAGES //FILTER_CONDITION rows.add(row); } } } Object[][] result = new Object[rows.size()][]; rows.toArray(result); return result; } }
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package com.thoughtworks.selenium;

import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static org.openqa.selenium.UnexpectedAlertBehaviour.IGNORE;
import static org.openqa.selenium.remote.CapabilityType.UNEXPECTED_ALERT_BEHAVIOUR;

import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.Resources;

import com.thoughtworks.selenium.testing.SeleniumTestEnvironment;
import com.thoughtworks.selenium.webdriven.WebDriverBackedSelenium;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.ExternalResource;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import org.openqa.selenium.BuckBuild;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.MutableCapabilities;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.internal.WrapsDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.testing.DevMode;
import org.openqa.selenium.testing.InProject;
import org.openqa.selenium.testing.drivers.Browser;
import org.openqa.selenium.testing.drivers.WebDriverBuilder;

import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Logger;

/**
 * Shared base class for Selenese (Selenium RC style) tests. It lazily builds
 * the JavaScript atoms in dev mode, starts the shared test server, and wires
 * a JUnit rule chain that creates/reuses a single WebDriver-backed Selenium
 * instance for the whole test run.
 */
public class InternalSelenseTestBase extends SeleneseTestBase {
  private static final Logger log = Logger.getLogger(InternalSelenseTestBase.class.getName());

  // Names of the selenium-atoms build targets that must be compiled and copied
  // next to the webdriven classes before a WebDriver-backed Selenium can run.
  private static final ImmutableSet<String> ATOM_TARGETS = ImmutableSet.of(
      "findElement",
      "findOption",
      "fireEvent",
      "fireEventAt",
      "getAttribute",
      "getText",
      "linkLocator",
      "isElementPresent",
      "isSomethingSelected",
      "isTextPresent",
      "isVisible",
      "setCursorPosition",
      "type");

  // Single Selenium shared by every test class; created once by
  // initializeSelenium and torn down in destroyDriver.
  private static Selenium INSTANCE;
  // Ensures the (expensive) atom build below runs at most once per JVM.
  private static final AtomicBoolean MUST_BUILD = new AtomicBoolean(true);

  /**
   * In dev mode, build the JS atoms with Buck and copy them (plus sizzle and
   * the test helper scripts) into the production output directory. A no-op
   * outside dev mode or on every call after the first.
   */
  @BeforeClass
  public static void buildJavascriptLibraries() throws IOException {
    if (!DevMode.isInDevMode() || !MUST_BUILD.compareAndSet(true, false)) {
      return;
    }

    log.info("In dev mode. Copying required files in case we're using a WebDriver-backed Selenium");

    Path dir = InProject.locate("java/client/build/production/com/thoughtworks/selenium/webdriven");
    Files.createDirectories(dir);

    for (String target : ATOM_TARGETS) {
      Path atom = new BuckBuild().of("//javascript/selenium-atoms:" + target).go();
      Files.copy(atom, dir.resolve(atom.getFileName()), REPLACE_EXISTING);
    }

    Path sizzle = InProject.locate("third_party/js/sizzle/sizzle.js");
    Files.copy(sizzle, dir.resolve("sizzle.js"), REPLACE_EXISTING);

    Path seDir = InProject.locate("java/client/test/com/thoughtworks/selenium");
    Path destDir = InProject.locate("java/client/build/production/com/thoughtworks/selenium");
    // NOTE(review): Files.list returns a Stream that should be closed
    // (try-with-resources) to release the directory handle — confirm and fix.
    Files.list(seDir)
        .filter(path -> path.getFileName().toString().endsWith(".js"))
        .forEach(path -> {
          try {
            Files.copy(path, destDir.resolve(path.getFileName()), REPLACE_EXISTING);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        });
  }

  /** Ensure the shared in-process test app server is running. */
  @BeforeClass
  public static void initializeServer() {
    GlobalTestEnvironment.get(SeleniumTestEnvironment.class);
  }

  // Logs test start/finish so interleaved server logs can be correlated.
  public TestWatcher traceMethodName = new TestWatcher() {
    @Override
    protected void starting(Description description) {
      super.starting(description);
      log.info(">>> Starting " + description);
    }

    @Override
    protected void finished(Description description) {
      super.finished(description);
      log.info("<<< Finished " + description);
    }
  };

  // Creates the singleton WebDriver-backed Selenium on first use and assigns
  // it to the inherited 'selenium' field before each test.
  public ExternalResource initializeSelenium = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      selenium = INSTANCE;
      if (selenium != null) {
        return;
      }

      MutableCapabilities caps = new MutableCapabilities(createCapabilities());
      caps.setCapability(UNEXPECTED_ALERT_BEHAVIOUR, IGNORE);

      String baseUrl = whereIs("selenium-server/");
      WebDriver driver = new WebDriverBuilder().get(caps);
      selenium = new WebDriverBackedSelenium(driver, baseUrl);

      selenium.setBrowserLogLevel("debug");
      INSTANCE = selenium;
    }
  };

  /**
   * Map the "selenium.browser" system property (default "ff") to the matching
   * DesiredCapabilities. Fails the test for unsupported values.
   */
  private Capabilities createCapabilities() {
    String property = System.getProperty("selenium.browser", "ff");
    Browser browser = Browser.valueOf(property);
    switch (browser) {
      case chrome:
        return DesiredCapabilities.chrome();

      case edge:
        return DesiredCapabilities.edge();

      case ie:
        return DesiredCapabilities.internetExplorer();

      case ff:
        return DesiredCapabilities.firefox();

      case opera:
      case operablink:
        return DesiredCapabilities.operaBlink();

      case safari:
        return DesiredCapabilities.safari();

      default:
        fail("Attempt to use an unsupported browser: " + property);
        // we never get here, but keep null checks happy anyway
        return new DesiredCapabilities();
    }
  }

  // Injects testHelpers.js into the browser so legacy Selenese commands work
  // when backed by WebDriver. Skipped for non-WebDriver-backed instances.
  public ExternalResource addNecessaryJavascriptCommands = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      if (selenium == null || !(selenium instanceof WebDriverBackedSelenium)) {
        return;
      }

      // We need to be a on page where we can execute JS
      WebDriver driver = ((WrapsDriver) selenium).getWrappedDriver();
      driver.get(whereIs("/selenium-server"));

      try {
        URL scriptUrl = Resources.getResource(getClass(), "/com/thoughtworks/selenium/testHelpers.js");
        String script = Resources.toString(scriptUrl, StandardCharsets.UTF_8);

        ((JavascriptExecutor) driver).executeScript(script);
      } catch (IOException e) {
        fail("Cannot read script: " + Throwables.getStackTraceAsString(e));
      }
    }
  };

  // Restores focus to the main window in case a previous test left a popup
  // selected; the shared Selenium instance carries state across tests.
  public ExternalResource returnFocusToMainWindow = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      if (selenium == null) {
        return;
      }

      selenium.selectWindow("");
      selenium.windowFocus();
    }
  };

  // Skips tests not matching the optional "only_run" (class name) and
  // "method" (method name) system properties; both are comma-separated lists.
  public TestWatcher filter = new TestWatcher() {
    @Override
    public Statement apply(Statement base, Description description) {
      String onlyRun = System.getProperty("only_run");
      Assume.assumeTrue(onlyRun == null ||
          Arrays.asList(onlyRun.split(",")).contains(description.getTestClass().getSimpleName()));
      String mth = System.getProperty("method");
      Assume.assumeTrue(mth == null ||
          Arrays.asList(mth.split(",")).contains(description.getMethodName()));
      return super.apply(base, description);
    }
  };

  // Rule ordering matters: filter first (cheap skip), then browser setup,
  // focus restoration, JS injection, and finally per-test logging.
  @Rule
  public TestRule chain =
      RuleChain.outerRule(filter)
          .around(initializeSelenium)
          .around(returnFocusToMainWindow)
          .around(addNecessaryJavascriptCommands)
          .around(traceMethodName);

  /** Fail the test if any "verify*" assertion recorded an error. */
  @After
  public void checkVerifications() {
    checkForVerificationErrors();
  }

  private String whereIs(String location) {
    return GlobalTestEnvironment.get().getAppServer().whereIs(location);
  }

  /**
   * Stop the shared browser at suite end, unless the
   * "webdriver.singletestsuite.leaverunning" flag asks to keep it alive.
   */
  @AfterClass
  public static void destroyDriver() {
    if (Boolean.getBoolean("webdriver.singletestsuite.leaverunning")) {
      return;
    }

    Selenium selenium = INSTANCE;
    if (selenium != null) {
      selenium.stop();
      INSTANCE = null;
    }
  }
}
/*******************************************************************************
 * Copyright 2016 Intuit
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.intuit.wasabi.api;

import com.codahale.metrics.annotation.Timed;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.intuit.wasabi.analyticsobjects.Event;
import com.intuit.wasabi.analyticsobjects.EventList;
import com.intuit.wasabi.assignmentobjects.User;
import com.intuit.wasabi.events.Events;
import com.intuit.wasabi.experimentobjects.Application;
import com.intuit.wasabi.experimentobjects.Context;
import com.intuit.wasabi.experimentobjects.Experiment;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.slf4j.Logger;

import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.intuit.wasabi.api.APISwaggerResource.DEFAULT_EVENT;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.Response.Status.CREATED;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * API endpoint for managing events
 */
@Path("/v1/events")
@Produces(APPLICATION_JSON)
@Singleton
@Api(value = "Events (Record-Manage Events)")
public class EventsResource {

    private static final Logger LOGGER = getLogger(EventsResource.class);
    private final Events events;
    private final HttpHeader httpHeader;

    @Inject
    EventsResource(final Events events, final HttpHeader httpHeader) {
        this.events = events;
        this.httpHeader = httpHeader;
    }

    /**
     * Submit events for the specified user within the context of a specific
     * application and experiment. Each event is an impression or action.
     * <p>
     * Example events structure
     * <p>
     * "events": [
     * {
     * "timestamp": "...",
     * "name": "..."
     * "IMPRESSION" for impressions
     * anything else for action
     * "payload": "..."
     * json string (with escaped quotes).
     * "value": ...
     * null: binary action success
     * number: continuous/counting action
     * string: categorical action
     * boolean: not currently used
     * }
     *
     * @param applicationName the application name
     * @param experimentLabel the experiment label
     * @param userID          the current user id
     * @param eventList       the {@link com.intuit.wasabi.analyticsobjects.EventList} event list
     * @return Response object with status 201 (CREATED) on success
     * @throws Exception generic exception
     */
    @POST
    @Path("applications/{applicationName}/experiments/{experimentLabel}/users/{userID}")
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Submit a single event or a batch of events for the specified assigned user(customer)",
            notes = "*NOTE*: For a given user, please make sure that you have the user assignment done using the " +
                    "assignments API before using this API. An event is either an impression, indicating the user " +
                    "has been exposed to the treatment, or an action, indicating that the user has done something " +
                    "that you want to track. Please record impressions first and then action - use event " +
                    "name = \"IMPRESSION\" for impressions.")
    @Timed
    public Response recordEvents(
            @PathParam("applicationName")
            @ApiParam(value = "Application Name")
            final Application.Name applicationName,

            @PathParam("experimentLabel")
            @ApiParam(value = "Experiment Label")
            final Experiment.Label experimentLabel,

            @PathParam("userID")
            @ApiParam(value = "Customer User ID that is already assigned using assignments API")
            final User.ID userID,

            @ApiParam(name = "eventList", required = true, value = "For impression", defaultValue = DEFAULT_EVENT)
            final EventList eventList)
            throws Exception {
        try {
            final Date now = new Date();
            Set<Context> contextSet = new HashSet<>();

            // FIX: validate the whole batch before mutating any event;
            // previously a bad event in the middle of the batch left earlier
            // events with their timestamp already rewritten when the
            // IllegalArgumentException was thrown.
            for (Event event : eventList.getEvents()) {
                // TODO: add checking to Event.Name constructor instead of here
                if (event.getName() == null || isBlank(event.getName().toString())) {
                    throw new IllegalArgumentException("Event name cannot be null or an empty string");
                }
            }

            for (Event event : eventList.getEvents()) {
                // Default missing timestamps to the request arrival time.
                if (event.getTimestamp() == null) {
                    event.setTimestamp(now);
                }
                contextSet.add(event.getContext());
            }

            events.recordEvents(applicationName, experimentLabel, userID, eventList, contextSet);

            return httpHeader.headers(CREATED).build();
        } catch (Exception exception) {
            LOGGER.error("recordEvents failed for applicationName={}," +
                            " experimentLabel={}, userID={}, eventList={} with error:",
                    applicationName, experimentLabel, userID, eventList, exception);
            throw exception;
        }
    }

    /**
     * Submit events for users within the context of a specific application
     * and experiment. Each event is an impression or action.
     *
     * @param applicationName the application name
     * @param experimentLabel the experiment label
     * @param eventList       the {@link com.intuit.wasabi.analyticsobjects.EventList} event list
     * @throws UnsupportedOperationException always; this endpoint is not implemented
     */
    @POST
    @Path("applications/{applicationName}/experiments/{experimentLabel}/users")
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @Timed
    public Response recordUsersEvents(
            @PathParam("applicationName")
            final Application.Name applicationName,

            @PathParam("experimentLabel")
            final Experiment.Label experimentLabel,

            final Map<User.ID, List<Event>> eventList) {
        LOGGER.warn("recordUsersEvents is unsupported");
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Submit events for users and experiments within the context of a
     * specific application. Each event is an impression or action.
     *
     * @param applicationName the application name
     * @param eventList       the {@link com.intuit.wasabi.analyticsobjects.EventList} event list
     * @throws UnsupportedOperationException always; this endpoint is not implemented
     */
    @POST
    @Path("applications/{applicationName}/experiments")
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @Timed
    public Response recordExperimentsEvents(
            @PathParam("applicationName")
            final Application.Name applicationName,

            final Map<Experiment.Label, Map<User.ID, List<Event>>> eventList) {
        LOGGER.warn("recordExperimentsEvents is unsupported");
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Returns number of events currently in the queue
     *
     * @return Response object whose entity is the queue-length payload
     */
    @GET
    @Path("queueLength")
    @Produces(APPLICATION_JSON)
    @Timed
    public Response getEventsQueueLength() {
        try {
            return httpHeader.headers().entity(events.queuesLength()).build();
        } catch (Exception exception) {
            LOGGER.error("getEventsQueueLength failed with error:", exception);
            throw exception;
        }
    }
}
/*
 * Copyright (C) 2015 Willi Ye
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.grarak.kerneladiutor.fragments.tools.download;

import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.Html;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.FrameLayout;
import android.widget.TextView;

import com.grarak.kerneladiutor.KernelActivity;
import com.grarak.kerneladiutor.R;
import com.grarak.kerneladiutor.elements.cards.download.DownloadCardView;
import com.grarak.kerneladiutor.elements.cards.download.DownloadInfoCardView;
import com.grarak.kerneladiutor.elements.cards.download.FeatureCardView;
import com.grarak.kerneladiutor.fragments.RecyclerViewFragment;
import com.grarak.kerneladiutor.fragments.ViewPagerFragment;
import com.grarak.kerneladiutor.utils.Downloads;
import com.grarak.kerneladiutor.utils.Utils;
import com.nineoldandroids.view.ViewHelper;

import java.util.List;

/**
 * Created by willi on 20.06.15.
 *
 * Pager host for a kernel download entry: shows up to three tabs (info,
 * features, downloads) built from a {@link Downloads.KernelContent}, and
 * coordinates the collapsing header/toolbar while the child lists scroll.
 */
public class ParentFragment extends ViewPagerFragment {

    /**
     * Factory; the content is stored in a field (not arguments), so it does
     * not survive process recreation.
     */
    public static ParentFragment newInstance(Downloads.KernelContent kernelContent) {
        ParentFragment fragment = new ParentFragment();
        fragment.kernelContent = kernelContent;
        return fragment;
    }

    // Most-recent instance; shared with the static scroll listener and the
    // nested child fragments below.
    // NOTE(review): a static reference to a Fragment holds its Activity and
    // view hierarchy — potential memory leak; confirm lifecycle expectations.
    private static ParentFragment parentFragment;

    // The kernel entry this pager displays.
    private Downloads.KernelContent kernelContent;

    // Header views resolved from the pager layout in getParentView().
    private TextView descriptionText;
    private View viewContainer;
    private View viewContainerBackground;

    // Views owned by the hosting KernelActivity.
    private View logoContainer;
    private Toolbar toolbar;

    // Fade-out played on the header background when scrolled back to the top.
    private Animation animation;

    @Override
    public View getParentView() {
        View view = inflater.inflate(R.layout.download_viewpager, container, false);
        descriptionText = (TextView) view.findViewById(R.id.description);
        viewContainer = view.findViewById(R.id.view_container);
        viewContainerBackground = view.findViewById(R.id.view_container_background);
        return view;
    }

    @Override
    public void preInit(Bundle savedInstanceState) {
        super.preInit(savedInstanceState);
        parentFragment = this;

        String description;
        if (kernelContent != null && (description = kernelContent.getShortDescription()) != null)
            descriptionText.setText(Html.fromHtml(description));

        logoContainer = ((KernelActivity) getActivity()).getLogoContainer();
        toolbar = ((KernelActivity) getActivity()).getToolbar();

        animation = AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_out);
        animation.setAnimationListener(new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {
                descriptionText.setVisibility(View.GONE);
            }

            @Override
            public void onAnimationEnd(Animation animation) {
                // Once faded out, make the header background fully transparent.
                viewContainerBackground.setBackgroundColor(Color.TRANSPARENT);
            }

            @Override
            public void onAnimationRepeat(Animation animation) {
            }
        });
    }

    @Override
    public void init(Bundle savedInstanceState) {
        super.init(savedInstanceState);

        // Add only the tabs for which the content actually has data.
        if (kernelContent != null)
            addFragment(new ViewPagerItem(DownloadRecyclerViewFragment.InfoFragment.newInstance(kernelContent),
                    getString(R.string.information)));
        List<Downloads.Feature> features;
        if (kernelContent != null && (features = kernelContent.getFeatures()).size() > 0)
            addFragment(new ViewPagerItem(DownloadRecyclerViewFragment.FeaturesFragment.newInstance(features),
                    getString(R.string.features)));
        List<Downloads.Download> downloads;
        if (kernelContent != null && (downloads = kernelContent.getDownloads()).size() > 0)
            addFragment(new ViewPagerItem(DownloadRecyclerViewFragment.DownloadFragment.newInstance(downloads),
                    getString(R.string.download)));
    }

    @Override
    public void onSwipe(int page) {
        super.onSwipe(page);
        // Reset the collapsing header on every tab so they stay in sync.
        for (int i = 0; i < getCount(); i++)
            ((DownloadRecyclerViewFragment) getFragment(i)).resetTranslations();
    }

    /**
     * Translates the logo, header container, and toolbar as the child
     * RecyclerView scrolls, and toggles the colored/transparent header state.
     * Shared by all three tabs via the static parentFragment reference.
     */
    private static class CustomOnScrollListener extends RecyclerView.OnScrollListener {

        private final Context context;
        // Accumulated scroll offsets, each clamped independently below.
        private int scrollDistance;
        private int logoViewContainer;
        private int viewContainerOffset;
        private int toolbarOffset;
        // True while the toolbar/header are painted with the primary color.
        private boolean isColored;

        public CustomOnScrollListener() {
            context = parentFragment.logoContainer.getContext();
        }

        @Override
        public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
            super.onScrolled(recyclerView, dx, dy);
            scrollDistance += dy;

            // Logo parallax: moves at half scroll speed, clamped to its height.
            logoViewContainer += dy;
            // NOTE(review): integer division before the float assignment —
            // presumably intended (half-speed in whole pixels); confirm.
            float logoViewTranslation = logoViewContainer / 2;
            int logoContainerHeight = parentFragment.logoContainer.getHeight();
            if (logoViewTranslation > logoContainerHeight) logoViewTranslation = logoContainerHeight;
            else if (logoViewTranslation < 0) logoViewTranslation = 0;
            ViewHelper.setTranslationY(parentFragment.logoContainer, -logoViewTranslation);

            // Header container scrolls away at full speed, clamped to [0, height].
            viewContainerOffset += dy;
            int viewContainerHeight = parentFragment.viewContainer.getHeight();
            if (viewContainerOffset > viewContainerHeight) viewContainerOffset = viewContainerHeight;
            else if (viewContainerOffset < 0) viewContainerOffset = 0;
            ViewHelper.setTranslationY(parentFragment.viewContainer, -viewContainerOffset);

            // Toolbar hides only after the header has nearly collapsed, and
            // reappears immediately on any upward scroll (dy < 0).
            int toolbarHeight = parentFragment.toolbar.getHeight();
            if (viewContainerOffset >= viewContainerHeight - toolbarHeight - parentFragment.mTabs.getHeight()
                    || dy < 0) {
                toolbarOffset += dy;
                if (toolbarOffset > toolbarHeight) toolbarOffset = toolbarHeight;
                else if (toolbarOffset < 0) toolbarOffset = 0;
                ViewHelper.setTranslationY(parentFragment.toolbar, -toolbarOffset);
            }

            // Color the toolbar/header once the header is collapsed…
            if (!isColored && scrollDistance >= viewContainerHeight - toolbarHeight && dy < 0) {
                parentFragment.toolbar.setBackgroundColor(context.getResources().getColor(R.color.color_primary));
                parentFragment.descriptionText.setVisibility(View.VISIBLE);
                parentFragment.viewContainerBackground.setBackgroundColor(context.getResources()
                        .getColor(R.color.color_primary));
                isColored = true;
            }
            // …and fade back to transparent when fully scrolled to the top.
            if (isColored && scrollDistance == 0) {
                parentFragment.toolbar.setBackgroundColor(Color.TRANSPARENT);
                parentFragment.viewContainerBackground.startAnimation(parentFragment.animation);
                isColored = false;
            }
        }

        /** Restore all tracked offsets and view translations to the expanded state. */
        public void reset() {
            scrollDistance = 0;
            logoViewContainer = 0;
            viewContainerOffset = 0;
            toolbarOffset = 0;
            ViewHelper.setTranslationY(parentFragment.logoContainer, 0);
            ViewHelper.setTranslationY(parentFragment.viewContainer, 0);
            ViewHelper.setTranslationY(parentFragment.toolbar, 0);
        }
    }

    /**
     * Base class for the three tab fragments; wires the shared scroll
     * listener and keeps header state/paddings consistent across tabs.
     */
    public static class DownloadRecyclerViewFragment extends RecyclerViewFragment {

        // One listener per tab instance, lazily created in setOnScrollListener.
        private CustomOnScrollListener onScrollListener;

        @Override
        public int getSpan() {
            return 1;
        }

        @Override
        public boolean showApplyOnBoot() {
            return false;
        }

        @Override
        public RecyclerView getRecyclerView() {
            View view = getParentView(R.layout.download_recyclerview);
            return (RecyclerView) view.findViewById(R.id.recycler_view);
        }

        @Override
        public void setOnScrollListener(RecyclerView recyclerView) {
            recyclerView.addOnScrollListener(onScrollListener == null ?
                    onScrollListener = new CustomOnScrollListener() : onScrollListener);
        }

        @Override
        public void onViewCreated() {
            super.onViewCreated();
            resetTranslations();
        }

        /**
         * Reset the collapsing header to its expanded, transparent state and
         * re-apply orientation-dependent paddings, then scroll back to top.
         */
        @Override
        public void resetTranslations() {
            parentFragment.toolbar.setBackgroundColor(Color.TRANSPARENT);
            parentFragment.viewContainerBackground.setBackgroundColor(Color.TRANSPARENT);
            parentFragment.descriptionText.setVisibility(View.GONE);

            int orientation = Utils.getScreenOrientation(getActivity());
            float density = getResources().getDisplayMetrics().density;
            // 48dp side margins in landscape only.
            float tabsPadding = orientation == Configuration.ORIENTATION_PORTRAIT ? 0 : density * 48;
            FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams)
                    parentFragment.mTabs.getLayoutParams();
            layoutParams.setMargins((int) tabsPadding, 0, (int) tabsPadding, 0);
            parentFragment.mTabs.requestLayout();

            setPaddingRecyclerview(orientation, density);
            if (onScrollListener != null) onScrollListener.reset();
            layoutManager.scrollToPositionWithOffset(0, 0);
        }

        // Mirrors the tab margins: 48dp side padding in landscape only.
        public void setPaddingRecyclerview(int orientation, float density) {
            float recyclerviewPadding = orientation == Configuration.ORIENTATION_PORTRAIT ? 0 : density * 48;
            recyclerView.setPadding((int) recyclerviewPadding, recyclerView.getPaddingTop(),
                    (int) recyclerviewPadding, recyclerView.getPaddingBottom());
        }

        /** Tab showing the long/short description card of the kernel. */
        public static class InfoFragment extends ParentFragment.DownloadRecyclerViewFragment {

            public static InfoFragment newInstance(Downloads.KernelContent kernelContent) {
                InfoFragment fragment = new InfoFragment();
                fragment.kernelContent = kernelContent;
                return fragment;
            }

            private Downloads.KernelContent kernelContent;

            @Override
            public void init(Bundle savedInstanceState) {
                super.init(savedInstanceState);
                if (kernelContent != null && kernelContent.getShortDescription() != null
                        && kernelContent.getLongDescription() != null)
                    addView(new DownloadInfoCardView.DDDownloadInfoCard(kernelContent));
            }
        }

        /** Tab listing one card per kernel feature. */
        public static class FeaturesFragment extends ParentFragment.DownloadRecyclerViewFragment {

            public static FeaturesFragment newInstance(List<Downloads.Feature> features) {
                FeaturesFragment fragment = new FeaturesFragment();
                fragment.features = features;
                return fragment;
            }

            private List<Downloads.Feature> features;

            @Override
            public void init(Bundle savedInstanceState) {
                super.init(savedInstanceState);
                if (features != null)
                    for (Downloads.Feature feature : features)
                        addView(new FeatureCardView.DFeatureCard(feature));
            }
        }

        /** Tab listing one card per downloadable build (name + MD5 required). */
        public static class DownloadFragment extends ParentFragment.DownloadRecyclerViewFragment {

            public static DownloadFragment newInstance(List<Downloads.Download> downloads) {
                DownloadFragment fragment = new DownloadFragment();
                fragment.downloads = downloads;
                return fragment;
            }

            private List<Downloads.Download> downloads;

            @Override
            public void init(Bundle savedInstanceState) {
                super.init(savedInstanceState);
                if (downloads != null)
                    for (Downloads.Download download : downloads) {
                        if (download.getName() != null && download.getMD5sum() != null)
                            addView(new DownloadCardView.DDownloadCard(download));
                    }
            }
        }
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwtmockito.GwtMockitoTestRunner;
import com.google.gwtmockito.WithClassesToStub;
import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint;
import org.drools.workbench.models.guided.dtable.shared.model.ActionCol52;
import org.drools.workbench.models.guided.dtable.shared.model.ActionInsertFactCol52;
import org.drools.workbench.models.guided.dtable.shared.model.ActionSetFieldCol52;
import org.drools.workbench.models.guided.dtable.shared.model.CompositeColumn;
import org.drools.workbench.models.guided.dtable.shared.model.ConditionCol52;
import org.drools.workbench.models.guided.dtable.shared.model.DTColumnConfig52;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;
import org.drools.workbench.models.guided.dtable.shared.model.Pattern52;
import org.drools.workbench.screens.guided.dtable.client.resources.i18n.GuidedDecisionTableErraiConstants;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableView;
import org.drools.workbench.screens.guided.dtable.client.widget.table.model.synchronizers.ModelSynchronizer.VetoException;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.NewGuidedDecisionTableColumnWizard;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.AdditionalInfoPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.FieldPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.PatternPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.ValueOptionsPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.ActionInsertFactWrapper;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.ActionSetFactWrapper;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.ActionWrapper;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.DefaultWidgetFactory;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.LimitedWidgetFactory;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins.commons.PatternWrapper;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.soup.project.datamodel.oracle.FieldAccessorsAndMutators;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.uberfire.ext.widgets.core.client.wizards.WizardPage;
import org.uberfire.ext.widgets.core.client.wizards.WizardPageStatusChangeEvent;
import org.uberfire.mocks.EventSourceMock;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code ActionSetFactPlugin}, the wizard plugin that creates
 * "set field on a fact" / "insert fact" action columns in a guided decision
 * table.
 *
 * <p>The plugin under test is a Mockito {@code spy}, so tests stub its own
 * collaborator-accessor methods (e.g. {@code editingWrapper()},
 * {@code patternWrapper()}, {@code isNewColumn()}) with
 * {@code doReturn(..).when(spy)} — the form that does NOT invoke the real
 * method during stubbing — and then verify interactions against the wizard
 * pages and the table presenter.
 */
@RunWith(GwtMockitoTestRunner.class)
@WithClassesToStub({DefaultWidgetFactory.class, LimitedWidgetFactory.class})
public class ActionSetFactPluginTest {

    // Wizard pages the plugin drives.
    @Mock
    private PatternPage patternPage;

    @Mock
    private FieldPage fieldPage;

    @Mock
    private ValueOptionsPage<ActionSetFactPlugin> valueOptionsPage;

    @Mock
    private AdditionalInfoPage<ActionSetFactPlugin> additionalInfoPage;

    // Decision-table presenter and its dependencies.
    @Mock
    private GuidedDecisionTableView.Presenter presenter;

    @Mock
    private TranslationService translationService;

    @Mock
    private NewGuidedDecisionTableColumnWizard wizard;

    @Mock
    private EventSourceMock<WizardPageStatusChangeEvent> changeEvent;

    @Mock
    private AsyncPackageDataModelOracle oracle;

    @Mock
    private GuidedDecisionTable52 model;

    // NOTE(review): this field initializer runs before the test runner
    // populates the @Mock fields, so the constructor arguments are null at
    // construction time; @InjectMocks then injects into the instance.
    // Presumably intentional (the runner/injection makes it work) — confirm.
    @InjectMocks
    private ActionSetFactPlugin plugin = spy(new ActionSetFactPlugin(patternPage,
                                                                     fieldPage,
                                                                     valueOptionsPage,
                                                                     additionalInfoPage,
                                                                     changeEvent,
                                                                     translationService));

    /**
     * Common stubbing: an extended-entry table model reachable through the
     * presenter, and the presenter reachable through the (spied) plugin.
     */
    @Before
    public void setup() {
        doReturn(GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY).when(model).getTableFormat();
        doReturn(oracle).when(presenter).getDataModelOracle();
        doReturn(presenter).when(plugin).getPresenter();
        doReturn(model).when(presenter).getModel();
    }

    // Title comes from the i18n translation service.
    @Test
    public void testGetTitle() {
        final String errorKey = GuidedDecisionTableErraiConstants.ActionInsertFactPlugin_SetTheValueOfAField;
        final String errorMessage = "Title";

        when(translationService.format(errorKey)).thenReturn(errorMessage);

        final String title = plugin.getTitle();

        assertEquals(errorMessage, title);
    }

    // Extended-entry tables expose all four wizard pages.
    @Test
    public void testGetPages() {
        doReturn(GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY).when(plugin).tableFormat();

        final List<WizardPage> pages = plugin.getPages();

        assertEquals(4, pages.size());
    }

    @Test
    public void testGenerateColumn() {
        final ActionCol52 actionCol52 = mock(ActionCol52.class);
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionCol52).when(actionWrapper).getActionCol52();
        doReturn(actionWrapper).when(plugin).editingWrapper();

        final Boolean success = plugin.generateColumn();

        assertTrue(success);
        verify(presenter).appendColumn(actionCol52);
    }

    // A vetoed update must surface the generic veto error and report failure.
    @Test
    public void testGenerateColumnWhenColumnIsNotNewAndVetoed() throws Exception {
        doReturn(false).when(plugin).isNewColumn();
        doThrow(VetoException.class).when(presenter).updateColumn(any(ActionCol52.class),
                                                                  any(ActionCol52.class));

        assertFalse(plugin.generateColumn());

        verify(wizard).showGenericVetoError();
    }

    // Marking the page completed is a no-op when it already is completed.
    @Test
    public void testSetValueOptionsPageAsCompletedWhenItIsCompleted() throws Exception {
        doReturn(true).when(plugin).isValueOptionsPageCompleted();

        plugin.setValueOptionsPageAsCompleted();

        verify(plugin, never()).setValueOptionsPageCompleted();
        verify(plugin, never()).fireChangeEvent(valueOptionsPage);
    }

    @Test
    public void testSetValueOptionsPageAsCompletedWhenItIsNotCompleted() throws Exception {
        doReturn(false).when(plugin).isValueOptionsPageCompleted();

        plugin.setValueOptionsPageAsCompleted();

        verify(plugin).setValueOptionsPageCompleted();
        verify(plugin).fireChangeEvent(valueOptionsPage);
    }

    // Choosing a pattern resets the field/type on the wrapper and notifies
    // the affected pages.
    @Test
    public void testSetEditingPattern() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final PatternWrapper patternWrapper = mock(PatternWrapper.class);

        when(plugin.editingWrapper()).thenReturn(actionWrapper);
        when(patternWrapper.getFactType()).thenReturn("factType");
        when(patternWrapper.getBoundName()).thenReturn("boundName");

        plugin.setEditingPattern(patternWrapper);

        verify(actionWrapper).setFactField("");
        verify(actionWrapper).setFactType("factType");
        verify(actionWrapper).setBoundName("boundName");
        verify(actionWrapper).setType("");
        verify(plugin).fireChangeEvent(patternPage);
        verify(plugin).fireChangeEvent(fieldPage);
        verify(plugin).fireChangeEvent(additionalInfoPage);
    }

    @Test
    public void testConstraintValue() {
        final int expectedConstraintValue = BaseSingleFieldConstraint.TYPE_UNDEFINED;

        final int actualConstraintValue = plugin.constraintValue();

        assertEquals(expectedConstraintValue, actualConstraintValue);
    }

    @Test
    public void testGetFactType() {
        final PatternWrapper patternWrapper = mock(PatternWrapper.class);
        final String expectedFactType = "factType";

        when(patternWrapper.getFactType()).thenReturn(expectedFactType);
        when(plugin.patternWrapper()).thenReturn(patternWrapper);

        final String actualFactType = plugin.getFactType();

        verify(patternWrapper).getFactType();
        assertEquals(expectedFactType, actualFactType);
    }

    // Set-field actions write to facts, so the field filter is the mutator.
    @Test
    public void testGetAccessor() {
        final FieldAccessorsAndMutators expectedAccessor = FieldAccessorsAndMutators.MUTATOR;

        final FieldAccessorsAndMutators actualAccessor = plugin.getAccessor();

        assertEquals(expectedAccessor, actualAccessor);
    }

    @Test
    public void testFilterEnumFields() {
        assertFalse(plugin.filterEnumFields());
    }

    @Test
    public void testGetFactField() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final String expectedFactField = "factField";

        when(actionWrapper.getFactField()).thenReturn(expectedFactField);
        when(plugin.editingWrapper()).thenReturn(actionWrapper);

        final String actualFactField = plugin.getFactField();

        verify(actionWrapper).getFactField();
        assertEquals(expectedFactField, actualFactField);
    }

    // New fact pattern -> the plugin builds an ActionInsertFactWrapper and
    // copies pattern/field data (field type resolved via the oracle) into it.
    @Test
    public void testSetFactFieldWhenFactPatternIsNew() {
        final ActionInsertFactWrapper actionWrapper = mock(ActionInsertFactWrapper.class);
        final PatternWrapper patternWrapperMock = patternWrapperMock("factType", "boundName");

        doReturn(true).when(plugin).isNewFactPattern();
        doReturn(actionWrapper).when(plugin).newActionInsertFactWrapper();
        doReturn(patternWrapperMock).when(plugin).patternWrapper();
        doReturn("type").when(oracle).getFieldType(any(), any());

        plugin.setFactField("selectedValue");

        verify(actionWrapper).setFactField(eq("selectedValue"));
        verify(actionWrapper).setFactType(eq("factType"));
        verify(actionWrapper).setBoundName(eq("boundName"));
        verify(actionWrapper).setType(eq("type"));
        verify(plugin).fireChangeEvent(fieldPage);
    }

    // Existing fact pattern -> an ActionSetFactWrapper is used instead and the
    // pattern is resolved from the model by bound name.
    @Test
    public void testSetFactFieldWhenFactPatternIsNotNew() {
        final ActionSetFactWrapper actionWrapper = spy(new ActionSetFactWrapper(plugin));
        final Pattern52 patternMock = patternMock("factType");
        final PatternWrapper patternWrapperMock = patternWrapperMock("factType", "boundName");

        doReturn(false).when(plugin).isNewFactPattern();
        doReturn(actionWrapper).when(plugin).newActionSetFactWrapper();
        doReturn(patternWrapperMock).when(plugin).patternWrapper();
        doReturn(patternMock).when(model).getConditionPattern(eq("boundName"));
        doReturn("type").when(oracle).getFieldType(eq("factType"), eq("selectedValue"));

        plugin.setFactField("selectedValue");

        verify(actionWrapper).setFactField(eq("selectedValue"));
        verify(actionWrapper).setFactType(eq("factType"));
        verify(actionWrapper).setBoundName(eq("boundName"));
        verify(actionWrapper).setType(eq("type"));
        verify(plugin).fireChangeEvent(fieldPage);
    }

    // Editing an existing column must reuse the existing wrapper, never build
    // a fresh one.
    @Test
    public void testSetFactFieldWhenColumnIsNotNew() {
        final ActionInsertFactWrapper actionWrapper = mock(ActionInsertFactWrapper.class);
        final PatternWrapper patternWrapperMock = patternWrapperMock("factType", "boundName");

        doReturn(false).when(plugin).isNewColumn();
        doReturn(true).when(plugin).isNewFactPattern();
        doReturn(actionWrapper).when(plugin).editingWrapper();
        doReturn(patternWrapperMock).when(plugin).patternWrapper();
        doReturn("type").when(oracle).getFieldType(any(), any());

        plugin.setFactField("selectedValue");

        verify(actionWrapper).setFactField(eq("selectedValue"));
        verify(actionWrapper).setFactType(eq("factType"));
        verify(actionWrapper).setBoundName(eq("boundName"));
        verify(actionWrapper).setType(eq("type"));
        verify(plugin).fireChangeEvent(fieldPage);
        verify(plugin, never()).newActionInsertFactWrapper();
    }

    // Helper: PatternWrapper mock with stubbed fact type and bound name.
    private PatternWrapper patternWrapperMock(final String factType,
                                              final String boundName) {
        final PatternWrapper patternWrapper = mock(PatternWrapper.class);

        doReturn(factType).when(patternWrapper).getFactType();
        doReturn(boundName).when(patternWrapper).getBoundName();

        return patternWrapper;
    }

    // Helper: Pattern52 mock with a stubbed fact type.
    private Pattern52 patternMock(final String factType) {
        final Pattern52 pattern = mock(Pattern52.class);

        doReturn(factType).when(pattern).getFactType();

        return pattern;
    }

    // editingPattern() materialises a Pattern52 from the current wrapper.
    @Test
    public void testEditingPattern() {
        final PatternWrapper patternWrapper = mock(PatternWrapper.class);

        doReturn(patternWrapper).when(plugin).patternWrapper();
        when(patternWrapper.getFactType()).thenReturn("factType");
        when(patternWrapper.getBoundName()).thenReturn("boundName");
        when(patternWrapper.isNegated()).thenReturn(false);
        when(patternWrapper.getEntryPointName()).thenReturn("entryPoint");

        final Pattern52 pattern52 = plugin.editingPattern();

        assertEquals("factType", pattern52.getFactType());
        assertEquals("boundName", pattern52.getBoundName());
        assertEquals(false, pattern52.isNegated());
        assertEquals("entryPoint", pattern52.getEntryPointName());
    }

    // The following accessor tests check pure delegation to the wrapper.
    @Test
    public void testEditingCol() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.editingCol();

        verify(actionWrapper).getActionCol52();
    }

    @Test
    public void testGetHeader() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.getHeader();

        verify(actionWrapper).getHeader();
    }

    @Test
    public void testSetHeader() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final String header = "header";

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.setHeader(header);

        verify(actionWrapper).setHeader(header);
        verify(plugin).fireChangeEvent(additionalInfoPage);
    }

    @Test
    public void testSetInsertLogical() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final boolean insertLogical = false;

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.setInsertLogical(insertLogical);

        verify(actionWrapper).setInsertLogical(insertLogical);
    }

    @Test
    public void testSetUpdate() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final boolean update = false;

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.setUpdate(update);

        verify(actionWrapper).setUpdate(update);
    }

    // "Update engine with changes" only applies to set-field (existing
    // pattern) actions; "logically insert" only to insert-fact actions.
    @Test
    public void testShowUpdateEngineWithChangesWhenFactPatternIsNew() {
        doReturn(true).when(plugin).isNewFactPattern();

        final boolean showUpdateEngineWithChanges = plugin.showUpdateEngineWithChanges();

        assertEquals(false, showUpdateEngineWithChanges);
    }

    @Test
    public void testShowUpdateEngineWithChangesWhenFactPatternIsNotNew() {
        doReturn(mock(ActionSetFactWrapper.class)).when(plugin).editingWrapper();

        final boolean showUpdateEngineWithChanges = plugin.showUpdateEngineWithChanges();

        assertEquals(true, showUpdateEngineWithChanges);
    }

    @Test
    public void testShowLogicallyInsertWhenFactPatternIsNew() {
        doReturn(mock(ActionInsertFactWrapper.class)).when(plugin).editingWrapper();

        final boolean showLogicallyInsert = plugin.showLogicallyInsert();

        assertEquals(true, showLogicallyInsert);
    }

    @Test
    public void testShowLogicallyInsertWhenFactPatternIsNotNew() {
        doReturn(false).when(plugin).isNewFactPattern();

        final boolean showLogicallyInsert = plugin.showLogicallyInsert();

        assertEquals(false, showLogicallyInsert);
    }

    @Test
    public void testGetValueList() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.getValueList();

        verify(actionWrapper).getValueList();
    }

    @Test
    public void testSetValueList() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final String valueList = "valueList";

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.setValueList(valueList);

        verify(actionWrapper).setValueList(valueList);
    }

    // tableFormat() is read from the presenter's current model (the local
    // mock intentionally shadows the class-level one).
    @Test
    public void testTableFormat() {
        final GuidedDecisionTable52.TableFormat expectedTableFormat = GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY;
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);

        when(model.getTableFormat()).thenReturn(expectedTableFormat);
        when(presenter.getModel()).thenReturn(model);

        final GuidedDecisionTable52.TableFormat actualTableFormat = plugin.tableFormat();

        assertEquals(expectedTableFormat, actualTableFormat);
    }

    @Test
    public void testDefaultValueWidget() {
        final IsWidget defaultWidget = plugin.defaultValueWidget();

        assertNotNull(defaultWidget);
    }

    @Test
    public void testLimitedValueWidget() {
        final IsWidget limitedValueWidget = plugin.limitedValueWidget();

        assertNotNull(limitedValueWidget);
    }

    // Page-initialisation hooks configure which page features are enabled.
    @Test
    public void testInitializedPatternPage() {
        plugin.initializedPatternPage();

        verify(patternPage).disableEntryPoint();
    }

    @Test
    public void testInitializedAdditionalInfoPage() throws Exception {
        plugin.initializedAdditionalInfoPage();

        verify(additionalInfoPage).setPlugin(plugin);
        verify(additionalInfoPage).enableHeader();
        verify(additionalInfoPage).enableHideColumn();
        verify(additionalInfoPage).enableLogicallyInsert();
    }

    @Test
    public void testInitializedValueOptionsPageWhenTableIsALimitedEntry() throws Exception {
        doReturn(GuidedDecisionTable52.TableFormat.LIMITED_ENTRY).when(plugin).tableFormat();

        plugin.initializedValueOptionsPage();

        verify(valueOptionsPage).setPlugin(plugin);
        verify(valueOptionsPage).enableLimitedValue();
    }

    // Headers of existing action columns are reported as already in use.
    @Test
    public void testGetAlreadyUsedColumnNames() throws Exception {
        final GuidedDecisionTable52 model = new GuidedDecisionTable52();
        model.getActionCols().add(new ActionCol52() {{
            setHeader("a");
        }});
        model.getActionCols().add(new ActionCol52() {{
            setHeader("b");
        }});
        when(presenter.getModel()).thenReturn(model);

        assertEquals(2, plugin.getAlreadyUsedColumnHeaders().size());
        assertTrue(plugin.getAlreadyUsedColumnHeaders().contains("a"));
        assertTrue(plugin.getAlreadyUsedColumnHeaders().contains("b"));
    }

    // newActionWrapper() dispatches on the concrete column type.
    @Test
    public void testNewActionWrapperWhenColumnIsAnActionInsertFactCol52() throws Exception {
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);
        when(model.getTableFormat()).thenReturn(GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY);
        when(presenter.getModel()).thenReturn(model);

        final ActionWrapper wrapper = plugin.newActionWrapper(mock(ActionInsertFactCol52.class));

        assertTrue(wrapper instanceof ActionInsertFactWrapper);
    }

    @Test
    public void testNewActionWrapperWhenColumnIsAnActionSetFactWrapper() throws Exception {
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);
        when(model.getTableFormat()).thenReturn(GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY);
        when(presenter.getModel()).thenReturn(model);

        final ActionWrapper wrapper = plugin.newActionWrapper(mock(ActionSetFieldCol52.class));

        assertTrue(wrapper instanceof ActionSetFactWrapper);
    }

    // Any other column type is unsupported.
    @Test(expected = UnsupportedOperationException.class)
    public void testNewActionWrapperWhenColumnIsInvalid() throws Exception {
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);
        when(model.getTableFormat()).thenReturn(GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY);
        when(presenter.getModel()).thenReturn(model);

        plugin.newActionWrapper(mock(ConditionCol52.class));
    }

    // Looks up an existing pattern wrapper by bound name; returns the very
    // same instance when found.
    @Test
    public void testNewPatternWrapperWhenPatternIsFound() throws Exception {
        final PatternWrapper expectedWrapper = mockPatternWrapper("boundName");
        final Set<PatternWrapper> actionWrappers = new HashSet<PatternWrapper>() {{
            add(expectedWrapper);
        }};

        doReturn(actionWrappers).when(plugin).getPatterns();

        final PatternWrapper actualWrapper = plugin.newPatternWrapper(mockActionWrapper("boundName", "factType"));

        assertSame(expectedWrapper, actualWrapper);
    }

    // When no pattern matches, a new wrapper carrying the action's bound
    // name and fact type is created.
    @Test
    public void testNewPatternWrapperWhenPatternIsNotFound() throws Exception {
        final Set<PatternWrapper> actionWrappers = new HashSet<>();
        final ActionWrapper actionWrapper = mockActionWrapper("boundName", "factType");

        doReturn(actionWrappers).when(plugin).getPatterns();

        final PatternWrapper patternWrapper = plugin.newPatternWrapper(actionWrapper);

        assertEquals(actionWrapper.getBoundName(), patternWrapper.getBoundName());
        assertEquals(actionWrapper.getFactType(), patternWrapper.getFactType());
    }

    // setupValues() restores wizard state only when editing an existing
    // column.
    @Test
    public void testSetupValuesWhenColumnIsNew() throws Exception {
        doReturn(true).when(plugin).isNewColumn();

        plugin.setupValues();

        verify(plugin, never()).setValueOptionsPageAsCompleted();
        verify(plugin, never()).fireChangeEvent(patternPage);
        verify(plugin, never()).fireChangeEvent(fieldPage);
        verify(plugin, never()).fireChangeEvent(additionalInfoPage);
    }

    @Test
    public void testSetupValuesWhenColumnIsNotNew() throws Exception {
        final DTColumnConfig52 column = mock(DTColumnConfig52.class);
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);
        final PatternWrapper patternWrapper = mock(PatternWrapper.class);

        doReturn(column).when(plugin).getOriginalColumnConfig52();
        doReturn(actionWrapper).when(plugin).newActionWrapper(column);
        doReturn(patternWrapper).when(plugin).newPatternWrapper(actionWrapper);
        doReturn(false).when(plugin).isNewColumn();

        plugin.setupValues();

        verify(plugin).setValueOptionsPageAsCompleted();
        verify(plugin).fireChangeEvent(patternPage);
        verify(plugin).fireChangeEvent(fieldPage);
        verify(plugin).fireChangeEvent(additionalInfoPage);
    }

    // generateColumn(): new columns are appended, existing ones updated.
    @Test
    public void testGenerateColumnWhenColumnIsNew() throws Exception {
        final ActionCol52 actionCol52 = mock(ActionCol52.class);

        doReturn(actionCol52).when(plugin).editingCol();
        doReturn(true).when(plugin).isNewColumn();

        assertTrue(plugin.generateColumn());

        verify(presenter).appendColumn(actionCol52);
    }

    @Test
    public void testGenerateColumnWhenColumnIsNotNew() throws Exception {
        final ActionCol52 editingCol = mock(ActionCol52.class);
        final ActionCol52 originalCol = mock(ActionCol52.class);

        doReturn(editingCol).when(plugin).editingCol();
        doReturn(originalCol).when(plugin).originalCol();
        doReturn(false).when(plugin).isNewColumn();

        assertTrue(plugin.generateColumn());

        verify(presenter).updateColumn(originalCol, editingCol);
    }

    // getPatterns(): new columns see both condition and action patterns;
    // existing columns see only the side matching isNewFactPattern().
    @Test
    public void testGetPatternsWhenColumnIsNew() throws Exception {
        mockPatterns();

        doReturn(true).when(plugin).isNewColumn();

        final Set<PatternWrapper> patterns = plugin.getPatterns();

        assertEquals(2, patterns.size());
        assertTrue(patterns.contains(new PatternWrapper("factType", "boundName", true)));
        assertTrue(patterns.contains(new PatternWrapper("factType", "boundName")));
    }

    @Test
    public void testGetPatternsWhenColumnIsNotNewButFactPatternIsNew() throws Exception {
        mockPatterns();

        doReturn(false).when(plugin).isNewColumn();
        doReturn(true).when(plugin).isNewFactPattern();

        final Set<PatternWrapper> patterns = plugin.getPatterns();

        assertEquals(1, patterns.size());
        assertTrue(patterns.contains(new PatternWrapper("factType", "boundName")));
    }

    @Test
    public void testGetPatternsWhenColumnAndFactPatternAreNotNew() throws Exception {
        mockPatterns();

        doReturn(false).when(plugin).isNewColumn();
        doReturn(false).when(plugin).isNewFactPattern();

        final Set<PatternWrapper> patterns = plugin.getPatterns();

        assertEquals(1, patterns.size());
        assertTrue(patterns.contains(new PatternWrapper("factType", "boundName", true)));
    }

    @Test
    public void testIsHideColumn() {
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.isHideColumn();

        verify(actionWrapper).isHideColumn();
    }

    @Test
    public void testSetHideColumn() {
        final boolean hideColumn = false;
        final ActionWrapper actionWrapper = mock(ActionWrapper.class);

        doReturn(actionWrapper).when(plugin).editingWrapper();

        plugin.setHideColumn(hideColumn);

        verify(actionWrapper).setHideColumn(hideColumn);
    }

    // A bound name absent from the model's patterns counts as new.
    @Test
    public void testIsNewFactPatternWhenIsNew() throws Exception {
        mockPatterns();

        plugin.setEditingPattern(new PatternWrapper("factType", "bananna"));

        assertTrue(plugin.isNewFactPattern());
    }

    @Test
    public void testIsNewFactPatternWhenIsExisting() throws Exception {
        mockPatterns();

        plugin.setEditingPattern(new PatternWrapper("factType", "boundName"));

        assertFalse(plugin.isNewFactPattern());
    }

    @Test
    public void testIsFieldBindingValid() {
        assertTrue(plugin.isFieldBindingValid());
    }

    @Test
    public void testIsBindable() {
        assertFalse(plugin.isBindable());
    }

    // Helper: model with one (negated) condition pattern and two identical
    // insert-fact action columns.
    private void mockPatterns() {
        final GuidedDecisionTable52 model = mock(GuidedDecisionTable52.class);
        final List<CompositeColumn<?>> patterns = Collections.singletonList(fakePattern());
        final List<ActionCol52> actions = Arrays.asList(fakeActionCol(),
                                                        fakeActionCol());

        when(model.getConditions()).thenReturn(patterns);
        when(model.getActionCols()).thenReturn(actions);
        when(presenter.getModel()).thenReturn(model);
    }

    // Helper: ActionWrapper mock with stubbed bound name and fact type.
    private ActionWrapper mockActionWrapper(final String boundName,
                                            final String factType) {
        final ActionWrapper wrapper = mock(ActionWrapper.class);

        when(wrapper.getBoundName()).thenReturn(boundName);
        when(wrapper.getFactType()).thenReturn(factType);

        return wrapper;
    }

    // Helper: PatternWrapper mock with a stubbed bound name only.
    private PatternWrapper mockPatternWrapper(final String boundName) {
        final PatternWrapper wrapper = mock(PatternWrapper.class);

        when(wrapper.getBoundName()).thenReturn(boundName);

        return wrapper;
    }

    // Helper: real (non-mock) negated condition pattern.
    private Pattern52 fakePattern() {
        return new Pattern52() {{
            setFactType("factType");
            setBoundName("boundName");
            setNegated(true);
        }};
    }

    // Helper: real (non-mock) insert-fact action column.
    private ActionInsertFactCol52 fakeActionCol() {
        return new ActionInsertFactCol52() {{
            setFactType("factType");
            setBoundName("boundName");
        }};
    }
}
package towers; import projectiles.Projectile; import projectiles.RingOfFire; import mobs.Mob; import models.DriverModel; import utilities.Position; import views.Alert; import views.DriverView; /** * Creates a tower that shoots a projectile * that blasts the area with fire effecting * all mobs around it * * @author Scorpion * */ public class FireTower extends Tower { private static final String TOWER_BASE_IMAGE = "FireTower.png"; public static final String TOWER_TURRET_IMAGE = null; public static final int TOWER_RANGE = 75; public static final int TOWER_FIRE_RATE = 800; public static final int TOWER_COST = 300; private static boolean clickedTowerBefore = true; private static final long serialVersionUID = 1L; /** * Constructor for the FireTower * * @param location * @param model */ public FireTower(final Position position, final DriverModel model) { super(position, TOWER_BASE_IMAGE, TOWER_TURRET_IMAGE); this.range = (int) (TOWER_RANGE); this.fireRate = (int) (TOWER_FIRE_RATE); this.cost = TOWER_COST; this.path1UpgradeName = "Damage"; this.path1UpgradeIcon = "Damage Icon.png"; this.path1UpgradeLevel = 0; this.path1UpgradeCosts = new int[3]; this.path1UpgradeCosts[0] = 350; this.path1UpgradeCosts[1] = 850; this.path1UpgradeCosts[2] = 1750; this.path2UpgradeName = "Fire Rate"; this.path2UpgradeIcon = "Fire Rate Icon.jpg"; this.path2UpgradeLevel = 0; this.path2UpgradeCosts = new int[3]; this.path2UpgradeCosts[0] = 350; this.path2UpgradeCosts[1] = 750; this.path2UpgradeCosts[2] = 1800; this.path3UpgradeName = "Range"; this.path3UpgradeIcon = "Range Icon.png"; this.path3UpgradeLevel = 0; this.path3UpgradeCosts = new int[3]; this.path3UpgradeCosts[0] = 400; this.path3UpgradeCosts[1] = 950; this.path3UpgradeCosts[2] = 2200; model.towerBuyUpgradeMoney(this.cost); } /** * method to tell towers to attack a mob if * their fire rate cool down is finished * * @param model * @return Projectile[] */ public Projectile[] attackMob(DriverModel model) { Projectile[] projectiles = new 
Projectile[1]; if (this.reloadProgress > 30) { this.reloadProgress -= 30; return projectiles; } this.mobTravelDistance = 0; this.reloadProgress = (int) (((this.fireRate) - (200 * this.path2UpgradeLevel)) * fireRateBoost); for (Mob mob : model.allMobs()) { if (this.position.getDistance(mob.getPosition()) < (((this.range + (25 * this.path3UpgradeLevel)) * rangeBoost) + mob.getRadius())) { if (mob.getDistanceTraveled() > this.mobTravelDistance) { projectiles[0] = new RingOfFire(model, this.position, null, this.path3UpgradeLevel, this.path1UpgradeLevel); } } } return projectiles; } /** * Method that shows the tower's tutorial * if its the first time users clicked on it * * @return boolean */ public static boolean clickedTower(final DriverView view) { if (clickedTowerBefore) { return true; } new Alert(view, DriverView.getImage(TOWER_BASE_IMAGE, 50, 50), "Fire Burst Tower", "This tower blasts the area", "with fire burning all mobs", "around it."); clickedTowerBefore = true; return false; } /** * getting the range of a tower * * @return int */ public int getRange() { return path3CurrentValue(); } /** * returns the current value of the attribute * for the towers first upgrade path * * @return int */ public int path1CurrentValue() { return RingOfFire.getDamageLevelBoost(this.path1UpgradeLevel); } /** * returns the current value of the attribute * for the towers second upgrade path * * @return int */ public int path2CurrentValue() { return (int) (60000 / ((this.fireRate) - (200 * this.path2UpgradeLevel)) * fireRateBoost); } /** * returns the current value of the attribute * for the towers third upgrade path * * @return int */ public int path3CurrentValue() { return (int) ((this.range + (25 * this.path3UpgradeLevel)) * rangeBoost); } /** * returns the value of the attribute * if it were to go to the next upgrade * for the towers first upgrade path * * @return int */ public int path1UpgradeValue() { return this.path1UpgradeLevel == 3 ? 
-1 : RingOfFire.getDamageLevelBoost(this.path1UpgradeLevel+1); } /** * returns the value of the attribute * if it were to go to the next upgrade * for the towers second upgrade path * * @return int */ public int path2UpgradeValue() { return this.path2UpgradeLevel == 3 ? -1 : (int) (60000 / ((this.fireRate) - (200 * (this.path2UpgradeLevel+1))) * fireRateBoost); } /** * returns the value of the attribute * if it were to go to the next upgrade * for the towers third upgrade path * * @return int */ public int path3UpgradeValue() { return this.path3UpgradeLevel == 3 ? -1 : (int) ((this.range + (25 * (this.path3UpgradeLevel+1))) * rangeBoost); } /** * upgrades the first upgrade path to the next level * * @param model */ public void upgradePath1(final DriverModel model) { if (this.path1UpgradeLevel == 3) { model.towerBuyUpgradeMoney(this.path1UpgradeCosts[this.path1UpgradeLevel]); this.path1UpgradeLevel = -1; } else { model.towerBuyUpgradeMoney(this.path1UpgradeCosts[this.path1UpgradeLevel]); this.path1UpgradeLevel++; } } /** * upgrades the second upgrade path to the next level * * @param model */ public void upgradePath2(final DriverModel model) { if (this.path2UpgradeLevel == 3) { model.towerBuyUpgradeMoney(this.path2UpgradeCosts[this.path2UpgradeLevel]); this.path2UpgradeLevel = -1; } else { model.towerBuyUpgradeMoney(this.path2UpgradeCosts[this.path2UpgradeLevel]); this.path2UpgradeLevel++; } } /** * upgrades the third upgrade path to the next level * * @param model */ public void upgradePath3(final DriverModel model) { if (this.path3UpgradeLevel == 3) { model.towerBuyUpgradeMoney(this.path3UpgradeCosts[this.path3UpgradeLevel]); this.path3UpgradeLevel = -1; } else { model.towerBuyUpgradeMoney(this.path3UpgradeCosts[this.path3UpgradeLevel]); this.path3UpgradeLevel++; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.analytics.value;

import java.time.Instant;
import java.time.format.DateTimeParseException;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.IntConsumer;
import java.util.function.LongConsumer;

import org.apache.solr.analytics.util.function.BooleanConsumer;
import org.apache.solr.analytics.util.function.FloatConsumer;
import org.apache.solr.analytics.value.AnalyticsValue.AbstractAnalyticsValue;
import org.apache.solr.analytics.value.AnalyticsValueStream.AbstractAnalyticsValueStream;
import org.apache.solr.analytics.value.BooleanValue.AbstractBooleanValue;
import org.apache.solr.analytics.value.BooleanValueStream.AbstractBooleanValueStream;
import org.apache.solr.analytics.value.DateValue.AbstractDateValue;
import org.apache.solr.analytics.value.DateValueStream.AbstractDateValueStream;
import org.apache.solr.analytics.value.DoubleValue.AbstractDoubleValue;
import org.apache.solr.analytics.value.DoubleValueStream.AbstractDoubleValueStream;
import org.apache.solr.analytics.value.FloatValue.AbstractFloatValue;
import org.apache.solr.analytics.value.FloatValueStream.AbstractFloatValueStream;
import org.apache.solr.analytics.value.IntValue.AbstractIntValue;
import org.apache.solr.analytics.value.IntValueStream.AbstractIntValueStream;
import org.apache.solr.analytics.value.LongValue.AbstractLongValue;
import org.apache.solr.analytics.value.LongValueStream.AbstractLongValueStream;
import org.apache.solr.analytics.value.StringValue.AbstractStringValue;
import org.apache.solr.analytics.value.StringValueStream.AbstractStringValueStream;

/**
 * Test fixtures: one "fillable" implementation per analytics value/value-stream
 * type. Single-value classes expose setValue/setExists; stream classes expose
 * varargs setValues. All setters return {@code this} for chaining.
 */
public class FillableTestValue {

  // Generic object value; expression type defaults to CONST.
  public static class TestAnalyticsValue extends AbstractAnalyticsValue {
    private final ExpressionType expressionType;
    private Object value;
    private boolean exists;

    public TestAnalyticsValue() { this(ExpressionType.CONST); }
    public TestAnalyticsValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestAnalyticsValue setValue(Object value) { this.value = value; return this; }
    public TestAnalyticsValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public Object getObject() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_analytics_value"; }
    @Override public String getExpressionStr() { return "test_analytics_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Generic object stream; unlike the typed streams below, its expression type
  // is configurable (defaults to CONST).
  public static class TestAnalyticsValueStream extends AbstractAnalyticsValueStream {
    private final ExpressionType expressionType;
    private Object[] values;

    public TestAnalyticsValueStream() { this(ExpressionType.CONST); }
    public TestAnalyticsValueStream(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestAnalyticsValueStream setValues(Object... values) { this.values = values; return this; }

    @Override public void streamObjects(Consumer<Object> cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_analytics_value_stream"; }
    @Override public String getExpressionStr() { return "test_analytics_value_stream"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Single int value.
  public static class TestIntValue extends AbstractIntValue {
    private final ExpressionType expressionType;
    private int value;
    private boolean exists;

    public TestIntValue() { this(ExpressionType.CONST); }
    public TestIntValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestIntValue setValue(int value) { this.value = value; return this; }
    public TestIntValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public int getInt() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_int_value"; }
    @Override public String getExpressionStr() { return "test_int_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Int stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestIntValueStream extends AbstractIntValueStream {
    private int[] values;

    public TestIntValueStream() { this.values = new int[0]; }

    public TestIntValueStream setValues(int... values) { this.values = values; return this; }

    @Override public void streamInts(IntConsumer cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_int_value_stream"; }
    @Override public String getExpressionStr() { return "test_int_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single long value.
  public static class TestLongValue extends AbstractLongValue {
    private final ExpressionType expressionType;
    private long value;
    private boolean exists;

    public TestLongValue() { this(ExpressionType.CONST); }
    public TestLongValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestLongValue setValue(long value) { this.value = value; return this; }
    public TestLongValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public long getLong() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_long_value"; }
    @Override public String getExpressionStr() { return "test_long_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Long stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestLongValueStream extends AbstractLongValueStream {
    private long[] values;

    public TestLongValueStream() { this.values = new long[0]; }

    public TestLongValueStream setValues(long... values) { this.values = values; return this; }

    @Override public void streamLongs(LongConsumer cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_long_value_stream"; }
    @Override public String getExpressionStr() { return "test_long_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single float value.
  public static class TestFloatValue extends AbstractFloatValue {
    private final ExpressionType expressionType;
    private float value;
    private boolean exists;

    public TestFloatValue() { this(ExpressionType.CONST); }
    public TestFloatValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestFloatValue setValue(float value) { this.value = value; return this; }
    public TestFloatValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public float getFloat() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_float_value"; }
    @Override public String getExpressionStr() { return "test_float_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Float stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestFloatValueStream extends AbstractFloatValueStream {
    private float[] values;

    public TestFloatValueStream() { this.values = new float[0]; }

    public TestFloatValueStream setValues(float... values) { this.values = values; return this; }

    @Override public void streamFloats(FloatConsumer cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_float_value_stream"; }
    @Override public String getExpressionStr() { return "test_float_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single double value.
  public static class TestDoubleValue extends AbstractDoubleValue {
    private final ExpressionType expressionType;
    private double value;
    private boolean exists;

    public TestDoubleValue() { this(ExpressionType.CONST); }
    public TestDoubleValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestDoubleValue setValue(double value) { this.value = value; return this; }
    public TestDoubleValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public double getDouble() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_double_value"; }
    @Override public String getExpressionStr() { return "test_double_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Double stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestDoubleValueStream extends AbstractDoubleValueStream {
    private double[] values;

    public TestDoubleValueStream() { this.values = new double[0]; }

    public TestDoubleValueStream setValues(double... values) { this.values = values; return this; }

    @Override public void streamDoubles(DoubleConsumer cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_double_value_stream"; }
    @Override public String getExpressionStr() { return "test_double_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single boolean value.
  public static class TestBooleanValue extends AbstractBooleanValue {
    private final ExpressionType expressionType;
    private boolean value;
    private boolean exists;

    public TestBooleanValue() { this(ExpressionType.CONST); }
    public TestBooleanValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestBooleanValue setValue(boolean value) { this.value = value; return this; }
    public TestBooleanValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public boolean getBoolean() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_boolean_value"; }
    @Override public String getExpressionStr() { return "test_boolean_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Boolean stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestBooleanValueStream extends AbstractBooleanValueStream {
    private boolean[] values;

    public TestBooleanValueStream() { this.values = new boolean[0]; }

    public TestBooleanValueStream setValues(boolean... values) { this.values = values; return this; }

    @Override public void streamBooleans(BooleanConsumer cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_boolean_value_stream"; }
    @Override public String getExpressionStr() { return "test_boolean_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single date value, stored as epoch millis parsed from an ISO-8601 string.
  public static class TestDateValue extends AbstractDateValue {
    private final ExpressionType expressionType;
    private long value;
    private boolean exists;

    public TestDateValue() { this(ExpressionType.CONST); }
    public TestDateValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestDateValue setValue(String value) {
      try {
        this.value = Instant.parse(value).toEpochMilli();
      } catch (DateTimeParseException e) {
        // Unparsable input is stored as epoch 0 rather than failing the test setup.
        this.value = 0;
      }
      return this;
    }
    public TestDateValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public long getLong() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_date_value"; }
    @Override public String getExpressionStr() { return "test_date_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // Date stream fed with ISO-8601 strings; always UNREDUCED_MAPPING.
  public static class TestDateValueStream extends AbstractDateValueStream {
    private String[] values;

    public TestDateValueStream() { this.values = new String[0]; }

    public TestDateValueStream setValues(String... values) { this.values = values; return this; }

    @Override public void streamLongs(LongConsumer cons) {
      for (int i = 0; i < values.length; ++i) {
        try {
          cons.accept(Instant.parse(values[i]).toEpochMilli());
        } catch (DateTimeParseException e) {
          // Unparsable dates are deliberately skipped (nothing is streamed for them).
        }
      }
    }
    @Override public String getName() { return "test_date_value_stream"; }
    @Override public String getExpressionStr() { return "test_date_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }

  // Single string value.
  public static class TestStringValue extends AbstractStringValue {
    private final ExpressionType expressionType;
    private String value;
    private boolean exists;

    public TestStringValue() { this(ExpressionType.CONST); }
    public TestStringValue(ExpressionType expressionType) { this.expressionType = expressionType; }

    public TestStringValue setValue(String value) { this.value = value; return this; }
    public TestStringValue setExists(boolean exists) { this.exists = exists; return this; }

    @Override public String getString() { return value; }
    @Override public boolean exists() { return exists; }
    @Override public String getName() { return "test_string_value"; }
    @Override public String getExpressionStr() { return "test_string_value"; }
    @Override public ExpressionType getExpressionType() { return expressionType; }
  }

  // String stream; starts empty, always UNREDUCED_MAPPING.
  public static class TestStringValueStream extends AbstractStringValueStream {
    private String[] values;

    public TestStringValueStream() { this.values = new String[0]; }

    public TestStringValueStream setValues(String... values) { this.values = values; return this; }

    @Override public void streamStrings(Consumer<String> cons) {
      for (int i = 0; i < values.length; ++i) { cons.accept(values[i]); }
    }
    @Override public String getName() { return "test_string_value_stream"; }
    @Override public String getExpressionStr() { return "test_string_value_stream"; }
    @Override public ExpressionType getExpressionType() { return ExpressionType.UNREDUCED_MAPPING; }
  }
}
/* * Copyright (c) 2015 WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.policy.mgt.core.dao.impl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.device.mgt.common.policy.mgt.Profile; import org.wso2.carbon.policy.mgt.core.dao.PolicyManagementDAOFactory; import org.wso2.carbon.policy.mgt.core.dao.ProfileDAO; import org.wso2.carbon.policy.mgt.core.dao.ProfileManagerDAOException; import org.wso2.carbon.policy.mgt.core.dao.util.PolicyManagementDAOUtil; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; public class ProfileDAOImpl implements ProfileDAO { private static final Log log = LogFactory.getLog(ProfileDAOImpl.class); public Profile addProfile(Profile profile) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; ResultSet generatedKeys = null; int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); try { conn = this.getConnection(); String query = "INSERT INTO DM_PROFILE " + "(PROFILE_NAME, TENANT_ID, DEVICE_TYPE, CREATED_TIME, UPDATED_TIME) VALUES (?, ?, ?, ?, ?)"; stmt = conn.prepareStatement(query, new String[] {"id"}); stmt.setString(1, profile.getProfileName()); 
stmt.setInt(2, tenantId); stmt.setString(3, profile.getDeviceType()); stmt.setTimestamp(4, profile.getCreatedDate()); stmt.setTimestamp(5, profile.getUpdatedDate()); int affectedRows = stmt.executeUpdate(); if (affectedRows == 0 && log.isDebugEnabled()) { String msg = "No rows are updated on the profile table."; log.debug(msg); } generatedKeys = stmt.getGeneratedKeys(); if (generatedKeys.next()) { profile.setProfileId(generatedKeys.getInt(1)); } // Checking the profile id here, because profile id could have been passed from the calling method. if (profile.getProfileId() == 0) { throw new RuntimeException("Profile id is 0, this could be an issue."); } } catch (SQLException e) { String msg = "Error occurred while adding the profile to database."; log.error(msg, e); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, generatedKeys); } return profile; } public Profile updateProfile(Profile profile) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; // ResultSet generatedKeys = null; int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); try { conn = this.getConnection(); String query = "UPDATE DM_PROFILE SET PROFILE_NAME = ? , DEVICE_TYPE = ? , UPDATED_TIME = ? " + "WHERE ID = ? AND TENANT_ID = ?"; stmt = conn.prepareStatement(query); stmt.setString(1, profile.getProfileName()); stmt.setString(2, profile.getDeviceType()); stmt.setTimestamp(3, profile.getUpdatedDate()); stmt.setInt(4, profile.getProfileId()); stmt.setInt(5, tenantId); int affectedRows = stmt.executeUpdate(); if (affectedRows == 0 && log.isDebugEnabled()) { String msg = "No rows are updated on the profile table."; log.debug(msg); } // generatedKeys = stmt.getGeneratedKeys(); // // if (generatedKeys.next()) { // profile.setProfileId(generatedKeys.getInt(1)); // } // // Checking the profile id here, because profile id could have been passed from the calling method. 
// if (profile.getProfileId() == 0) { // throw new RuntimeException("Profile id is 0, this could be an issue."); // } } catch (SQLException e) { String msg = "Error occurred while updating the profile (" + profile.getProfileName() + ") in database."; log.error(msg, e); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, null); } return profile; } @Override public boolean deleteProfile(Profile profile) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; try { conn = this.getConnection(); String query = "DELETE FROM DM_PROFILE WHERE ID = ?"; stmt = conn.prepareStatement(query); stmt.setInt(1, profile.getProfileId()); if (stmt.executeUpdate() > 0) { return true; } return false; } catch (SQLException e) { String msg = "Error occurred while deleting the profile from the data base."; log.error(msg); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, null); } } @Override public boolean deleteProfile(int profileId) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; try { conn = this.getConnection(); String query = "DELETE FROM DM_PROFILE WHERE ID = ?"; stmt = conn.prepareStatement(query); stmt.setInt(1, profileId); if (stmt.executeUpdate() > 0) { return true; } return false; } catch (SQLException e) { String msg = "Error occurred while deleting the profile from the data base."; log.error(msg); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, null); } } @Override public Profile getProfile(int profileId) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; ResultSet resultSet = null; Profile profile = null; try { conn = this.getConnection(); String query = "SELECT * FROM DM_PROFILE WHERE ID = ?"; stmt = conn.prepareStatement(query); stmt.setInt(1, profileId); resultSet = stmt.executeQuery(); while (resultSet.next()) { 
profile = new Profile(); profile.setProfileId(profileId); profile.setProfileName(resultSet.getString("PROFILE_NAME")); profile.setTenantId(resultSet.getInt("TENANT_ID")); profile.setDeviceType(resultSet.getString("DEVICE_TYPE")); profile.setCreatedDate(resultSet.getTimestamp("CREATED_TIME")); profile.setUpdatedDate(resultSet.getTimestamp("UPDATED_TIME")); } } catch (SQLException e) { String msg = "Error occurred while reading the profile from the database."; log.error(msg, e); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, resultSet); } return profile; } @Override public List<Profile> getAllProfiles() throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; ResultSet resultSet = null; List<Profile> profileList = new ArrayList<>(); try { //TODO : Fix with TenantID. conn = this.getConnection(); String query = "SELECT * FROM DM_PROFILE"; stmt = conn.prepareStatement(query); resultSet = stmt.executeQuery(); while (resultSet.next()) { Profile profile = new Profile(); profile.setProfileId(resultSet.getInt("ID")); profile.setProfileName(resultSet.getString("PROFILE_NAME")); profile.setTenantId(resultSet.getInt("TENANT_ID")); profile.setCreatedDate(resultSet.getTimestamp("CREATED_TIME")); profile.setUpdatedDate(resultSet.getTimestamp("UPDATED_TIME")); profile.setDeviceType(resultSet.getString("DEVICE_TYPE")); profileList.add(profile); } } catch (SQLException e) { String msg = "Error occurred while reading the profile list from the database."; log.error(msg, e); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, resultSet); } return profileList; } @Override public List<Profile> getProfilesOfDeviceType(String deviceType) throws ProfileManagerDAOException { Connection conn; PreparedStatement stmt = null; ResultSet resultSet = null; List<Profile> profileList = new ArrayList<>(); try { conn = this.getConnection(); String query = "SELECT * 
FROM DM_PROFILE WHERE DEVICE_TYPE = ?"; stmt = conn.prepareStatement(query); stmt.setString(1, deviceType); resultSet = stmt.executeQuery(); while (resultSet.next()) { Profile profile = new Profile(); profile.setProfileId(resultSet.getInt("ID")); profile.setProfileName(resultSet.getString("PROFILE_NAME")); profile.setTenantId(resultSet.getInt("TENANT_ID")); profile.setDeviceType(resultSet.getString("DEVICE_TYPE")); profile.setCreatedDate(resultSet.getTimestamp("CREATED_TIME")); profile.setUpdatedDate(resultSet.getTimestamp("UPDATED_TIME")); profileList.add(profile); } } catch (SQLException e) { String msg = "Error occurred while reading the profile list from the database."; log.error(msg, e); throw new ProfileManagerDAOException(msg, e); } finally { PolicyManagementDAOUtil.cleanupResources(stmt, resultSet); } return profileList; } private Connection getConnection() throws ProfileManagerDAOException { return PolicyManagementDAOFactory.getConnection(); } }
package tw.yalan.eyedow; import android.content.Context; import android.graphics.PixelFormat; import android.os.Build; import android.util.Log; import android.view.Gravity; import android.view.View; import android.view.WindowManager; import android.view.animation.Animation; import android.widget.FrameLayout; import tw.yalan.eyedow.util.ViewHelper; /** * Created by Alan Ding on 2016/6/15. */ public class EyedowContainer extends FrameLayout implements Eyedow { public interface onScreenSizeChangeListener { void onFullScreen(); void onResize(int x, int y, int width, int height, boolean canDrag); void onRevert(int x, int y, int width, int height); } private onScreenSizeChangeListener onScreenSizeChangeListener; private View view; private WindowManager windowManager; private int containerWidth = -1; private int containerHeight = -1; private boolean canDrag; private boolean isAttachToWindow = false; private int lastX, lastY; private boolean isFullScreen = false; private boolean isDefaultSize = true; private boolean lastCanDragTemp; public EyedowContainer(Context context, View view, boolean canDrag) { super(context); this.view = view; windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); addView(this.view, new LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT)); this.canDrag = canDrag; if (canDrag) { setOnTouchListener(new EyedowDragTouchListener(windowManager)); } } public void setCanDrag(boolean canDrag) { this.canDrag = canDrag; if (canDrag) { setOnTouchListener(new EyedowDragTouchListener(windowManager)); } else { setOnTouchListener(null); } } public int getContainerWidth() { return containerWidth; } public int getContainerHeight() { return containerHeight; } public void setContainerWidth(int containerWidth) { this.containerWidth = containerWidth; } public void setContainerHeight(int containerHeight) { this.containerHeight = containerHeight; } public EyedowContainer.onScreenSizeChangeListener 
getOnScreenSizeChangeListener() { return onScreenSizeChangeListener; } public void setOnScreenSizeChangeListener(EyedowContainer.onScreenSizeChangeListener onScreenSizeChangeListener) { this.onScreenSizeChangeListener = onScreenSizeChangeListener; } @Override public void fullScreen() { isDefaultSize = false; isFullScreen = true; WindowManager.LayoutParams layoutParams = (WindowManager.LayoutParams) getLayoutParams(); layoutParams.height = WindowManager.LayoutParams.MATCH_PARENT; layoutParams.width = WindowManager.LayoutParams.MATCH_PARENT; lastX = layoutParams.x; lastY = layoutParams.y; layoutParams.x = 0; layoutParams.y = 0; lastCanDragTemp = this.canDrag; windowManager.removeView(this); windowManager.addView(this, layoutParams); if (onScreenSizeChangeListener != null) { onScreenSizeChangeListener.onFullScreen(); } } @Override public void resizeScreen(int x, int y, int width, int height, boolean canDrag) { isDefaultSize = false; isFullScreen = false; WindowManager.LayoutParams layoutParams = (WindowManager.LayoutParams) getLayoutParams(); layoutParams.height = width; layoutParams.width = height; lastX = layoutParams.x; lastY = layoutParams.y; layoutParams.x = x; layoutParams.y = y; lastCanDragTemp = this.canDrag; setCanDrag(canDrag); windowManager.removeView(this); windowManager.addView(this, layoutParams); if (onScreenSizeChangeListener != null) { onScreenSizeChangeListener.onResize(x, y, width, height, canDrag); } } @Override public void revertScreen() { isDefaultSize = true; isFullScreen = false; WindowManager.LayoutParams layoutParams = (WindowManager.LayoutParams) getLayoutParams(); layoutParams.height = ViewHelper.dpToPx(getContext(), containerHeight); layoutParams.width = ViewHelper.dpToPx(getContext(), containerWidth); layoutParams.x = lastX; layoutParams.y = lastY; setCanDrag(lastCanDragTemp); windowManager.removeView(this); windowManager.addView(this, layoutParams); if (onScreenSizeChangeListener != null) { onScreenSizeChangeListener.onRevert(lastX, 
lastY, layoutParams.width, layoutParams.height); } } @Override public void show(final Animation animation) { Log.e("eyedow", "show [" + this.getClass().getSimpleName() + "]"); if (windowManager != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { if (!isAttachedToWindow()) { Log.e("eyedow", "add view [" + this.getClass().getSimpleName() + "]"); windowManager.addView(this, createLayoutParams(10, 100)); } } else { if (!isAttachToWindow()) { Log.e("eyedow", "add view [" + this.getClass().getSimpleName() + "]"); windowManager.addView(this, createLayoutParams(10, 100)); } } Log.e("eyedow", "set Visible [" + this.getClass().getSimpleName() + "]"); setVisibility(View.VISIBLE); getChildAt(0).startAnimation(animation); isAttachToWindow = true; } } public boolean isAttachToWindow() { return isAttachToWindow; } public boolean isFullScreen() { return isFullScreen; } public boolean isDefaultSize() { return isDefaultSize; } @Override public void hide(Animation animation) { animation.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { if (isAttachedToWindow()) { setVisibility(View.GONE); } } else { if (isAttachToWindow()) { setVisibility(View.GONE); } } } @Override public void onAnimationRepeat(Animation animation) { } }); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { if (isAttachedToWindow()) { getChildAt(0).startAnimation(animation); } } else { if (isAttachToWindow()) { getChildAt(0).startAnimation(animation); } } } @Override public void remove(Animation animation) { isAttachToWindow = false; getChildAt(0).startAnimation(animation); } public WindowManager.LayoutParams createLayoutParams(int x, int y) { WindowManager.LayoutParams params = new WindowManager.LayoutParams( containerWidth == -1 ? 
WindowManager.LayoutParams.WRAP_CONTENT : ViewHelper.dpToPx(getContext(), containerWidth), containerHeight == -1 ? WindowManager.LayoutParams.WRAP_CONTENT : ViewHelper.dpToPx(getContext(), containerHeight), WindowManager.LayoutParams.TYPE_PHONE, WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.TRANSLUCENT); params.gravity = Gravity.TOP | Gravity.LEFT; params.x = x; params.y = y; return params; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.metrics.statsd;

import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Metric;
import org.apache.flink.metrics.MetricConfig;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.SimpleCounter;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
import org.apache.flink.metrics.util.TestCounter;
import org.apache.flink.metrics.util.TestHistogram;
import org.apache.flink.metrics.util.TestMeter;
import org.apache.flink.metrics.util.TestMetricGroup;
import org.apache.flink.util.TestLogger;

import org.junit.Test;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/** Tests for the StatsDReporter. */
public class StatsDReporterTest extends TestLogger {

    @Test
    public void testReplaceInvalidChars() {
        StatsDReporter reporter = new StatsDReporter();

        assertEquals("", reporter.filterCharacters(""));
        assertEquals("abc", reporter.filterCharacters("abc"));
        assertEquals("a-b--", reporter.filterCharacters("a:b::"));
    }

    /** Tests that the registered metrics' names don't contain invalid characters. */
    @Test
    public void testAddingMetrics() {
        String counterName = "testCounter";

        final String scope = "scope";
        final char delimiter = '_';

        MetricGroup metricGroup =
                TestMetricGroup.newBuilder()
                        .setMetricIdentifierFunction(
                                (metricName, characterFilter) -> scope + delimiter + metricName)
                        .build();

        TestingStatsDReporter reporter = new TestingStatsDReporter();
        reporter.open(new MetricConfig());

        SimpleCounter myCounter = new SimpleCounter();
        reporter.notifyOfAddedMetric(myCounter, counterName, metricGroup);

        // the reporter should have registered the counter under the filtered, scoped name
        Map<Counter, String> counters = reporter.getCounters();
        assertTrue(counters.containsKey(myCounter));

        String expectedCounterName =
                reporter.filterCharacters(scope)
                        + delimiter
                        + reporter.filterCharacters(counterName);

        assertEquals(expectedCounterName, counters.get(myCounter));
    }

    /** Tests that histograms are properly reported via the StatsD reporter. */
    @Test
    public void testStatsDHistogramReporting() throws Exception {
        Set<String> expectedLines = new HashSet<>(6);
        expectedLines.add("metric.count:1|g");
        expectedLines.add("metric.mean:4.0|g");
        expectedLines.add("metric.min:7|g");
        expectedLines.add("metric.max:6|g");
        expectedLines.add("metric.stddev:5.0|g");
        expectedLines.add("metric.p75:0.75|g");
        expectedLines.add("metric.p98:0.98|g");
        expectedLines.add("metric.p99:0.99|g");
        expectedLines.add("metric.p999:0.999|g");
        expectedLines.add("metric.p95:0.95|g");
        expectedLines.add("metric.p50:0.5|g");

        testMetricAndAssert(new TestHistogram(), "metric", expectedLines);
    }

    @Test
    public void testStatsDHistogramReportingOfNegativeValues() throws Exception {
        TestHistogram histogram = new TestHistogram();
        histogram.setCount(-101);
        histogram.setMean(-104);
        histogram.setMin(-107);
        histogram.setMax(-106);
        histogram.setStdDev(-105);

        // negative gauge values are reset to 0 first, hence the paired lines
        Set<String> expectedLines = new HashSet<>();
        expectedLines.add("metric.count:0|g");
        expectedLines.add("metric.count:-101|g");
        expectedLines.add("metric.mean:0|g");
        expectedLines.add("metric.mean:-104.0|g");
        expectedLines.add("metric.min:0|g");
        expectedLines.add("metric.min:-107|g");
        expectedLines.add("metric.max:0|g");
        expectedLines.add("metric.max:-106|g");
        expectedLines.add("metric.stddev:0|g");
        expectedLines.add("metric.stddev:-105.0|g");
        expectedLines.add("metric.p75:0.75|g");
        expectedLines.add("metric.p98:0.98|g");
        expectedLines.add("metric.p99:0.99|g");
        expectedLines.add("metric.p999:0.999|g");
        expectedLines.add("metric.p95:0.95|g");
        expectedLines.add("metric.p50:0.5|g");

        testMetricAndAssert(histogram, "metric", expectedLines);
    }

    /** Tests that meters are properly reported via the StatsD reporter. */
    @Test
    public void testStatsDMetersReporting() throws Exception {
        Set<String> expectedLines = new HashSet<>(4);
        expectedLines.add("metric.rate:5.0|g");
        expectedLines.add("metric.count:100|g");

        testMetricAndAssert(new TestMeter(), "metric", expectedLines);
    }

    @Test
    public void testStatsDMetersReportingOfNegativeValues() throws Exception {
        Set<String> expectedLines = new HashSet<>();
        expectedLines.add("metric.rate:0|g");
        expectedLines.add("metric.rate:-5.3|g");
        expectedLines.add("metric.count:0|g");
        expectedLines.add("metric.count:-50|g");

        testMetricAndAssert(new TestMeter(-50, -5.3), "metric", expectedLines);
    }

    /** Tests that counter are properly reported via the StatsD reporter. */
    @Test
    public void testStatsDCountersReporting() throws Exception {
        Set<String> expectedLines = new HashSet<>(2);
        expectedLines.add("metric:100|g");

        testMetricAndAssert(new TestCounter(100), "metric", expectedLines);
    }

    @Test
    public void testStatsDCountersReportingOfNegativeValues() throws Exception {
        Set<String> expectedLines = new HashSet<>();
        expectedLines.add("metric:0|g");
        expectedLines.add("metric:-51|g");

        testMetricAndAssert(new TestCounter(-51), "metric", expectedLines);
    }

    @Test
    public void testStatsDGaugesReporting() throws Exception {
        Set<String> expectedLines = new HashSet<>(2);
        expectedLines.add("metric:75|g");

        testMetricAndAssert((Gauge<Integer>) () -> 75, "metric", expectedLines);
    }

    @Test
    public void testStatsDGaugesReportingOfNegativeValues() throws Exception {
        Set<String> expectedLines = new HashSet<>();
        expectedLines.add("metric:0|g");
        expectedLines.add("metric:-12345|g");

        testMetricAndAssert((Gauge<Integer>) () -> -12345, "metric", expectedLines);
    }

    /**
     * Registers {@code metric} with a fresh reporter, triggers one report over UDP
     * and asserts that exactly {@code expectation} arrived at the local receiver.
     */
    private void testMetricAndAssert(Metric metric, String metricName, Set<String> expectation)
            throws Exception {
        StatsDReporter reporter = null;
        DatagramSocketReceiver receiver = null;
        Thread receiverThread = null;
        long timeout = 5000;
        long joinTimeout = 30000;

        try {
            receiver = new DatagramSocketReceiver();

            receiverThread = new Thread(receiver);

            receiverThread.start();

            int port = receiver.getPort();

            MetricConfig config = new MetricConfig();
            config.setProperty("host", "localhost");
            config.setProperty("port", String.valueOf(port));

            reporter = new StatsDReporter();
            reporter.open(config);

            MetricGroup metricGroup = new UnregisteredMetricsGroup();

            reporter.notifyOfAddedMetric(metric, metricName, metricGroup);
            reporter.report();

            receiver.waitUntilNumLines(expectation.size(), timeout);
            assertEquals(expectation, receiver.getLines());
        } finally {
            // tear down in reverse order of creation; each guard tolerates
            // an earlier failure leaving the reference null
            if (reporter != null) {
                reporter.close();
            }

            if (receiver != null) {
                receiver.stop();
            }

            if (receiverThread != null) {
                receiverThread.join(joinTimeout);
            }
        }
    }

    /** Testing StatsDReporter which disables the socket creation. */
    public static class TestingStatsDReporter extends StatsDReporter {
        @Override
        public void open(MetricConfig configuration) {
            // disable the socket creation
        }

        public Map<Counter, String> getCounters() {
            return counters;
        }
    }

    /** Collects UDP datagrams on a local socket, one line per packet. */
    private static class DatagramSocketReceiver implements Runnable {
        private static final Object obj = new Object();

        private final DatagramSocket socket;
        private final ConcurrentHashMap<String, Object> lines;

        // BUG FIX: must be volatile — stop() is called from the test thread
        // while run() polls this flag from the receiver thread; without
        // volatile the receiver may never observe the update.
        private volatile boolean running = true;

        public DatagramSocketReceiver() throws SocketException {
            socket = new DatagramSocket();
            lines = new ConcurrentHashMap<>();
        }

        public int getPort() {
            return socket.getLocalPort();
        }

        public void stop() {
            running = false;
            socket.close();
        }

        /**
         * Blocks until at least {@code numberLines} distinct lines have been
         * received or {@code timeout} ms have elapsed.
         *
         * @throws TimeoutException if the expected number of lines did not
         *     arrive within the timeout
         */
        public void waitUntilNumLines(int numberLines, long timeout) throws TimeoutException {
            long endTimeout = System.currentTimeMillis() + timeout;

            synchronized (lines) {
                long remainingTimeout = timeout;
                while (numberLines > lines.size() && remainingTimeout > 0) {
                    try {
                        lines.wait(remainingTimeout);
                    } catch (InterruptedException e) {
                        // ignore interruption exceptions
                    }
                    // BUG FIX: recompute the remaining budget inside the loop;
                    // previously it was only updated after the loop, so every
                    // timed-out or spurious wakeup re-waited the full timeout.
                    remainingTimeout = endTimeout - System.currentTimeMillis();
                }

                // BUG FIX: fail on the actual condition (lines still missing)
                // rather than on elapsed time, which wrongly threw when the
                // lines arrived exactly as the deadline passed.
                if (numberLines > lines.size()) {
                    throw new TimeoutException("Have not received " + numberLines + " in time.");
                }
            }
        }

        public Set<String> getLines() {
            return lines.keySet();
        }

        @Override
        public void run() {
            while (running) {
                try {
                    byte[] buffer = new byte[1024];
                    DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
                    socket.receive(packet);

                    String line =
                            new String(
                                    packet.getData(),
                                    0,
                                    packet.getLength(),
                                    ConfigConstants.DEFAULT_CHARSET);

                    lines.put(line, obj);

                    synchronized (lines) {
                        lines.notifyAll();
                    }
                } catch (IOException ex) {
                    // ignore the exceptions
                }
            }
        }
    }
}
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wso2.carbon.identity.mgt.impl;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.identity.mgt.AuthenticationContext;
import org.wso2.carbon.identity.mgt.Group;
import org.wso2.carbon.identity.mgt.IdentityStore;
import org.wso2.carbon.identity.mgt.User;
import org.wso2.carbon.identity.mgt.bean.GroupBean;
import org.wso2.carbon.identity.mgt.bean.UserBean;
import org.wso2.carbon.identity.mgt.claim.Claim;
import org.wso2.carbon.identity.mgt.claim.MetaClaim;
import org.wso2.carbon.identity.mgt.exception.AuthenticationFailure;
import org.wso2.carbon.identity.mgt.exception.GroupNotFoundException;
import org.wso2.carbon.identity.mgt.exception.IdentityStoreException;
import org.wso2.carbon.identity.mgt.exception.UserNotFoundException;
import org.wso2.carbon.identity.mgt.impl.config.CacheConfig;
import org.wso2.carbon.identity.mgt.impl.util.CacheHelper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.security.auth.callback.Callback;

import static org.wso2.carbon.kernel.utils.StringUtils.isNullOrEmpty;

/**
 * Virtual identity store with the caching.
 * <p>
 * Decorates another {@link IdentityStore}: lookups by unique id for users and
 * groups are served from JSR-107 caches when enabled; every other operation
 * delegates straight through. Deletions evict the corresponding cache entry.
 *
 * @since 1.0.0
 */
public class CacheBackedIdentityStore implements IdentityStore {

    private static Logger log = LoggerFactory.getLogger(CacheBackedIdentityStore.class);

    private static final String UNIQUE_USER_CACHE = "uniqueUserCache";

    private static final String UNIQUE_GROUP_CACHE = "uniqueGroupCache";

    private IdentityStore identityStore;

    // Per-cache enabled flag, keyed by cache name; populated in the constructor.
    private Map<String, Boolean> cacheStatus = new HashMap<>();

    private CacheManager cacheManager;

    public CacheBackedIdentityStore(Map<String, CacheConfig> cacheConfigs, IdentityStore identityStore,
                                    CacheManager cacheManager) throws IdentityStoreException {

        this.identityStore = identityStore;
        this.cacheManager = cacheManager;

        // Initialize all caches.
        // BUG FIX: the group cache was previously created with value type
        // User.class, while doGetGroup() fetches it as Cache<String, Group>.
        initCacheIfRequired(UNIQUE_USER_CACHE, User.class, cacheConfigs);
        initCacheIfRequired(UNIQUE_GROUP_CACHE, Group.class, cacheConfigs);

        if (log.isDebugEnabled()) {
            log.debug("Cache backed identity store successfully initialized.");
        }
    }

    /**
     * Creates the named cache with the given value type when enabled in the
     * configuration, and records whether it is active in {@link #cacheStatus}.
     */
    private void initCacheIfRequired(String cacheName, Class<?> valueType,
                                     Map<String, CacheConfig> cacheConfigs) {

        if (CacheHelper.isCacheEnabled(cacheConfigs, cacheName)) {
            CacheHelper.createCache(cacheName, String.class, valueType, CacheHelper.MEDIUM_EXPIRE_TIME, cacheConfigs,
                    cacheManager);
            cacheStatus.put(cacheName, true);
        } else {
            cacheStatus.put(cacheName, false);
        }
    }

    @Override
    public boolean isUserExist(List<Claim> userClaims, String domainName) throws IdentityStoreException {

        return identityStore.isUserExist(userClaims, domainName);
    }

    @Override
    public List<String> isUserExist(List<Claim> userClaims) throws IdentityStoreException {

        return identityStore.isUserExist(userClaims);
    }

    @Override
    public User getUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException {

        // BUG FIX: the cache path previously required isNullOrEmpty(uniqueUserId)
        // (i.e. was only taken for empty ids); the group analogue below shows the
        // intended negated check.
        if (cacheStatus.get(UNIQUE_USER_CACHE) && !isNullOrEmpty(uniqueUserId)) {
            return doGetUser(uniqueUserId, identityStore.getPrimaryDomainName());
        }

        User user = identityStore.getUser(uniqueUserId);
        user.setIdentityStore(this);
        return user;
    }

    @Override
    public User getUser(Claim claim) throws IdentityStoreException, UserNotFoundException {

        return identityStore.getUser(claim);
    }

    @Override
    public User getUser(Claim claim, String domainName) throws IdentityStoreException, UserNotFoundException {

        // BUG FIX: previously delegated to getUser(claim), silently discarding
        // the caller-supplied domain name.
        return identityStore.getUser(claim, domainName);
    }

    @Override
    public List<User> listUsers(int offset, int length) throws IdentityStoreException {

        return identityStore.listUsers(offset, length);
    }

    @Override
    public List<User> listUsers(int offset, int length, String domainName) throws IdentityStoreException {

        return identityStore.listUsers(offset, length, domainName);
    }

    @Override
    public List<User> listUsers(Claim claim, int offset, int length) throws IdentityStoreException {

        return identityStore.listUsers(claim, offset, length);
    }

    @Override
    public List<User> listUsers(Claim claim, int offset, int length, String domainName) throws IdentityStoreException {

        return identityStore.listUsers(claim, offset, length, domainName);
    }

    @Override
    public List<User> listUsers(MetaClaim metaClaim, String filterPattern, int offset, int length)
            throws IdentityStoreException {

        return identityStore.listUsers(metaClaim, filterPattern, offset, length);
    }

    @Override
    public List<User> listUsers(MetaClaim metaClaim, String filterPattern, int offset, int length, String domainName)
            throws IdentityStoreException {

        return identityStore.listUsers(metaClaim, filterPattern, offset, length, domainName);
    }

    @Override
    public List<User> listUsers(List<Claim> claims, int offset, int length) throws IdentityStoreException {

        return identityStore.listUsers(claims, offset, length);
    }

    @Override
    public List<User> listUsers(List<Claim> claims, int offset, int length, String domainName)
            throws IdentityStoreException {

        return identityStore.listUsers(claims, offset, length, domainName);
    }

    @Override
    public boolean isGroupExist(List<Claim> groupClaims, String domainName) throws IdentityStoreException {

        return identityStore.isGroupExist(groupClaims, domainName);
    }

    @Override
    public Group getGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException {

        if (cacheStatus.get(UNIQUE_GROUP_CACHE) && !isNullOrEmpty(uniqueGroupId)) {
            return doGetGroup(uniqueGroupId, identityStore.getPrimaryDomainName());
        }

        Group group = identityStore.getGroup(uniqueGroupId);
        group.setIdentityStore(this);
        return group;
    }

    @Override
    public Group getGroup(Claim claim) throws IdentityStoreException, GroupNotFoundException {

        return identityStore.getGroup(claim);
    }

    @Override
    public Group getGroup(Claim claim, String domainName) throws IdentityStoreException, GroupNotFoundException {

        return identityStore.getGroup(claim, domainName);
    }

    @Override
    public List<Group> listGroups(int offset, int length) throws IdentityStoreException {

        return identityStore.listGroups(offset, length);
    }

    @Override
    public List<Group> listGroups(int offset, int length, String domainName) throws IdentityStoreException {

        return identityStore.listGroups(offset, length, domainName);
    }

    @Override
    public List<Group> listGroups(Claim claim, int offset, int length) throws IdentityStoreException {

        return identityStore.listGroups(claim, offset, length);
    }

    @Override
    public List<Group> listGroups(Claim claim, int offset, int length, String domainName)
            throws IdentityStoreException {

        return identityStore.listGroups(claim, offset, length, domainName);
    }

    @Override
    public List<Group> listGroups(MetaClaim metaClaim, String filterPattern, int offset, int length)
            throws IdentityStoreException {

        return identityStore.listGroups(metaClaim, filterPattern, offset, length);
    }

    @Override
    public List<Group> listGroups(MetaClaim metaClaim, String filterPattern, int offset, int length, String domainName)
            throws IdentityStoreException {

        return identityStore.listGroups(metaClaim, filterPattern, offset, length, domainName);
    }

    @Override
    public List<Group> getGroupsOfUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException {

        return identityStore.getGroupsOfUser(uniqueUserId);
    }

    @Override
    public List<User> getUsersOfGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException {

        return identityStore.getUsersOfGroup(uniqueGroupId);
    }

    @Override
    public boolean isUserInGroup(String uniqueUserId, String uniqueGroupId)
            throws IdentityStoreException, UserNotFoundException, GroupNotFoundException {

        return identityStore.isUserInGroup(uniqueUserId, uniqueGroupId);
    }

    @Override
    public List<Claim> getClaimsOfUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException {

        return identityStore.getClaimsOfUser(uniqueUserId);
    }

    @Override
    public List<Claim> getClaimsOfUser(String uniqueUserId, List<MetaClaim> metaClaims)
            throws IdentityStoreException, UserNotFoundException {

        return identityStore.getClaimsOfUser(uniqueUserId, metaClaims);
    }

    @Override
    public List<Claim> getClaimsOfGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException {

        return identityStore.getClaimsOfGroup(uniqueGroupId);
    }

    @Override
    public List<Claim> getClaimsOfGroup(String uniqueGroupId, List<MetaClaim> metaClaims)
            throws IdentityStoreException, GroupNotFoundException {

        return identityStore.getClaimsOfGroup(uniqueGroupId, metaClaims);
    }

    @Override
    public User addUser(UserBean userBean) throws IdentityStoreException {

        return identityStore.addUser(userBean);
    }

    @Override
    public User addUser(UserBean userBean, String domainName) throws IdentityStoreException {

        return identityStore.addUser(userBean, domainName);
    }

    @Override
    public List<User> addUsers(List<UserBean> userBeen) throws IdentityStoreException {

        return identityStore.addUsers(userBeen);
    }

    @Override
    public List<User> addUsers(List<UserBean> userBeen, String domainName) throws IdentityStoreException {

        return identityStore.addUsers(userBeen, domainName);
    }

    @Override
    public void updateUserClaims(String uniqueUserId, List<Claim> claims)
            throws IdentityStoreException, UserNotFoundException {

        identityStore.updateUserClaims(uniqueUserId, claims);
    }

    @Override
    public void updateUserClaims(String uniqueUserId, List<Claim> claimsToAdd, List<Claim> claimsToRemove)
            throws IdentityStoreException, UserNotFoundException {

        identityStore.updateUserClaims(uniqueUserId, claimsToAdd, claimsToRemove);
    }

    @Override
    public void updateUserCredentials(String uniqueUserId, List<Callback> credentials)
            throws IdentityStoreException, UserNotFoundException {

        identityStore.updateUserCredentials(uniqueUserId, credentials);
    }

    @Override
    public void updateUserCredentials(String uniqueUserId, List<Callback> credentialsToAdd,
                                      List<Callback> credentialsToRemove)
            throws IdentityStoreException, UserNotFoundException {

        identityStore.updateUserCredentials(uniqueUserId, credentialsToAdd, credentialsToRemove);
    }

    @Override
    public void deleteUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException {

        identityStore.deleteUser(uniqueUserId);
        // Evict the (possibly stale) cache entry after the backing delete succeeds.
        doDeleteUser(uniqueUserId, identityStore.getPrimaryDomainName());
    }

    @Override
    public void updateGroupsOfUser(String uniqueUserId, List<String> uniqueGroupIds) throws IdentityStoreException {

        identityStore.updateGroupsOfUser(uniqueUserId, uniqueGroupIds);
    }

    @Override
    public void updateGroupsOfUser(String uniqueUserId, List<String> uniqueGroupIdsToAdd,
                                   List<String> uniqueGroupIdsToRemove) throws IdentityStoreException {

        identityStore.updateGroupsOfUser(uniqueUserId, uniqueGroupIdsToAdd, uniqueGroupIdsToRemove);
    }

    @Override
    public Group addGroup(GroupBean groupBean) throws IdentityStoreException {

        return identityStore.addGroup(groupBean);
    }

    @Override
    public Group addGroup(GroupBean groupBean, String domainName) throws IdentityStoreException {

        return identityStore.addGroup(groupBean, domainName);
    }

    @Override
    public List<Group> addGroups(List<GroupBean> groupBeen) throws IdentityStoreException {

        return identityStore.addGroups(groupBeen);
    }

    @Override
    public List<Group> addGroups(List<GroupBean> groupBeen, String domainName) throws IdentityStoreException {

        return identityStore.addGroups(groupBeen, domainName);
    }

    @Override
    public void updateGroupClaims(String uniqueGroupId, List<Claim> claims)
            throws IdentityStoreException, GroupNotFoundException {

        identityStore.updateGroupClaims(uniqueGroupId, claims);
    }

    @Override
    public void updateGroupClaims(String uniqueGroupId, List<Claim> claimsToAdd, List<Claim> claimsToRemove)
            throws IdentityStoreException, GroupNotFoundException {

        identityStore.updateGroupClaims(uniqueGroupId, claimsToAdd, claimsToRemove);
    }

    @Override
    public void deleteGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException {

        identityStore.deleteGroup(uniqueGroupId);
        // Evict the (possibly stale) cache entry after the backing delete succeeds.
        doDeleteGroup(uniqueGroupId, identityStore.getPrimaryDomainName());
    }

    @Override
    public void updateUsersOfGroup(String uniqueGroupId, List<String> uniqueUserIds) throws IdentityStoreException {

        identityStore.updateUsersOfGroup(uniqueGroupId, uniqueUserIds);
    }

    @Override
    public void updateUsersOfGroup(String uniqueGroupId, List<String> uniqueUserIdsToAdd,
                                   List<String> uniqueUserIdsToRemove) throws IdentityStoreException {

        identityStore.updateUsersOfGroup(uniqueGroupId, uniqueUserIdsToAdd, uniqueUserIdsToRemove);
    }

    @Override
    public AuthenticationContext authenticate(Claim claim, Callback[] credentials, String domainName)
            throws AuthenticationFailure, IdentityStoreException {

        return identityStore.authenticate(claim, credentials, domainName);
    }

    @Override
    public String getPrimaryDomainName() throws IdentityStoreException {

        return identityStore.getPrimaryDomainName();
    }

    @Override
    public Set<String> getDomainNames() throws IdentityStoreException {

        return identityStore.getDomainNames();
    }

    @Override
    public void setUserState(String uniqueUserId, String targetState)
            throws IdentityStoreException, UserNotFoundException {

        identityStore.setUserState(uniqueUserId, targetState);
    }

    /**
     * Cache-aside lookup for a user by unique id: serve from the user cache,
     * falling back to the wrapped store and populating the cache on a miss.
     * NOTE(review): the cache key concatenates two int hashCodes, which is
     * collision-prone across distinct (id, domain) pairs — consider keying on
     * the raw strings instead.
     */
    private User doGetUser(String uniqueUserId, String domainName) throws IdentityStoreException,
            UserNotFoundException {

        Cache<String, User> userCache = cacheManager.getCache(UNIQUE_USER_CACHE, String.class, User.class);
        User user = userCache.get(uniqueUserId.hashCode() + ":" + domainName.hashCode());

        if (user == null) {
            user = identityStore.getUser(uniqueUserId);
            userCache.put(user.getUniqueUserId().hashCode() + ":" + user.getDomainName().hashCode(), user);
            user.setIdentityStore(this);
            return user;
        }

        user.setIdentityStore(this);
        return user;
    }

    /**
     * Cache-aside lookup for a group by unique id; see {@link #doGetUser} for
     * the caching pattern and the key-collision caveat.
     */
    private Group doGetGroup(String uniqueGroupId, String domainName) throws IdentityStoreException,
            GroupNotFoundException {

        Cache<String, Group> groupCache = cacheManager.getCache(UNIQUE_GROUP_CACHE, String.class, Group.class);
        Group group = groupCache.get(uniqueGroupId.hashCode() + ":" + domainName.hashCode());

        if (group == null) {
            group = identityStore.getGroup(uniqueGroupId);
            groupCache.put(group.getUniqueGroupId().hashCode() + ":" + group.getDomainName().hashCode(), group);
            group.setIdentityStore(this);
            return group;
        }

        group.setIdentityStore(this);
        return group;
    }

    // Evicts the cached user entry for the given id/domain pair.
    private void doDeleteUser(String uniqueUserId, String domainName) {

        Cache<String, User> userCache = cacheManager.getCache(UNIQUE_USER_CACHE, String.class, User.class);
        userCache.remove(uniqueUserId.hashCode() + ":" + domainName.hashCode());
    }

    // Evicts the cached group entry for the given id/domain pair.
    private void doDeleteGroup(String uniqueGroupId, String domainName) {

        Cache<String, Group> groupCache = cacheManager.getCache(UNIQUE_GROUP_CACHE, String.class, Group.class);
        groupCache.remove(uniqueGroupId.hashCode() + ":" + domainName.hashCode());
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.03.16 at 01:25:33 AM IST
//

package org.akomantoso.schema.v3.csd08;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;

/**
 * JAXB binding for the Akoma Ntoso 3.0 CSD08 {@code componentData} element.
 *
 * <p>The element may contain a sequence of nested {@code componentData}
 * children and carries the {@code link}, {@code show}, {@code name},
 * {@code idreq} and {@code core} attribute groups, plus a lax wildcard for
 * attributes from other namespaces.
 *
 * <p>GENERATED CODE — do not hand-edit; regenerate from the schema instead.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "componentData"
})
@XmlRootElement(name = "componentData")
public class ComponentData {

    // Nested componentData children; lazily created by the getter.
    protected List<ComponentData> componentData;
    @XmlAttribute(name = "href", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String href;
    @XmlAttribute(name = "showAs", required = true)
    protected String showAs;
    @XmlAttribute(name = "shortForm")
    protected String shortForm;
    @XmlAttribute(name = "name", required = true)
    protected String name;
    @XmlAttribute(name = "currentId", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String currentId;
    @XmlAttribute(name = "originalId")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String originalId;
    @XmlAttribute(name = "GUID")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String guid;
    // Attributes not bound to any typed property (lax wildcard).
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Returns the live, lazily-created list of nested {@link ComponentData}
     * elements. Mutations to the returned list are reflected in this object,
     * which is why there is no corresponding setter.
     *
     * @return the live list, never {@code null}
     */
    public List<ComponentData> getComponentData() {
        if (componentData == null) {
            componentData = new ArrayList<ComponentData>();
        }
        return this.componentData;
    }

    /** @return the {@code href} attribute value, possibly {@code null} */
    public String getHref() {
        return href;
    }

    /** @param value the new {@code href} attribute value */
    public void setHref(String value) {
        this.href = value;
    }

    /** @return the {@code showAs} attribute value, possibly {@code null} */
    public String getShowAs() {
        return showAs;
    }

    /** @param value the new {@code showAs} attribute value */
    public void setShowAs(String value) {
        this.showAs = value;
    }

    /** @return the {@code shortForm} attribute value, possibly {@code null} */
    public String getShortForm() {
        return shortForm;
    }

    /** @param value the new {@code shortForm} attribute value */
    public void setShortForm(String value) {
        this.shortForm = value;
    }

    /** @return the {@code name} attribute value, possibly {@code null} */
    public String getName() {
        return name;
    }

    /** @param value the new {@code name} attribute value */
    public void setName(String value) {
        this.name = value;
    }

    /** @return the {@code currentId} attribute value (NMTOKEN), possibly {@code null} */
    public String getCurrentId() {
        return currentId;
    }

    /** @param value the new {@code currentId} attribute value */
    public void setCurrentId(String value) {
        this.currentId = value;
    }

    /** @return the {@code originalId} attribute value (NMTOKEN), possibly {@code null} */
    public String getOriginalId() {
        return originalId;
    }

    /** @param value the new {@code originalId} attribute value */
    public void setOriginalId(String value) {
        this.originalId = value;
    }

    /** @return the {@code GUID} attribute value (NMTOKEN), possibly {@code null} */
    public String getGUID() {
        return guid;
    }

    /** @param value the new {@code GUID} attribute value */
    public void setGUID(String value) {
        this.guid = value;
    }

    /**
     * Returns the live map of attributes not bound to any typed property,
     * keyed by qualified attribute name. The map may be mutated directly;
     * there is no setter by design.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.qpid.jms.provider.amqp.message;

import static org.apache.qpid.jms.provider.amqp.message.AmqpMessageSupport.JMS_AMQP_REPLY_TO_GROUP_ID;
import static org.apache.qpid.jms.provider.amqp.message.AmqpMessageSupport.JMS_AMQP_TTL;
import static org.apache.qpid.jms.provider.amqp.message.AmqpMessageSupport.JMS_AMQP_TYPED_ENCODING;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.jms.JMSException;
import javax.jms.MessageFormatException;

import org.apache.qpid.jms.util.TypeConversionSupport;

/**
 * Utility class used to intercept calls to Message property sets and gets and map the
 * correct AMQP fields to the property name being accessed.
 */
public class AmqpJmsMessagePropertyIntercepter {

    // Registry of property-name -> intercepter; populated once in the static
    // initializer below and treated as read-only afterwards.
    private static final Map<String, PropertyIntercepter> PROPERTY_INTERCEPTERS =
        new HashMap<String, PropertyIntercepter>();

    /**
     * Interface for a Property intercepter object used to write JMS style
     * properties that are part of the JMS Message object members or perform
     * some needed conversion action before some named property is read or
     * written.  If a property is not writable then the intercepter should
     * throw an JMSException to indicate the error.
     */
    interface PropertyIntercepter {

        /**
         * Called when the named property is queried from a JMS Message object.
         *
         * @param message the message being acted upon.
         * @return the correct property value from the given Message.
         * @throws JMSException if an error occurs while accessing the property.
         */
        Object getProperty(AmqpJmsMessageFacade message) throws JMSException;

        /**
         * Called when the named property is assigned on a JMS Message object.
         *
         * @param message the message instance being acted upon.
         * @param value the value to assign to the intercepted property.
         * @throws JMSException if an error occurs writing the property.
         */
        void setProperty(AmqpJmsMessageFacade message, Object value) throws JMSException;

        /**
         * Indicates if the intercepted property has a value currently assigned.
         *
         * @param message the message instance being acted upon.
         * @return true if the intercepted property has a value assigned to it.
         */
        boolean propertyExists(AmqpJmsMessageFacade message);

        /**
         * Request that the intercepted property be cleared.  For properties that
         * cannot be cleared the value should be set to the default value for that
         * property.
         *
         * @param message the target message object whose property should be cleared.
         * @throws JMSException if an error occurs clearing the property.
         */
        void clearProperty(AmqpJmsMessageFacade message) throws JMSException;
    }

    static {
        // JMS_AMQP_TTL maps to the facade's time-to-live override.
        PROPERTY_INTERCEPTERS.put(JMS_AMQP_TTL, new PropertyIntercepter() {
            @Override
            public Object getProperty(AmqpJmsMessageFacade message) throws JMSException {
                if (message.hasAmqpTimeToLiveOverride()) {
                    return message.getAmqpTimeToLiveOverride();
                }
                return null;
            }

            @Override
            public void setProperty(AmqpJmsMessageFacade message, Object value) throws JMSException {
                Long rc = (Long) TypeConversionSupport.convert(value, Long.class);
                if (rc == null) {
                    throw new JMSException("Property " + JMS_AMQP_TTL + " cannot be set from a " + value.getClass().getName() + ".");
                }
                message.setAmqpTimeToLiveOverride(rc);
            }

            @Override
            public boolean propertyExists(AmqpJmsMessageFacade message) {
                return message.hasAmqpTimeToLiveOverride();
            }

            @Override
            public void clearProperty(AmqpJmsMessageFacade message) throws JMSException {
                message.setAmqpTimeToLiveOverride(null);
            }
        });

        // JMS_AMQP_REPLY_TO_GROUP_ID maps to the facade's reply-to-group-id field.
        PROPERTY_INTERCEPTERS.put(JMS_AMQP_REPLY_TO_GROUP_ID, new PropertyIntercepter() {
            @Override
            public Object getProperty(AmqpJmsMessageFacade message) throws JMSException {
                return message.getReplyToGroupId();
            }

            @Override
            public void setProperty(AmqpJmsMessageFacade message, Object value) throws JMSException {
                String rc = (String) TypeConversionSupport.convert(value, String.class);
                if (rc == null) {
                    throw new JMSException("Property " + JMS_AMQP_REPLY_TO_GROUP_ID + " cannot be set from a " + value.getClass().getName() + ".");
                }
                message.setReplyToGroupId(rc);
            }

            @Override
            public boolean propertyExists(AmqpJmsMessageFacade message) {
                // Treat the empty string the same as unset.
                String replyToGroupId = message.getReplyToGroupId();
                return replyToGroupId != null && !replyToGroupId.equals("");
            }

            @Override
            public void clearProperty(AmqpJmsMessageFacade message) throws JMSException {
                message.setReplyToGroupId(null);
            }
        });

        // JMS_AMQP_TYPED_ENCODING only applies to ObjectMessage facades.
        PROPERTY_INTERCEPTERS.put(JMS_AMQP_TYPED_ENCODING, new PropertyIntercepter() {
            @Override
            public Object getProperty(AmqpJmsMessageFacade message) throws JMSException {
                if (message instanceof AmqpJmsObjectMessageFacade) {
                    return ((AmqpJmsObjectMessageFacade) message).isAmqpTypedEncoding();
                }
                return null;
            }

            @Override
            public void setProperty(AmqpJmsMessageFacade message, Object value) throws JMSException {
                Boolean rc = (Boolean) TypeConversionSupport.convert(value, Boolean.class);
                if (rc == null) {
                    throw new JMSException("Property " + JMS_AMQP_TYPED_ENCODING + " cannot be set from a " + value.getClass().getName() + ".");
                }
                if (message instanceof AmqpJmsObjectMessageFacade) {
                    ((AmqpJmsObjectMessageFacade) message).setUseAmqpTypedEncoding(rc);
                } else {
                    throw new MessageFormatException(JMS_AMQP_TYPED_ENCODING + " is only applicable to ObjectMessage");
                }
            }

            @Override
            public boolean propertyExists(AmqpJmsMessageFacade message) {
                if (message instanceof AmqpJmsObjectMessageFacade) {
                    return ((AmqpJmsObjectMessageFacade) message).isAmqpTypedEncoding();
                }
                return false;
            }

            @Override
            public void clearProperty(AmqpJmsMessageFacade message) throws JMSException {
                // TODO - Should we leave encoding intact or change to the default.
            }
        });
    }

    /**
     * Static get method that takes a property name and gets the value either via
     * a registered property get object or through the AmqpJmsMessageFacade getProperty
     * method.
     *
     * @param message the AmqpJmsMessageFacade instance to read from.
     * @param name the property name that is being requested.
     * @return the correct value either mapped to an attribute of a Message or a message property.
     * @throws JMSException if an error occurs while reading the defined property.
     */
    public static Object getProperty(AmqpJmsMessageFacade message, String name) throws JMSException {
        Object value = null;

        PropertyIntercepter propertyExpression = PROPERTY_INTERCEPTERS.get(name);
        if (propertyExpression != null) {
            value = propertyExpression.getProperty(message);
        } else {
            // Not an intercepted name: fall through to the application properties.
            value = message.getApplicationProperty(name);
        }

        return value;
    }

    /**
     * Static set method that takes a property name and sets the value either via
     * a registered property set object or through the AmqpJmsMessageFacade setProperty
     * method.
     *
     * @param message the AmqpJmsMessageFacade instance to write to.
     * @param name the property name that is being written.
     * @param value the new value to assign for the named property.
     * @throws JMSException if an error occurs while writing the defined property.
     */
    public static void setProperty(AmqpJmsMessageFacade message, String name, Object value) throws JMSException {
        PropertyIntercepter propertyExpression = PROPERTY_INTERCEPTERS.get(name);
        if (propertyExpression != null) {
            propertyExpression.setProperty(message, value);
        } else {
            message.setApplicationProperty(name, value);
        }
    }

    /**
     * Static query method to determine if a specific property exists in the given message.
     *
     * @param message the AmqpJmsMessageFacade instance to check.
     * @param name the property name that is being checked.
     * @return true if the message contains the given property.
     * @throws JMSException if an error occurs while inspecting the defined property.
     */
    public static boolean propertyExists(AmqpJmsMessageFacade message, String name) throws JMSException {
        PropertyIntercepter propertyExpression = PROPERTY_INTERCEPTERS.get(name);
        if (propertyExpression != null) {
            return propertyExpression.propertyExists(message);
        } else {
            return message.applicationPropertyExists(name);
        }
    }

    /**
     * Returns the names of all intercepted properties.
     *
     * @return a {@code Set<String>} containing the names of all intercepted properties.
     */
    public static Set<String> getAllPropertyNames() {
        // FIX: previously returned PROPERTY_INTERCEPTERS.keySet() directly,
        // which is a live view of the internal registry — a caller mutating it
        // (e.g. remove()) would corrupt the intercepter table. Return a copy,
        // as the javadoc has always implied.
        return new HashSet<String>(PROPERTY_INTERCEPTERS.keySet());
    }

    /**
     * For each of the currently configured message property intercepter instances a
     * string key value is inserted into a Set and returned if the property has a
     * value and is available for a read operation.  The Set returned may be
     * manipulated by the receiver without impacting the facade, and an empty set
     * will be returned if there are no matching properties.
     *
     * @param message the message being enumerated.
     * @return a {@code Set<String>} containing the names of all intercepted properties with a value.
     */
    public static Set<String> getPropertyNames(AmqpJmsMessageFacade message) {
        Set<String> names = new HashSet<String>();
        for (Entry<String, PropertyIntercepter> entry : PROPERTY_INTERCEPTERS.entrySet()) {
            if (entry.getValue().propertyExists(message)) {
                names.add(entry.getKey());
            }
        }
        // The facade merges in its own application property names.
        return message.getApplicationPropertyNames(names);
    }

    /**
     * For each of the currently configured message property intercepter instances clear or
     * reset the value to its default, then clear all application properties.
     *
     * @param message the AmqpJmsMessageFacade instance to clear.
     * @throws JMSException if an error occurs while clearing a property.
     */
    public static void clearProperties(AmqpJmsMessageFacade message) throws JMSException {
        for (Entry<String, PropertyIntercepter> entry : PROPERTY_INTERCEPTERS.entrySet()) {
            entry.getValue().clearProperty(message);
        }
        message.clearAllApplicationProperties();
    }
}
package org.apache.hadoop.tools.posum.simulation.predictor.detailed;

import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.tools.posum.common.records.dataentity.JobProfile;
import org.apache.hadoop.tools.posum.common.records.dataentity.TaskProfile;
import org.apache.hadoop.tools.posum.common.util.PosumException;
import org.apache.hadoop.tools.posum.simulation.predictor.TaskPredictionInput;
import org.apache.hadoop.tools.posum.simulation.predictor.TaskPredictionOutput;
import org.apache.hadoop.tools.posum.simulation.predictor.simple.SimpleRateBasedPredictor;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.apache.hadoop.tools.posum.common.util.GeneralUtils.orZero;
import static org.apache.hadoop.tools.posum.common.util.cluster.ClusterUtils.getDoubleField;
import static org.apache.hadoop.tools.posum.common.util.cluster.ClusterUtils.getDuration;
import static org.apache.hadoop.tools.posum.common.util.cluster.ClusterUtils.getIntField;
import static org.apache.hadoop.tools.posum.common.util.cluster.ClusterUtils.getLongField;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MAP_FINISH;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MAP_GENERAL;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MAP_LOCAL;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MAP_REMOTE;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MAP_SELECTIVITY;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.MERGE;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.PROFILED_MAPS;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.PROFILED_REDUCES;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.REDUCE;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.SHUFFLE_FIRST;
import static org.apache.hadoop.tools.posum.simulation.predictor.detailed.FlexKeys.SHUFFLE_TYPICAL;

/**
 * Rate-based task duration predictor that keeps separate statistics for each
 * phase of a job: map (general/local/remote rates, selectivity, finish time)
 * and reduce (first-wave shuffle time, typical shuffle rate, merge and reduce
 * rates).  Per-job statistics are persisted as flex fields on the
 * {@link JobProfile} under the keys declared in {@link FlexKeys}.
 */
public class DetailedPredictor extends SimpleRateBasedPredictor<DetailedPredictionModel> {

  private static final Log logger = LogFactory.getLog(DetailedPredictor.class);

  public DetailedPredictor(Configuration conf) {
    super(conf);
  }

  @Override
  protected DetailedPredictionModel initializeModel() {
    return new DetailedPredictionModel(historyBuffer);
  }

  /**
   * Recomputes the per-job prediction statistics from the job's finished
   * tasks and returns them as a flex-field map to be stored on the profile.
   * Map-side and reduce-side sections are each skipped when the number of
   * already-profiled tasks matches the number of completed ones.
   */
  @Override
  protected Map<String, String> getPredictionProfileUpdates(JobProfile job, boolean fromHistory) {
    Map<String, String> fieldMap = new HashMap<>(FlexKeys.values().length);
    long mapFinish = 0L; // keeps track of the finish time of the last map task (0 = not computed)
    double mapRate = 0.0, mapRemoteRate = 0.0, mapLocalRate = 0.0, shuffleTypicalRate = 0.0, mergeRate = 0.0, reduceRate = 0.0;
    int totalMaps = 0, mapRemoteNo = 0, mapLocalNo = 0, typicalShuffleNo = 0, firstShuffleNo = 0, reduceNo = 0;
    long shuffleFirstTime = 0L;
    List<TaskProfile> tasks = null;

    if (!getIntField(job, PROFILED_MAPS.getKey(), 0).equals(job.getCompletedMaps())) {
      // nothing will work if we don't have input size info
      if (job.getTotalSplitSize() != null) {
        Long parsedInputBytes = 0L;
        tasks = getJobTasks(job.getId(), fromHistory);
        if (tasks == null)
          throw new PosumException("Tasks not found or finished for job " + job.getId());
        for (TaskProfile task : tasks) {
          if (!task.isFinished() || !task.getType().equals(TaskType.MAP))
            continue;
          totalMaps++;
          // this is a finished map task; calculate general, local and remote processing rates
          Long taskInput = getSplitSize(task, job);
          parsedInputBytes += taskInput;
          if (mapFinish < task.getFinishTime())
            mapFinish = task.getFinishTime();
          // NOTE(review): comment below claims a 1-byte minimum, but no clamp is
          // applied to taskInput/getDuration here — presumably getSplitSize
          // guarantees it; TODO confirm.
          // restrict to a minimum of 1 byte per task to avoid multiplication or division by zero
          Double newRate = 1.0 * taskInput / getDuration(task);
          mapRate += newRate;
          if (Boolean.TRUE.equals(task.isLocal())) { // because protos are unpredictable
            mapLocalRate += newRate;
            mapLocalNo++;
          } else {
            mapRemoteRate += newRate;
            mapRemoteNo++;
          }
        }
        if (totalMaps != 0) {
          fieldMap.put(MAP_GENERAL.getKey(), Double.toString(mapRate / totalMaps));
          if (mapLocalNo != 0 && mapLocalRate != 0) {
            fieldMap.put(MAP_LOCAL.getKey(), Double.toString(mapLocalRate / mapLocalNo));
          }
          if (mapRemoteNo != 0 && mapRemoteRate != 0) {
            fieldMap.put(MAP_REMOTE.getKey(), Double.toString(mapRemoteRate / mapRemoteNo));
          }
          if (job.getMapOutputBytes() != null) {
            fieldMap.put(MAP_SELECTIVITY.getKey(),
              // restrict to a minimum of 1 byte per task to avoid multiplication or division by zero
              Double.toString(1.0 * orZero(job.getMapOutputBytes()) / parsedInputBytes));
          }
          if (totalMaps == job.getTotalMapTasks()) { // all map tasks were parsed
            fieldMap.put(MAP_FINISH.getKey(), Long.toString(mapFinish));
          }
        }
        fieldMap.put(PROFILED_MAPS.getKey(), Integer.toString(totalMaps));
      }
    }

    if (!getIntField(job, PROFILED_REDUCES.getKey(), 0).equals(job.getCompletedReduces())) {
      if (tasks == null)
        tasks = getJobTasks(job.getId(), fromHistory);
      if (tasks == null)
        throw new PosumException("Tasks not found or finished for job " + job.getId());
      if (mapFinish == 0) {
        // FIX: mapFinish might not have been computed above (map section
        // skipped). The old guard tested mapFinish >= Long.MAX_VALUE, which
        // can never be true for a value initialized to 0, so the persisted
        // MAP_FINISH field was never loaded; test for the 0 sentinel instead.
        mapFinish = getLongField(job, MAP_FINISH.getKey(), Long.MAX_VALUE);
      }
      for (TaskProfile task : tasks) {
        if (!task.isFinished() || !task.getType().equals(TaskType.REDUCE) || task.getInputBytes() == null)
          continue;
        reduceNo++;
        // this is a finished reduce task; split stats into shuffle, merge and reduce
        Long taskInputBytes = Math.max(orZero(task.getInputBytes()), 1);
        if (orZero(task.getReduceTime()) > 0)
          reduceRate += 1.0 * taskInputBytes / task.getReduceTime();
        if (orZero(task.getMergeTime()) > 0)
          mergeRate += 1.0 * taskInputBytes / task.getMergeTime();
        if (task.getStartTime() >= mapFinish) {
          // the task was not in the first reduce wave; store shuffle time under typical
          shuffleTypicalRate += 1.0 * taskInputBytes / task.getShuffleTime();
          typicalShuffleNo++;
        } else {
          shuffleFirstTime += task.getStartTime() + orZero(task.getShuffleTime()) - mapFinish;
          firstShuffleNo++;
        }
      }
      if (reduceNo > 0) {
        fieldMap.put(REDUCE.getKey(), Double.toString(reduceRate / reduceNo));
        fieldMap.put(MERGE.getKey(), Double.toString(mergeRate / reduceNo));
        if (shuffleFirstTime != 0) {
          fieldMap.put(SHUFFLE_FIRST.getKey(), Long.toString(shuffleFirstTime / firstShuffleNo));
        }
        if (shuffleTypicalRate != 0) {
          fieldMap.put(SHUFFLE_TYPICAL.getKey(), Double.toString(shuffleTypicalRate / typicalShuffleNo));
        }
        fieldMap.put(PROFILED_REDUCES.getKey(), Integer.toString(reduceNo));
      }
    }
    return fieldMap;
  }

  /**
   * Predicts a map task's duration as splitSize / rate, preferring the
   * locality-specific rate (local vs. remote) of the current job, then of
   * relevant historical jobs, then falling back to average durations.
   */
  @Override
  protected TaskPredictionOutput predictMapTaskBehavior(TaskPredictionInput input) {
    JobProfile job = input.getJob();
    DetailedMapPredictionStats jobStats = new DetailedMapPredictionStats(1, 0);
    jobStats.addSource(job);
    Boolean local = null;
    if (input.getNodeAddress() != null)
      local = input.getTask().getSplitLocations().contains(input.getNodeAddress());
    else {
      if (input instanceof DetailedTaskPredictionInput)
        local = ((DetailedTaskPredictionInput) input).getLocal();
    }
    Double rate = local == null ? jobStats.getAvgRate() : local ? jobStats.getAvgLocalRate() : jobStats.getAvgRemoteRate();
    if (rate == null) {
      // we don't know the rate of that type
      // get the appropriate average map processing rate from history
      DetailedMapPredictionStats mapStats = model.getRelevantMapStats(job);
      if (mapStats == null)
        return handleNoMapInfo(job);
      if (mapStats.getRelevance() > 1 && job.getAvgMapDuration() != null)
        // if history is not relevant and we have the current average duration, return it
        return new TaskPredictionOutput(job.getAvgMapDuration());
      rate = local == null ? mapStats.getAvgRate() : local ? mapStats.getAvgLocalRate() : mapStats.getAvgRemoteRate();
      if (rate == null)
        return handleNoMapInfo(job);
    }
    // multiply by how much input each task has
    Long splitSize = getSplitSize(input.getTask(), job);
    if (splitSize == null)
      return handleNoMapInfo(job);
    Double duration = splitSize / rate;
    logger.trace("Map duration for " + job.getId() + " should be " + splitSize + " / " + rate + "=" + duration);
    return new TaskPredictionOutput(duration.longValue());
  }

  /** Falls back to the job's current average map duration when available. */
  @Override
  protected TaskPredictionOutput handleNoMapInfo(JobProfile job) {
    if (job.getAvgMapDuration() != null)
      // if we do have at least the current average duration, return that, regardless of location
      return new TaskPredictionOutput(job.getAvgMapDuration());
    return super.handleNoMapInfo(job);
  }

  /**
   * Predicts a reduce task's duration, first attempting a detailed
   * phase-by-phase prediction (shuffle + merge + reduce) and falling back to
   * average reduce durations when information is missing.
   */
  @Override
  protected TaskPredictionOutput predictReduceTaskBehavior(TaskPredictionInput input) {
    JobProfile job = input.getJob();
    Double avgSelectivity = getMapTaskSelectivity(
      job,
      model.getRelevantMapStats(job),
      MAP_SELECTIVITY.getKey()
    );

    DetailedReducePredictionStats jobStats = new DetailedReducePredictionStats(1, 0);
    jobStats.addSource(job);
    if (jobStats.isIncomplete()) {
      // we are missing information; get averages from history to compensate
      DetailedReducePredictionStats reduceStats = model.getRelevantReduceStats(job);
      if (reduceStats != null) {
        jobStats.completeFrom(reduceStats);
      }
    }

    // try a detailed prediction using phase-specific statistics
    TaskPredictionOutput duration = predictReduceByPhases(job, avgSelectivity, jobStats);
    if (duration != null)
      // prediction was possible
      return duration;
    if (jobStats.getAvgReduceDuration() == null)
      // we have no current or historical reduce information, not even average duration
      return handleNoReduceInfo(job, avgSelectivity, getDoubleField(job, MAP_GENERAL.getKey(), null));
    // we are still missing information
    // just return average reduce duration of the current job or historical jobs
    logger.trace("Reduce duration calculated as simple average for " + job.getId() + " = " + jobStats.getAvgReduceDuration());
    return new TaskPredictionOutput(jobStats.getAvgReduceDuration().longValue());
  }

  /**
   * Phase-based reduce prediction: estimates the task's input from the
   * remaining map output (via selectivity) and sums predicted shuffle, merge
   * and reduce times. Returns null when required statistics are missing.
   */
  private TaskPredictionOutput predictReduceByPhases(JobProfile job, Double avgSelectivity, DetailedReducePredictionStats jobStats) {
    if (avgSelectivity == null)
      return null;
    // calculate how much input the task should have based on how much is left and how many reduces remain
    // restrict to a minimum of 1 byte per task to avoid multiplication or division by zero
    Double inputLeft = orZero(job.getTotalSplitSize()) * avgSelectivity - orZero(job.getReduceInputBytes());
    Double inputPerTask = Math.max(inputLeft / (job.getTotalReduceTasks() - job.getCompletedReduces()), 1);

    Long shuffleTime = predictShuffleTime(jobStats, ObjectUtils.equals(job.getCompletedMaps(), job.getTotalMapTasks()), inputPerTask);
    if (shuffleTime == null)
      return null;

    Double duration = shuffleTime + inputPerTask / jobStats.getAvgMergeRate() + inputPerTask / jobStats.getAvgReduceRate();
    logger.trace("Reduce duration for " + job.getId() + " should be " + shuffleTime + " + " +
      inputPerTask + " / " + jobStats.getAvgMergeRate() + " + " +
      inputPerTask + " / " + jobStats.getAvgReduceRate() + "=" + duration);
    return new TaskPredictionOutput(duration.longValue());
  }

  /**
   * First-wave reduces use the recorded average first-shuffle time; later
   * waves use inputPerTask / typical shuffle rate. Null when neither is known.
   */
  private Long predictShuffleTime(DetailedReducePredictionStats jobStats, boolean isFirstShuffle, Double inputPerTask) {
    if (isFirstShuffle && jobStats.getAvgShuffleFirstTime() != null)
      return jobStats.getAvgShuffleFirstTime().longValue();
    if (jobStats.getAvgShuffleTypicalRate() == null)
      return null;
    return Double.valueOf(inputPerTask / jobStats.getAvgShuffleTypicalRate()).longValue();
  }
}
package com.codahale.metrics;

import org.junit.Before;
import org.junit.Test;

import java.util.HashMap;
import java.util.Map;

import static com.codahale.metrics.MetricRegistry.name;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.fest.assertions.data.MapEntry.entry;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link MetricRegistry}: listener notifications on
 * register/remove, get-or-create accessors, the typed metric maps,
 * {@link MetricSet} registration (flat, prefixed, recursive), the static
 * {@code name(...)} helper, and filtered removal.
 */
public class MetricRegistryTest {
    // Mocked listener used to verify add/remove notifications.
    private final MetricRegistryListener listener = mock(MetricRegistryListener.class);
    private final MetricRegistry registry = new MetricRegistry();
    @SuppressWarnings("unchecked")
    private final Gauge<String> gauge = mock(Gauge.class);
    private final Counter counter = mock(Counter.class);
    private final Histogram histogram = mock(Histogram.class);
    private final Meter meter = mock(Meter.class);
    private final Timer timer = mock(Timer.class);

    @Before
    public void setUp() throws Exception {
        registry.addListener(listener);
    }

    // ---- registration / removal notifications, per metric type ----

    @Test
    public void registeringAGaugeTriggersANotification() throws Exception {
        assertThat(registry.register("thing", gauge))
                .isEqualTo(gauge);

        verify(listener).onGaugeAdded("thing", gauge);
    }

    @Test
    public void removingAGaugeTriggersANotification() throws Exception {
        registry.register("thing", gauge);

        assertThat(registry.remove("thing"))
                .isTrue();

        verify(listener).onGaugeRemoved("thing");
    }

    @Test
    public void registeringACounterTriggersANotification() throws Exception {
        assertThat(registry.register("thing", counter))
                .isEqualTo(counter);

        verify(listener).onCounterAdded("thing", counter);
    }

    @Test
    public void accessingACounterRegistersAndReusesTheCounter() throws Exception {
        // counter(name) is get-or-create: second call must return the same instance.
        final Counter counter1 = registry.counter("thing");
        final Counter counter2 = registry.counter("thing");

        assertThat(counter1)
                .isSameAs(counter2);

        verify(listener).onCounterAdded("thing", counter1);
    }

    @Test
    public void removingACounterTriggersANotification() throws Exception {
        registry.register("thing", counter);

        assertThat(registry.remove("thing"))
                .isTrue();

        verify(listener).onCounterRemoved("thing");
    }

    @Test
    public void registeringAHistogramTriggersANotification() throws Exception {
        assertThat(registry.register("thing", histogram))
                .isEqualTo(histogram);

        verify(listener).onHistogramAdded("thing", histogram);
    }

    @Test
    public void accessingAHistogramRegistersAndReusesIt() throws Exception {
        final Histogram histogram1 = registry.histogram("thing");
        final Histogram histogram2 = registry.histogram("thing");

        assertThat(histogram1)
                .isSameAs(histogram2);

        verify(listener).onHistogramAdded("thing", histogram1);
    }

    @Test
    public void removingAHistogramTriggersANotification() throws Exception {
        registry.register("thing", histogram);

        assertThat(registry.remove("thing"))
                .isTrue();

        verify(listener).onHistogramRemoved("thing");
    }

    @Test
    public void registeringAMeterTriggersANotification() throws Exception {
        assertThat(registry.register("thing", meter))
                .isEqualTo(meter);

        verify(listener).onMeterAdded("thing", meter);
    }

    @Test
    public void accessingAMeterRegistersAndReusesIt() throws Exception {
        final Meter meter1 = registry.meter("thing");
        final Meter meter2 = registry.meter("thing");

        assertThat(meter1)
                .isSameAs(meter2);

        verify(listener).onMeterAdded("thing", meter1);
    }

    @Test
    public void removingAMeterTriggersANotification() throws Exception {
        registry.register("thing", meter);

        assertThat(registry.remove("thing"))
                .isTrue();

        verify(listener).onMeterRemoved("thing");
    }

    @Test
    public void registeringATimerTriggersANotification() throws Exception {
        assertThat(registry.register("thing", timer))
                .isEqualTo(timer);

        verify(listener).onTimerAdded("thing", timer);
    }

    @Test
    public void accessingATimerRegistersAndReusesIt() throws Exception {
        final Timer timer1 = registry.timer("thing");
        final Timer timer2 = registry.timer("thing");

        assertThat(timer1)
                .isSameAs(timer2);

        verify(listener).onTimerAdded("thing", timer1);
    }

    @Test
    public void removingATimerTriggersANotification() throws Exception {
        registry.register("thing", timer);

        assertThat(registry.remove("thing"))
                .isTrue();

        verify(listener).onTimerRemoved("thing");
    }

    // ---- listener lifecycle ----

    @Test
    public void addingAListenerWithExistingMetricsCatchesItUp() throws Exception {
        // A listener added after the fact must be replayed every existing metric.
        registry.register("gauge", gauge);
        registry.register("counter", counter);
        registry.register("histogram", histogram);
        registry.register("meter", meter);
        registry.register("timer", timer);

        final MetricRegistryListener other = mock(MetricRegistryListener.class);
        registry.addListener(other);

        verify(other).onGaugeAdded("gauge", gauge);
        verify(other).onCounterAdded("counter", counter);
        verify(other).onHistogramAdded("histogram", histogram);
        verify(other).onMeterAdded("meter", meter);
        verify(other).onTimerAdded("timer", timer);
    }

    @Test
    public void aRemovedListenerDoesNotReceiveUpdates() throws Exception {
        registry.register("gauge", gauge);
        registry.removeListener(listener);
        registry.register("gauge2", gauge);

        verify(listener, never()).onGaugeAdded("gauge2", gauge);
    }

    // ---- typed metric maps and name set ----

    @Test
    public void hasAMapOfRegisteredGauges() throws Exception {
        registry.register("gauge", gauge);

        assertThat(registry.getGauges())
                .contains(entry("gauge", gauge));
    }

    @Test
    public void hasAMapOfRegisteredCounters() throws Exception {
        registry.register("counter", counter);

        assertThat(registry.getCounters())
                .contains(entry("counter", counter));
    }

    @Test
    public void hasAMapOfRegisteredHistograms() throws Exception {
        registry.register("histogram", histogram);

        assertThat(registry.getHistograms())
                .contains(entry("histogram", histogram));
    }

    @Test
    public void hasAMapOfRegisteredMeters() throws Exception {
        registry.register("meter", meter);

        assertThat(registry.getMeters())
                .contains(entry("meter", meter));
    }

    @Test
    public void hasAMapOfRegisteredTimers() throws Exception {
        registry.register("timer", timer);

        assertThat(registry.getTimers())
                .contains(entry("timer", timer));
    }

    @Test
    public void hasASetOfRegisteredMetricNames() throws Exception {
        registry.register("gauge", gauge);
        registry.register("counter", counter);
        registry.register("histogram", histogram);
        registry.register("meter", meter);
        registry.register("timer", timer);

        assertThat(registry.getNames())
                .containsOnly("gauge", "counter", "histogram", "meter", "timer");
    }

    // ---- MetricSet registration ----

    @Test
    public void registersMultipleMetrics() throws Exception {
        final MetricSet metrics = new MetricSet() {
            @Override
            public Map<String, Metric> getMetrics() {
                final Map<String, Metric> metrics = new HashMap<String, Metric>();
                metrics.put("gauge", gauge);
                metrics.put("counter", counter);
                return metrics;
            }
        };

        registry.registerAll(metrics);

        assertThat(registry.getNames())
                .containsOnly("gauge", "counter");
    }

    @Test
    public void registersMultipleMetricsWithAPrefix() throws Exception {
        final MetricSet metrics = new MetricSet() {
            @Override
            public Map<String, Metric> getMetrics() {
                final Map<String, Metric> metrics = new HashMap<String, Metric>();
                metrics.put("gauge", gauge);
                metrics.put("counter", counter);
                return metrics;
            }
        };

        registry.register("my", metrics);

        assertThat(registry.getNames())
                .containsOnly("my.gauge", "my.counter");
    }

    @Test
    public void registersRecursiveMetricSets() throws Exception {
        // Nested MetricSets flatten with dotted prefixes: my.inner.gauge, my.counter.
        final MetricSet inner = new MetricSet() {
            @Override
            public Map<String, Metric> getMetrics() {
                final Map<String, Metric> metrics = new HashMap<String, Metric>();
                metrics.put("gauge", gauge);
                return metrics;
            }
        };

        final MetricSet outer = new MetricSet() {
            @Override
            public Map<String, Metric> getMetrics() {
                final Map<String, Metric> metrics = new HashMap<String, Metric>();
                metrics.put("inner", inner);
                metrics.put("counter", counter);
                return metrics;
            }
        };

        registry.register("my", outer);

        assertThat(registry.getNames())
                .containsOnly("my.inner.gauge", "my.counter");
    }

    @Test
    public void registersMetricsFromAnotherRegistry() throws Exception {
        // MetricRegistry is itself a MetricSet, so registries nest under a prefix.
        MetricRegistry other = new MetricRegistry();
        other.register("gauge", gauge);
        registry.register("nested", other);
        assertThat(registry.getNames()).containsOnly("nested.gauge");
    }

    // ---- static name(...) helper ----

    @Test
    public void concatenatesStringsToFormADottedName() throws Exception {
        assertThat(name("one", "two", "three"))
                .isEqualTo("one.two.three");
    }

    @Test
    public void elidesNullValuesFromNames() throws Exception {
        assertThat(name("one", null, "three"))
                .isEqualTo("one.three");
    }

    @Test
    public void elidesEmptyStringsFromNames() throws Exception {
        assertThat(name("one", "", "three"))
                .isEqualTo("one.three");
    }

    @Test
    public void concatenatesClassNamesWithStringsToFormADottedName() throws Exception {
        assertThat(name(MetricRegistryTest.class, "one", "two"))
                .isEqualTo("com.codahale.metrics.MetricRegistryTest.one.two");
    }

    @Test
    public void concatenatesClassesWithoutCanonicalNamesWithStrings() throws Exception {
        // CAUTION: the expected "$5" is the compiler-assigned index of this anonymous
        // class — it is the fifth anonymous class declared in this file. Adding,
        // removing, or reordering any anonymous class above breaks this assertion.
        final Gauge<String> g = new Gauge<String>() {
            @Override
            public String getValue() {
                return null;
            }
        };

        assertThat(name(g.getClass(), "one", "two"))
                .isEqualTo("com.codahale.metrics.MetricRegistryTest$5.one.two");
    }

    // ---- filtered removal ----

    @Test
    public void removesMetricsMatchingAFilter() throws Exception {
        registry.timer("timer-1");
        registry.timer("timer-2");
        registry.histogram("histogram-1");

        assertThat(registry.getNames())
                .contains("timer-1", "timer-2", "histogram-1");

        registry.removeMatching(new MetricFilter() {
            @Override
            public boolean matches(String name, Metric metric) {
                return name.endsWith("1");
            }
        });

        assertThat(registry.getNames())
                .doesNotContain("timer-1", "histogram-1");
        assertThat(registry.getNames())
                .contains("timer-2");

        verify(listener).onTimerRemoved("timer-1");
        verify(listener).onHistogramRemoved("histogram-1");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import javax.cache.processor.EntryProcessor; import javax.cache.processor.MutableEntry; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteTransactions; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.cache.CacheRebalanceMode.ASYNC; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; /** * Tests transaction during 
cache preloading: entry-processor invocations must see already-loaded values
 * (never {@code null}) even while rebalancing is in progress.
 */
public abstract class IgniteTxPreloadAbstractTest extends GridCacheAbstractSelfTest {
    /** Total number of grid nodes used by the tests. */
    private static final int GRID_CNT = 6;

    /** Set by an entry processor when it observes a {@code null} value, i.e. a key that was not preloaded. */
    private static volatile boolean keyNotLoaded;

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        keyNotLoaded = false;

        startGrid(0);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // No-op: grids are started per-test in beforeTest(), not once per class.
    }

    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return GRID_CNT;
    }

    /**
     * Loads 10k entries on node 0, starts the remaining nodes concurrently, and
     * invokes entry processors on a key sample while rebalancing runs; fails if
     * any processor observed a missing (not yet preloaded) value.
     *
     * @throws Exception If failed.
     */
    public void testRemoteTxPreloading() throws Exception {
        IgniteCache<String, Integer> cache = jcache(0);

        for (int i = 0; i < 10000; i++)
            cache.put(String.valueOf(i), 0);

        final AtomicInteger gridIdx = new AtomicInteger(1);

        IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(
            new Callable<Object>() {
                @Nullable @Override public Object call() throws Exception {
                    int idx = gridIdx.getAndIncrement();

                    startGrid(idx);

                    return null;
                }
            },
            GRID_CNT - 1,
            "grid-starter-" + getName()
        );

        // NOTE(review): only waits for 2 of the GRID_CNT - 1 remote nodes before
        // invoking — presumably intentional, so invokeAll races with the remaining
        // node starts and rebalancing; confirm.
        waitForRemoteNodes(grid(0), 2);

        Set<String> keys = new HashSet<>();

        // Sample keys spread across the data set: "0", "1000", ..., "9000".
        for (int i = 0; i < 10; i++)
            keys.add(String.valueOf(i * 1000));

        cache.invokeAll(keys, new EntryProcessor<String, Integer, Void>() {
            @Override public Void process(MutableEntry<String, Integer> e, Object... args) {
                Integer val = e.getValue();

                if (val == null) {
                    // Value missing: record the failure but still write so the
                    // final value check below remains meaningful.
                    keyNotLoaded = true;

                    e.setValue(1);

                    return null;
                }

                e.setValue(val + 1);

                return null;
            }
        });

        assertFalse(keyNotLoaded);

        fut.get();

        for (int i = 0; i < GRID_CNT; i++)
            // Wait for preloader.
            jcache(i).rebalance().get();

        for (int i = 0; i < GRID_CNT; i++) {
            for (String key : keys)
                assertEquals("Unexpected value for cache " + i, (Integer)1, jcache(i).get(key));
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testLocalTxPreloadingOptimistic() throws Exception {
        testLocalTxPreloading(OPTIMISTIC);
    }

    /**
     * @throws Exception If failed.
     */
    public void testLocalTxPreloadingPessimistic() throws Exception {
        testLocalTxPreloading(PESSIMISTIC);
    }

    /**
     * Tries to execute transaction doing transform when target key is not yet preloaded.
     * Starts nodes one at a time and runs a READ_COMMITTED transactional invoke on
     * each new node; the entry processor must see the preloaded value, never null.
     *
     * @param txConcurrency Transaction concurrency;
     * @throws Exception If failed.
     */
    private void testLocalTxPreloading(TransactionConcurrency txConcurrency) throws Exception {
        Map<String, Integer> map = new HashMap<>();

        for (int i = 0; i < 10000; i++)
            map.put(String.valueOf(i), 0);

        IgniteCache<String, Integer> cache0 = jcache(0);

        cache0.putAll(map);

        final String TX_KEY = "9000";

        int expVal = 0;

        for (int i = 1; i < GRID_CNT; i++) {
            assertEquals((Integer)expVal, cache0.get(TX_KEY));

            startGrid(i);

            IgniteCache<String, Integer> cache = jcache(i);

            IgniteTransactions txs = ignite(i).transactions();

            try (Transaction tx = txs.txStart(txConcurrency, TransactionIsolation.READ_COMMITTED)) {
                cache.invoke(TX_KEY, new EntryProcessor<String, Integer, Void>() {
                    @Override public Void process(MutableEntry<String, Integer> e, Object... args) {
                        Integer val = e.getValue();

                        if (val == null) {
                            keyNotLoaded = true;

                            e.setValue(1);

                            return null;
                        }

                        e.setValue(val + 1);

                        return null;
                    }
                });

                tx.commit();
            }

            assertFalse(keyNotLoaded);

            expVal++;

            assertEquals((Integer)expVal, cache.get(TX_KEY));
        }

        for (int i = 0; i < GRID_CNT; i++)
            assertEquals("Unexpected value for cache " + i, (Integer)expVal, jcache(i).get(TX_KEY));
    }

    /** {@inheritDoc} */
    @Override protected CacheConfiguration cacheConfiguration(String gridName) throws Exception {
        CacheConfiguration cfg = super.cacheConfiguration(gridName);

        // Asynchronous rebalancing is the point of these tests: data moves in the
        // background while transactions run. FULL_SYNC keeps value checks across
        // all nodes deterministic; no cache store is used.
        cfg.setRebalanceMode(ASYNC);
        cfg.setWriteSynchronizationMode(FULL_SYNC);

        cfg.setCacheStoreFactory(null);

        return cfg;
    }
}
package package.name; import apg.Grammar; import java.io.PrintStream; /** This class has been generated automatically from an SABNF grammer by * Java APG, the {@link apg.Generator} class.<br> * It is an extension of the {@link apg.Grammar} * class containing additional members and enums not found * in the base class.<br> * The function {@link #getInstance()} will return a reference to a static, * singleton instance of the class. * <p>For more information visit <a href="http://www.coasttocoastresearch.com" target="_blank">http://www.coasttocoastresearch.com</a>. */ public class ECLparserBrief.java extends Grammar{ // public API /** Called to get a singleton instance of this class. * @return a singleton instance of this class, cast as the base class, Grammar. */ public static Grammar getInstance(){ if(factoryInstance == null){ factoryInstance = new ECLparserBrief.java(getRules(), getUdts(), getOpcodes()); } return factoryInstance; } // rule name enum /** The number of rules in the grammar */ public static int ruleCount = 66; /** This enum provides easy to remember enum constants for locating the rule identifiers and names. * The enum constants have the same spelling as the rule names rendered in all caps with underscores replacing hyphens. 
*/ public enum RuleNames{ /** id = <code>17</code>, name = <code>"ancestorOf"</code> */ ANCESTOROF("ancestorOf", 17, 92, 1), /** id = <code>18</code>, name = <code>"ancestorOrSelfOf"</code> */ ANCESTORORSELFOF("ancestorOrSelfOf", 18, 93, 1), /** id = <code>60</code>, name = <code>"anyNonEscapedChar"</code> */ ANYNONESCAPEDCHAR("anyNonEscapedChar", 60, 329, 10), /** id = <code>31</code>, name = <code>"attribute"</code> */ ATTRIBUTE("attribute", 31, 197, 28), /** id = <code>30</code>, name = <code>"attributeGroup"</code> */ ATTRIBUTEGROUP("attributeGroup", 30, 187, 10), /** id = <code>37</code>, name = <code>"attributeName"</code> */ ATTRIBUTENAME("attributeName", 37, 239, 3), /** id = <code>36</code>, name = <code>"attributeOperator"</code> */ ATTRIBUTEOPERATOR("attributeOperator", 36, 236, 3), /** id = <code>26</code>, name = <code>"attributeSet"</code> */ ATTRIBUTESET("attributeSet", 26, 160, 7), /** id = <code>55</code>, name = <code>"BS"</code> */ BS("BS", 55, 319, 1), /** id = <code>32</code>, name = <code>"cardinality"</code> */ CARDINALITY("cardinality", 32, 225, 8), /** id = <code>3</code>, name = <code>"compoundExpressionConstraint"</code> */ COMPOUNDEXPRESSIONCONSTRAINT("compoundExpressionConstraint", 3, 19, 4), /** id = <code>11</code>, name = <code>"conceptId"</code> */ CONCEPTID("conceptId", 11, 74, 1), /** id = <code>10</code>, name = <code>"conceptReference"</code> */ CONCEPTREFERENCE("conceptReference", 10, 64, 10), /** id = <code>19</code>, name = <code>"conjunction"</code> */ CONJUNCTION("conjunction", 19, 94, 13), /** id = <code>27</code>, name = <code>"conjunctionAttributeSet"</code> */ CONJUNCTIONATTRIBUTESET("conjunctionAttributeSet", 27, 167, 6), /** id = <code>4</code>, name = <code>"conjunctionExpressionConstraint"</code> */ CONJUNCTIONEXPRESSIONCONSTRAINT("conjunctionExpressionConstraint", 4, 23, 8), /** id = <code>23</code>, name = <code>"conjunctionRefinementSet"</code> */ CONJUNCTIONREFINEMENTSET("conjunctionRefinementSet", 23, 139, 6), 
/** id = <code>14</code>, name = <code>"constraintOperator"</code> */ CONSTRAINTOPERATOR("constraintOperator", 14, 85, 5), /** id = <code>52</code>, name = <code>"CR"</code> */ CR("CR", 52, 316, 1), /** id = <code>45</code>, name = <code>"decimalValue"</code> */ DECIMALVALUE("decimalValue", 45, 287, 5), /** id = <code>15</code>, name = <code>"descendantOf"</code> */ DESCENDANTOF("descendantOf", 15, 90, 1), /** id = <code>16</code>, name = <code>"descendantOrSelfOf"</code> */ DESCENDANTORSELFOF("descendantOrSelfOf", 16, 91, 1), /** id = <code>56</code>, name = <code>"digit"</code> */ DIGIT("digit", 56, 320, 1), /** id = <code>58</code>, name = <code>"digitNonZero"</code> */ DIGITNONZERO("digitNonZero", 58, 322, 1), /** id = <code>20</code>, name = <code>"disjunction"</code> */ DISJUNCTION("disjunction", 20, 107, 8), /** id = <code>28</code>, name = <code>"disjunctionAttributeSet"</code> */ DISJUNCTIONATTRIBUTESET("disjunctionAttributeSet", 28, 173, 6), /** id = <code>5</code>, name = <code>"disjunctionExpressionConstraint"</code> */ DISJUNCTIONEXPRESSIONCONSTRAINT("disjunctionExpressionConstraint", 5, 31, 8), /** id = <code>24</code>, name = <code>"disjunctionRefinementSet"</code> */ DISJUNCTIONREFINEMENTSET("disjunctionRefinementSet", 24, 145, 6), /** id = <code>61</code>, name = <code>"escapedChar"</code> */ ESCAPEDCHAR("escapedChar", 61, 339, 7), /** id = <code>21</code>, name = <code>"exclusion"</code> */ EXCLUSION("exclusion", 21, 115, 17), /** id = <code>6</code>, name = <code>"exclusionExpressionConstraint"</code> */ EXCLUSIONEXPRESSIONCONSTRAINT("exclusionExpressionConstraint", 6, 39, 6), /** id = <code>39</code>, name = <code>"expressionComparisonOperator"</code> */ EXPRESSIONCOMPARISONOPERATOR("expressionComparisonOperator", 39, 252, 3), /** id = <code>0</code>, name = <code>"expressionConstraint"</code> */ EXPRESSIONCONSTRAINT("expressionConstraint", 0, 0, 7), /** id = <code>38</code>, name = <code>"expressionConstraintValue"</code> */ 
EXPRESSIONCONSTRAINTVALUE("expressionConstraintValue", 38, 242, 10), /** id = <code>8</code>, name = <code>"focusConcept"</code> */ FOCUSCONCEPT("focusConcept", 8, 55, 8), /** id = <code>51</code>, name = <code>"HTAB"</code> */ HTAB("HTAB", 51, 315, 1), /** id = <code>44</code>, name = <code>"integerValue"</code> */ INTEGERVALUE("integerValue", 44, 277, 10), /** id = <code>53</code>, name = <code>"LF"</code> */ LF("LF", 53, 317, 1), /** id = <code>34</code>, name = <code>"many"</code> */ MANY("many", 34, 234, 1), /** id = <code>9</code>, name = <code>"memberOf"</code> */ MEMBEROF("memberOf", 9, 63, 1), /** id = <code>49</code>, name = <code>"mws"</code> */ MWS("mws", 49, 308, 6), /** id = <code>46</code>, name = <code>"nonNegativeIntegerValue"</code> */ NONNEGATIVEINTEGERVALUE("nonNegativeIntegerValue", 46, 292, 6), /** id = <code>59</code>, name = <code>"nonwsNonPipe"</code> */ NONWSNONPIPE("nonwsNonPipe", 59, 323, 6), /** id = <code>40</code>, name = <code>"numericComparisonOperator"</code> */ NUMERICCOMPARISONOPERATOR("numericComparisonOperator", 40, 255, 7), /** id = <code>42</code>, name = <code>"numericValue"</code> */ NUMERICVALUE("numericValue", 42, 265, 5), /** id = <code>54</code>, name = <code>"QM"</code> */ QM("QM", 54, 318, 1), /** id = <code>2</code>, name = <code>"refinedExpressionConstraint"</code> */ REFINEDEXPRESSIONCONSTRAINT("refinedExpressionConstraint", 2, 13, 6), /** id = <code>22</code>, name = <code>"refinement"</code> */ REFINEMENT("refinement", 22, 132, 7), /** id = <code>35</code>, name = <code>"reverseFlag"</code> */ REVERSEFLAG("reverseFlag", 35, 235, 1), /** id = <code>47</code>, name = <code>"sctId"</code> */ SCTID("sctId", 47, 298, 4), /** id = <code>1</code>, name = <code>"simpleExpressionConstraint"</code> */ SIMPLEEXPRESSIONCONSTRAINT("simpleExpressionConstraint", 1, 7, 6), /** id = <code>50</code>, name = <code>"SP"</code> */ SP("SP", 50, 314, 1), /** id = <code>41</code>, name = <code>"stringComparisonOperator"</code> */ 
STRINGCOMPARISONOPERATOR("stringComparisonOperator", 41, 262, 3), /** id = <code>43</code>, name = <code>"stringValue"</code> */ STRINGVALUE("stringValue", 43, 270, 7), /** id = <code>29</code>, name = <code>"subAttributeSet"</code> */ SUBATTRIBUTESET("subAttributeSet", 29, 179, 8), /** id = <code>7</code>, name = <code>"subExpressionConstraint"</code> */ SUBEXPRESSIONCONSTRAINT("subExpressionConstraint", 7, 45, 10), /** id = <code>25</code>, name = <code>"subRefinement"</code> */ SUBREFINEMENT("subRefinement", 25, 151, 9), /** id = <code>12</code>, name = <code>"term"</code> */ TERM("term", 12, 75, 9), /** id = <code>33</code>, name = <code>"to"</code> */ TO("to", 33, 233, 1), /** id = <code>62</code>, name = <code>"UTF8-2"</code> */ UTF8_2("UTF8-2", 62, 346, 3), /** id = <code>63</code>, name = <code>"UTF8-3"</code> */ UTF8_3("UTF8-3", 63, 349, 17), /** id = <code>64</code>, name = <code>"UTF8-4"</code> */ UTF8_4("UTF8-4", 64, 366, 15), /** id = <code>65</code>, name = <code>"UTF8-tail"</code> */ UTF8_TAIL("UTF8-tail", 65, 381, 1), /** id = <code>13</code>, name = <code>"wildCard"</code> */ WILDCARD("wildCard", 13, 84, 1), /** id = <code>48</code>, name = <code>"ws"</code> */ WS("ws", 48, 302, 6), /** id = <code>57</code>, name = <code>"zero"</code> */ ZERO("zero", 57, 321, 1); private String name; private int id; private int offset; private int count; RuleNames(String string, int id, int offset, int count){ this.name = string; this.id = id; this.offset = offset; this.count = count; } /** Associates the enum with the original grammar name of the rule it represents. * @return the original grammar rule name. */ public String ruleName(){return name;} /** Associates the enum with an identifier for the grammar rule it represents. * @return the rule name identifier. 
*/ public int ruleID(){return id;} private int opcodeOffset(){return offset;} private int opcodeCount(){return count;} } // UDT name enum /** The number of UDTs in the grammar */ public static int udtCount = 0; /** This enum provides easy to remember enum constants for locating the UDT identifiers and names. * The enum constants have the same spelling as the UDT names rendered in all caps with underscores replacing hyphens. */ public enum UdtNames{ } // private private static ECLparserBrief.java factoryInstance = null; private ECLparserBrief.java(Rule[] rules, Udt[] udts, Opcode[] opcodes){ super(rules, udts, opcodes); } private static Rule[] getRules(){ Rule[] rules = new Rule[66]; for(RuleNames r : RuleNames.values()){ rules[r.ruleID()] = getRule(r.ruleID(), r.ruleName(), r.opcodeOffset(), r.opcodeCount()); } return rules; } private static Udt[] getUdts(){ Udt[] udts = new Udt[0]; return udts; } // opcodes private static Opcode[] getOpcodes(){ Opcode[] op = new Opcode[382]; {int[] a = {1,2,6}; op[0] = getOpcodeCat(a);} op[1] = getOpcodeRnm(48, 302); // ws {int[] a = {3,4,5}; op[2] = getOpcodeAlt(a);} op[3] = getOpcodeRnm(2, 13); // refinedExpressionConstraint op[4] = getOpcodeRnm(3, 19); // compoundExpressionConstraint op[5] = getOpcodeRnm(1, 7); // simpleExpressionConstraint op[6] = getOpcodeRnm(48, 302); // ws {int[] a = {8,12}; op[7] = getOpcodeCat(a);} op[8] = getOpcodeRep((char)0, (char)1, 9); {int[] a = {10,11}; op[9] = getOpcodeCat(a);} op[10] = getOpcodeRnm(14, 85); // constraintOperator op[11] = getOpcodeRnm(48, 302); // ws op[12] = getOpcodeRnm(8, 55); // focusConcept {int[] a = {14,15,16,17,18}; op[13] = getOpcodeCat(a);} op[14] = getOpcodeRnm(1, 7); // simpleExpressionConstraint op[15] = getOpcodeRnm(48, 302); // ws {char[] a = {58}; op[16] = getOpcodeTls(a);} op[17] = getOpcodeRnm(48, 302); // ws op[18] = getOpcodeRnm(22, 132); // refinement {int[] a = {20,21,22}; op[19] = getOpcodeAlt(a);} op[20] = getOpcodeRnm(4, 23); // 
conjunctionExpressionConstraint op[21] = getOpcodeRnm(5, 31); // disjunctionExpressionConstraint op[22] = getOpcodeRnm(6, 39); // exclusionExpressionConstraint {int[] a = {24,25}; op[23] = getOpcodeCat(a);} op[24] = getOpcodeRnm(7, 45); // subExpressionConstraint op[25] = getOpcodeRep((char)1, Character.MAX_VALUE, 26); {int[] a = {27,28,29,30}; op[26] = getOpcodeCat(a);} op[27] = getOpcodeRnm(48, 302); // ws op[28] = getOpcodeRnm(19, 94); // conjunction op[29] = getOpcodeRnm(48, 302); // ws op[30] = getOpcodeRnm(7, 45); // subExpressionConstraint {int[] a = {32,33}; op[31] = getOpcodeCat(a);} op[32] = getOpcodeRnm(7, 45); // subExpressionConstraint op[33] = getOpcodeRep((char)1, Character.MAX_VALUE, 34); {int[] a = {35,36,37,38}; op[34] = getOpcodeCat(a);} op[35] = getOpcodeRnm(48, 302); // ws op[36] = getOpcodeRnm(20, 107); // disjunction op[37] = getOpcodeRnm(48, 302); // ws op[38] = getOpcodeRnm(7, 45); // subExpressionConstraint {int[] a = {40,41,42,43,44}; op[39] = getOpcodeCat(a);} op[40] = getOpcodeRnm(7, 45); // subExpressionConstraint op[41] = getOpcodeRnm(48, 302); // ws op[42] = getOpcodeRnm(21, 115); // exclusion op[43] = getOpcodeRnm(48, 302); // ws op[44] = getOpcodeRnm(7, 45); // subExpressionConstraint {int[] a = {46,47}; op[45] = getOpcodeAlt(a);} op[46] = getOpcodeRnm(1, 7); // simpleExpressionConstraint {int[] a = {48,49,50,53,54}; op[47] = getOpcodeCat(a);} {char[] a = {40}; op[48] = getOpcodeTls(a);} op[49] = getOpcodeRnm(48, 302); // ws {int[] a = {51,52}; op[50] = getOpcodeAlt(a);} op[51] = getOpcodeRnm(3, 19); // compoundExpressionConstraint op[52] = getOpcodeRnm(2, 13); // refinedExpressionConstraint op[53] = getOpcodeRnm(48, 302); // ws {char[] a = {41}; op[54] = getOpcodeTls(a);} {int[] a = {56,60}; op[55] = getOpcodeCat(a);} op[56] = getOpcodeRep((char)0, (char)1, 57); {int[] a = {58,59}; op[57] = getOpcodeCat(a);} op[58] = getOpcodeRnm(9, 63); // memberOf op[59] = getOpcodeRnm(48, 302); // ws {int[] a = {61,62}; op[60] = 
getOpcodeAlt(a);} op[61] = getOpcodeRnm(10, 64); // conceptReference op[62] = getOpcodeRnm(13, 84); // wildCard {char[] a = {94}; op[63] = getOpcodeTls(a);} {int[] a = {65,66}; op[64] = getOpcodeCat(a);} op[65] = getOpcodeRnm(11, 74); // conceptId op[66] = getOpcodeRep((char)0, (char)1, 67); {int[] a = {68,69,70,71,72,73}; op[67] = getOpcodeCat(a);} op[68] = getOpcodeRnm(48, 302); // ws {char[] a = {124}; op[69] = getOpcodeTls(a);} op[70] = getOpcodeRnm(48, 302); // ws op[71] = getOpcodeRnm(12, 75); // term op[72] = getOpcodeRnm(48, 302); // ws {char[] a = {124}; op[73] = getOpcodeTls(a);} op[74] = getOpcodeRnm(47, 298); // sctId {int[] a = {76,78}; op[75] = getOpcodeCat(a);} op[76] = getOpcodeRep((char)1, Character.MAX_VALUE, 77); op[77] = getOpcodeRnm(59, 323); // nonwsNonPipe op[78] = getOpcodeRep((char)0, Character.MAX_VALUE, 79); {int[] a = {80,82}; op[79] = getOpcodeCat(a);} op[80] = getOpcodeRep((char)1, Character.MAX_VALUE, 81); op[81] = getOpcodeRnm(50, 314); // SP op[82] = getOpcodeRep((char)1, Character.MAX_VALUE, 83); op[83] = getOpcodeRnm(59, 323); // nonwsNonPipe {char[] a = {42}; op[84] = getOpcodeTls(a);} {int[] a = {86,87,88,89}; op[85] = getOpcodeAlt(a);} op[86] = getOpcodeRnm(16, 91); // descendantOrSelfOf op[87] = getOpcodeRnm(15, 90); // descendantOf op[88] = getOpcodeRnm(18, 93); // ancestorOrSelfOf op[89] = getOpcodeRnm(17, 92); // ancestorOf {char[] a = {60}; op[90] = getOpcodeTls(a);} {char[] a = {60,60}; op[91] = getOpcodeTls(a);} {char[] a = {62}; op[92] = getOpcodeTls(a);} {char[] a = {62,62}; op[93] = getOpcodeTls(a);} {int[] a = {95,106}; op[94] = getOpcodeAlt(a);} {int[] a = {96,99,102,105}; op[95] = getOpcodeCat(a);} {int[] a = {97,98}; op[96] = getOpcodeAlt(a);} {char[] a = {97}; op[97] = getOpcodeTls(a);} {char[] a = {65}; op[98] = getOpcodeTls(a);} {int[] a = {100,101}; op[99] = getOpcodeAlt(a);} {char[] a = {110}; op[100] = getOpcodeTls(a);} {char[] a = {78}; op[101] = getOpcodeTls(a);} {int[] a = {103,104}; op[102] = 
getOpcodeAlt(a);} {char[] a = {100}; op[103] = getOpcodeTls(a);} {char[] a = {68}; op[104] = getOpcodeTls(a);} op[105] = getOpcodeRnm(49, 308); // mws {char[] a = {44}; op[106] = getOpcodeTls(a);} {int[] a = {108,111,114}; op[107] = getOpcodeCat(a);} {int[] a = {109,110}; op[108] = getOpcodeAlt(a);} {char[] a = {111}; op[109] = getOpcodeTls(a);} {char[] a = {79}; op[110] = getOpcodeTls(a);} {int[] a = {112,113}; op[111] = getOpcodeAlt(a);} {char[] a = {114}; op[112] = getOpcodeTls(a);} {char[] a = {82}; op[113] = getOpcodeTls(a);} op[114] = getOpcodeRnm(49, 308); // mws {int[] a = {116,119,122,125,128,131}; op[115] = getOpcodeCat(a);} {int[] a = {117,118}; op[116] = getOpcodeAlt(a);} {char[] a = {109}; op[117] = getOpcodeTls(a);} {char[] a = {77}; op[118] = getOpcodeTls(a);} {int[] a = {120,121}; op[119] = getOpcodeAlt(a);} {char[] a = {105}; op[120] = getOpcodeTls(a);} {char[] a = {73}; op[121] = getOpcodeTls(a);} {int[] a = {123,124}; op[122] = getOpcodeAlt(a);} {char[] a = {110}; op[123] = getOpcodeTls(a);} {char[] a = {78}; op[124] = getOpcodeTls(a);} {int[] a = {126,127}; op[125] = getOpcodeAlt(a);} {char[] a = {117}; op[126] = getOpcodeTls(a);} {char[] a = {85}; op[127] = getOpcodeTls(a);} {int[] a = {129,130}; op[128] = getOpcodeAlt(a);} {char[] a = {115}; op[129] = getOpcodeTls(a);} {char[] a = {83}; op[130] = getOpcodeTls(a);} op[131] = getOpcodeRnm(49, 308); // mws {int[] a = {133,134,135}; op[132] = getOpcodeCat(a);} op[133] = getOpcodeRnm(25, 151); // subRefinement op[134] = getOpcodeRnm(48, 302); // ws op[135] = getOpcodeRep((char)0, (char)1, 136); {int[] a = {137,138}; op[136] = getOpcodeAlt(a);} op[137] = getOpcodeRnm(23, 139); // conjunctionRefinementSet op[138] = getOpcodeRnm(24, 145); // disjunctionRefinementSet op[139] = getOpcodeRep((char)1, Character.MAX_VALUE, 140); {int[] a = {141,142,143,144}; op[140] = getOpcodeCat(a);} op[141] = getOpcodeRnm(48, 302); // ws op[142] = getOpcodeRnm(19, 94); // conjunction op[143] = getOpcodeRnm(48, 302); // 
ws op[144] = getOpcodeRnm(25, 151); // subRefinement op[145] = getOpcodeRep((char)1, Character.MAX_VALUE, 146); {int[] a = {147,148,149,150}; op[146] = getOpcodeCat(a);} op[147] = getOpcodeRnm(48, 302); // ws op[148] = getOpcodeRnm(20, 107); // disjunction op[149] = getOpcodeRnm(48, 302); // ws op[150] = getOpcodeRnm(25, 151); // subRefinement {int[] a = {152,153,154}; op[151] = getOpcodeAlt(a);} op[152] = getOpcodeRnm(26, 160); // attributeSet op[153] = getOpcodeRnm(30, 187); // attributeGroup {int[] a = {155,156,157,158,159}; op[154] = getOpcodeCat(a);} {char[] a = {40}; op[155] = getOpcodeTls(a);} op[156] = getOpcodeRnm(48, 302); // ws op[157] = getOpcodeRnm(22, 132); // refinement op[158] = getOpcodeRnm(48, 302); // ws {char[] a = {41}; op[159] = getOpcodeTls(a);} {int[] a = {161,162,163}; op[160] = getOpcodeCat(a);} op[161] = getOpcodeRnm(29, 179); // subAttributeSet op[162] = getOpcodeRnm(48, 302); // ws op[163] = getOpcodeRep((char)0, (char)1, 164); {int[] a = {165,166}; op[164] = getOpcodeAlt(a);} op[165] = getOpcodeRnm(27, 167); // conjunctionAttributeSet op[166] = getOpcodeRnm(28, 173); // disjunctionAttributeSet op[167] = getOpcodeRep((char)1, Character.MAX_VALUE, 168); {int[] a = {169,170,171,172}; op[168] = getOpcodeCat(a);} op[169] = getOpcodeRnm(48, 302); // ws op[170] = getOpcodeRnm(19, 94); // conjunction op[171] = getOpcodeRnm(48, 302); // ws op[172] = getOpcodeRnm(29, 179); // subAttributeSet op[173] = getOpcodeRep((char)1, Character.MAX_VALUE, 174); {int[] a = {175,176,177,178}; op[174] = getOpcodeCat(a);} op[175] = getOpcodeRnm(48, 302); // ws op[176] = getOpcodeRnm(20, 107); // disjunction op[177] = getOpcodeRnm(48, 302); // ws op[178] = getOpcodeRnm(29, 179); // subAttributeSet {int[] a = {180,181}; op[179] = getOpcodeAlt(a);} op[180] = getOpcodeRnm(31, 197); // attribute {int[] a = {182,183,184,185,186}; op[181] = getOpcodeCat(a);} {char[] a = {40}; op[182] = getOpcodeTls(a);} op[183] = getOpcodeRnm(48, 302); // ws op[184] = getOpcodeRnm(26, 
160); // attributeSet op[185] = getOpcodeRnm(48, 302); // ws {char[] a = {41}; op[186] = getOpcodeTls(a);} {int[] a = {188,192,193,194,195,196}; op[187] = getOpcodeCat(a);} op[188] = getOpcodeRep((char)0, (char)1, 189); {int[] a = {190,191}; op[189] = getOpcodeCat(a);} op[190] = getOpcodeRnm(32, 225); // cardinality op[191] = getOpcodeRnm(48, 302); // ws {char[] a = {123}; op[192] = getOpcodeTls(a);} op[193] = getOpcodeRnm(48, 302); // ws op[194] = getOpcodeRnm(26, 160); // attributeSet op[195] = getOpcodeRnm(48, 302); // ws {char[] a = {125}; op[196] = getOpcodeTls(a);} {int[] a = {198,202,206,210,211,212}; op[197] = getOpcodeCat(a);} op[198] = getOpcodeRep((char)0, (char)1, 199); {int[] a = {200,201}; op[199] = getOpcodeCat(a);} op[200] = getOpcodeRnm(32, 225); // cardinality op[201] = getOpcodeRnm(48, 302); // ws op[202] = getOpcodeRep((char)0, (char)1, 203); {int[] a = {204,205}; op[203] = getOpcodeCat(a);} op[204] = getOpcodeRnm(35, 235); // reverseFlag op[205] = getOpcodeRnm(48, 302); // ws op[206] = getOpcodeRep((char)0, (char)1, 207); {int[] a = {208,209}; op[207] = getOpcodeCat(a);} op[208] = getOpcodeRnm(36, 236); // attributeOperator op[209] = getOpcodeRnm(48, 302); // ws op[210] = getOpcodeRnm(37, 239); // attributeName op[211] = getOpcodeRnm(48, 302); // ws {int[] a = {213,217,221}; op[212] = getOpcodeAlt(a);} {int[] a = {214,215,216}; op[213] = getOpcodeCat(a);} op[214] = getOpcodeRnm(39, 252); // expressionComparisonOperator op[215] = getOpcodeRnm(48, 302); // ws op[216] = getOpcodeRnm(38, 242); // expressionConstraintValue {int[] a = {218,219,220}; op[217] = getOpcodeCat(a);} op[218] = getOpcodeRnm(40, 255); // numericComparisonOperator op[219] = getOpcodeRnm(48, 302); // ws op[220] = getOpcodeRnm(42, 265); // numericValue {int[] a = {222,223,224}; op[221] = getOpcodeCat(a);} op[222] = getOpcodeRnm(41, 262); // stringComparisonOperator op[223] = getOpcodeRnm(48, 302); // ws op[224] = getOpcodeRnm(43, 270); // stringValue {int[] a = 
{226,227,228,229,232}; op[225] = getOpcodeCat(a);} {char[] a = {91}; op[226] = getOpcodeTls(a);} op[227] = getOpcodeRnm(46, 292); // nonNegativeIntegerValue op[228] = getOpcodeRnm(33, 233); // to {int[] a = {230,231}; op[229] = getOpcodeAlt(a);} op[230] = getOpcodeRnm(46, 292); // nonNegativeIntegerValue op[231] = getOpcodeRnm(34, 234); // many {char[] a = {93}; op[232] = getOpcodeTls(a);} {char[] a = {46,46}; op[233] = getOpcodeTls(a);} {char[] a = {42}; op[234] = getOpcodeTls(a);} {char[] a = {82}; op[235] = getOpcodeTls(a);} {int[] a = {237,238}; op[236] = getOpcodeAlt(a);} op[237] = getOpcodeRnm(16, 91); // descendantOrSelfOf op[238] = getOpcodeRnm(15, 90); // descendantOf {int[] a = {240,241}; op[239] = getOpcodeAlt(a);} op[240] = getOpcodeRnm(10, 64); // conceptReference op[241] = getOpcodeRnm(13, 84); // wildCard {int[] a = {243,244}; op[242] = getOpcodeAlt(a);} op[243] = getOpcodeRnm(1, 7); // simpleExpressionConstraint {int[] a = {245,246,247,250,251}; op[244] = getOpcodeCat(a);} {char[] a = {40}; op[245] = getOpcodeTls(a);} op[246] = getOpcodeRnm(48, 302); // ws {int[] a = {248,249}; op[247] = getOpcodeAlt(a);} op[248] = getOpcodeRnm(2, 13); // refinedExpressionConstraint op[249] = getOpcodeRnm(3, 19); // compoundExpressionConstraint op[250] = getOpcodeRnm(48, 302); // ws {char[] a = {41}; op[251] = getOpcodeTls(a);} {int[] a = {253,254}; op[252] = getOpcodeAlt(a);} {char[] a = {61}; op[253] = getOpcodeTls(a);} {char[] a = {33,61}; op[254] = getOpcodeTls(a);} {int[] a = {256,257,258,259,260,261}; op[255] = getOpcodeAlt(a);} {char[] a = {61}; op[256] = getOpcodeTls(a);} {char[] a = {33,61}; op[257] = getOpcodeTls(a);} {char[] a = {60,61}; op[258] = getOpcodeTls(a);} {char[] a = {60}; op[259] = getOpcodeTls(a);} {char[] a = {62,61}; op[260] = getOpcodeTls(a);} {char[] a = {62}; op[261] = getOpcodeTls(a);} {int[] a = {263,264}; op[262] = getOpcodeAlt(a);} {char[] a = {61}; op[263] = getOpcodeTls(a);} {char[] a = {33,61}; op[264] = getOpcodeTls(a);} {int[] a 
= {266,267}; op[265] = getOpcodeCat(a);} {char[] a = {35}; op[266] = getOpcodeTls(a);} {int[] a = {268,269}; op[267] = getOpcodeAlt(a);} op[268] = getOpcodeRnm(45, 287); // decimalValue op[269] = getOpcodeRnm(44, 277); // integerValue {int[] a = {271,272,276}; op[270] = getOpcodeCat(a);} op[271] = getOpcodeRnm(54, 318); // QM op[272] = getOpcodeRep((char)1, Character.MAX_VALUE, 273); {int[] a = {274,275}; op[273] = getOpcodeAlt(a);} op[274] = getOpcodeRnm(60, 329); // anyNonEscapedChar op[275] = getOpcodeRnm(61, 339); // escapedChar op[276] = getOpcodeRnm(54, 318); // QM {int[] a = {278,286}; op[277] = getOpcodeAlt(a);} {int[] a = {279,283,284}; op[278] = getOpcodeCat(a);} op[279] = getOpcodeRep((char)0, (char)1, 280); {int[] a = {281,282}; op[280] = getOpcodeAlt(a);} {char[] a = {45}; op[281] = getOpcodeTls(a);} {char[] a = {43}; op[282] = getOpcodeTls(a);} op[283] = getOpcodeRnm(58, 322); // digitNonZero op[284] = getOpcodeRep((char)0, Character.MAX_VALUE, 285); op[285] = getOpcodeRnm(56, 320); // digit op[286] = getOpcodeRnm(57, 321); // zero {int[] a = {288,289,290}; op[287] = getOpcodeCat(a);} op[288] = getOpcodeRnm(44, 277); // integerValue {char[] a = {46}; op[289] = getOpcodeTls(a);} op[290] = getOpcodeRep((char)1, Character.MAX_VALUE, 291); op[291] = getOpcodeRnm(56, 320); // digit {int[] a = {293,297}; op[292] = getOpcodeAlt(a);} {int[] a = {294,295}; op[293] = getOpcodeCat(a);} op[294] = getOpcodeRnm(58, 322); // digitNonZero op[295] = getOpcodeRep((char)0, Character.MAX_VALUE, 296); op[296] = getOpcodeRnm(56, 320); // digit op[297] = getOpcodeRnm(57, 321); // zero {int[] a = {299,300}; op[298] = getOpcodeCat(a);} op[299] = getOpcodeRnm(58, 322); // digitNonZero op[300] = getOpcodeRep((char)5, (char)17, 301); op[301] = getOpcodeRnm(56, 320); // digit op[302] = getOpcodeRep((char)0, Character.MAX_VALUE, 303); {int[] a = {304,305,306,307}; op[303] = getOpcodeAlt(a);} op[304] = getOpcodeRnm(50, 314); // SP op[305] = getOpcodeRnm(51, 315); // HTAB op[306] = 
getOpcodeRnm(52, 316); // CR op[307] = getOpcodeRnm(53, 317); // LF op[308] = getOpcodeRep((char)1, Character.MAX_VALUE, 309); {int[] a = {310,311,312,313}; op[309] = getOpcodeAlt(a);} op[310] = getOpcodeRnm(50, 314); // SP op[311] = getOpcodeRnm(51, 315); // HTAB op[312] = getOpcodeRnm(52, 316); // CR op[313] = getOpcodeRnm(53, 317); // LF {char[] a = {32}; op[314] = getOpcodeTbs(a);} {char[] a = {9}; op[315] = getOpcodeTbs(a);} {char[] a = {13}; op[316] = getOpcodeTbs(a);} {char[] a = {10}; op[317] = getOpcodeTbs(a);} {char[] a = {34}; op[318] = getOpcodeTbs(a);} {char[] a = {92}; op[319] = getOpcodeTbs(a);} op[320] = getOpcodeTrg((char)48, (char)57); {char[] a = {48}; op[321] = getOpcodeTbs(a);} op[322] = getOpcodeTrg((char)49, (char)57); {int[] a = {324,325,326,327,328}; op[323] = getOpcodeAlt(a);} op[324] = getOpcodeTrg((char)33, (char)123); op[325] = getOpcodeTrg((char)125, (char)126); op[326] = getOpcodeRnm(62, 346); // UTF8-2 op[327] = getOpcodeRnm(63, 349); // UTF8-3 op[328] = getOpcodeRnm(64, 366); // UTF8-4 {int[] a = {330,331,332,333,334,335,336,337,338}; op[329] = getOpcodeAlt(a);} op[330] = getOpcodeRnm(51, 315); // HTAB op[331] = getOpcodeRnm(52, 316); // CR op[332] = getOpcodeRnm(53, 317); // LF op[333] = getOpcodeTrg((char)32, (char)33); op[334] = getOpcodeTrg((char)35, (char)91); op[335] = getOpcodeTrg((char)93, (char)126); op[336] = getOpcodeRnm(62, 346); // UTF8-2 op[337] = getOpcodeRnm(63, 349); // UTF8-3 op[338] = getOpcodeRnm(64, 366); // UTF8-4 {int[] a = {340,343}; op[339] = getOpcodeAlt(a);} {int[] a = {341,342}; op[340] = getOpcodeCat(a);} op[341] = getOpcodeRnm(55, 319); // BS op[342] = getOpcodeRnm(54, 318); // QM {int[] a = {344,345}; op[343] = getOpcodeCat(a);} op[344] = getOpcodeRnm(55, 319); // BS op[345] = getOpcodeRnm(55, 319); // BS {int[] a = {347,348}; op[346] = getOpcodeCat(a);} op[347] = getOpcodeTrg((char)194, (char)223); op[348] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {350,354,358,362}; op[349] = getOpcodeAlt(a);} 
{int[] a = {351,352,353}; op[350] = getOpcodeCat(a);} {char[] a = {224}; op[351] = getOpcodeTbs(a);} op[352] = getOpcodeTrg((char)160, (char)191); op[353] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {355,356}; op[354] = getOpcodeCat(a);} op[355] = getOpcodeTrg((char)225, (char)236); op[356] = getOpcodeRep((char)2, (char)2, 357); op[357] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {359,360,361}; op[358] = getOpcodeCat(a);} {char[] a = {237}; op[359] = getOpcodeTbs(a);} op[360] = getOpcodeTrg((char)128, (char)159); op[361] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {363,364}; op[362] = getOpcodeCat(a);} op[363] = getOpcodeTrg((char)238, (char)239); op[364] = getOpcodeRep((char)2, (char)2, 365); op[365] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {367,372,376}; op[366] = getOpcodeAlt(a);} {int[] a = {368,369,370}; op[367] = getOpcodeCat(a);} {char[] a = {240}; op[368] = getOpcodeTbs(a);} op[369] = getOpcodeTrg((char)144, (char)191); op[370] = getOpcodeRep((char)2, (char)2, 371); op[371] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {373,374}; op[372] = getOpcodeCat(a);} op[373] = getOpcodeTrg((char)241, (char)243); op[374] = getOpcodeRep((char)3, (char)3, 375); op[375] = getOpcodeRnm(65, 381); // UTF8-tail {int[] a = {377,378,379}; op[376] = getOpcodeCat(a);} {char[] a = {244}; op[377] = getOpcodeTbs(a);} op[378] = getOpcodeTrg((char)128, (char)143); op[379] = getOpcodeRep((char)2, (char)2, 380); op[380] = getOpcodeRnm(65, 381); // UTF8-tail op[381] = getOpcodeTrg((char)128, (char)191); return op; } /** Displays the original SABNF grammar on the output device. 
 * Writes the original SABNF grammar (from which this APG parser class was
 * generated) to the given output device, one grammar rule per line.
 * NOTE(review): generated code — the rule strings below must match the
 * grammar used to generate the opcode table; do not hand-edit them.
 * @param out the output device to display on.*/
public static void display(PrintStream out){
    // Grammar file header.
    out.println(";");
    out.println("; package.name.ECLparserBrief.java");
    out.println(";");
    // Top-level expression-constraint rules.
    out.println("expressionConstraint = ws ( refinedExpressionConstraint / compoundExpressionConstraint / simpleExpressionConstraint ) ws");
    out.println("simpleExpressionConstraint = [constraintOperator ws] focusConcept ");
    out.println("refinedExpressionConstraint = simpleExpressionConstraint ws \":\" ws refinement");
    out.println("compoundExpressionConstraint = conjunctionExpressionConstraint / disjunctionExpressionConstraint / exclusionExpressionConstraint");
    out.println("conjunctionExpressionConstraint = subExpressionConstraint 1*(ws conjunction ws subExpressionConstraint)");
    out.println("disjunctionExpressionConstraint = subExpressionConstraint 1*(ws disjunction ws subExpressionConstraint)");
    out.println("exclusionExpressionConstraint = subExpressionConstraint ws exclusion ws subExpressionConstraint");
    out.println("subExpressionConstraint = simpleExpressionConstraint / \"(\" ws (compoundExpressionConstraint / refinedExpressionConstraint) ws \")\"");
    // Focus concept and concept references.
    out.println("focusConcept = [ memberOf ws ] (conceptReference / wildCard) ");
    out.println("memberOf = \"^\"");
    out.println("conceptReference = conceptId [ws \"|\" ws term ws \"|\"]");
    out.println("conceptId = sctId");
    out.println("term = 1*nonwsNonPipe *( 1*SP 1*nonwsNonPipe )");
    out.println("wildCard = \"*\"");
    // Constraint operators.
    out.println("constraintOperator = descendantOrSelfOf / descendantOf / ancestorOrSelfOf / ancestorOf");
    out.println("descendantOf = \"<\"");
    out.println("descendantOrSelfOf = \"<<\" ");
    out.println("ancestorOf = \">\" ");
    out.println("ancestorOrSelfOf = \">>\" ");
    // Boolean connectives (case-insensitive keywords).
    out.println("conjunction = ((\"a\"/\"A\") (\"n\"/\"N\") (\"d\"/\"D\") mws) / \",\"");
    out.println("disjunction = (\"o\"/\"O\") (\"r\"/\"R\") mws");
    out.println("exclusion = (\"m\"/\"M\") (\"i\"/\"I\") (\"n\"/\"N\") (\"u\"/\"U\") (\"s\"/\"S\") mws ");
    // Refinements.
    out.println("refinement = subRefinement ws [conjunctionRefinementSet / disjunctionRefinementSet]");
    out.println("conjunctionRefinementSet = 1*(ws conjunction ws subRefinement)");
    out.println("disjunctionRefinementSet = 1*(ws disjunction ws subRefinement)");
    out.println("subRefinement = attributeSet / attributeGroup / \"(\" ws refinement ws \")\"");
    // Attribute sets, groups and single attributes.
    out.println("attributeSet = subAttributeSet ws [conjunctionAttributeSet / disjunctionAttributeSet]");
    out.println("conjunctionAttributeSet = 1*(ws conjunction ws subAttributeSet)");
    out.println("disjunctionAttributeSet = 1*(ws disjunction ws subAttributeSet)");
    out.println("subAttributeSet = attribute / \"(\" ws attributeSet ws \")\"");
    out.println("attributeGroup = [cardinality ws] \"{\" ws attributeSet ws \"}\"");
    out.println("attribute = [cardinality ws] [reverseFlag ws] [attributeOperator ws] attributeName ws (expressionComparisonOperator ws expressionConstraintValue / numericComparisonOperator ws numericValue / stringComparisonOperator ws stringValue )");
    out.println("cardinality = \"[\" nonNegativeIntegerValue to (nonNegativeIntegerValue / many) \"]\" ");
    out.println("to = \"..\"");
    out.println("many = \"*\"");
    out.println("reverseFlag = \"R\"");
    out.println("attributeOperator = descendantOrSelfOf / descendantOf ");
    out.println("attributeName = conceptReference / wildCard");
    out.println("expressionConstraintValue = simpleExpressionConstraint / \"(\" ws (refinedExpressionConstraint / compoundExpressionConstraint) ws \")\"");
    // Comparison operators and value literals.
    out.println("expressionComparisonOperator = \"=\" / \"!=\" ");
    out.println("numericComparisonOperator = \"=\" / \"!=\" / \"<=\" / \"<\" / \">=\" / \">\"");
    out.println("stringComparisonOperator = \"=\" / \"!=\" ");
    out.println("numericValue = \"#\" ( decimalValue / integerValue)");
    out.println("stringValue = QM 1*(anyNonEscapedChar / escapedChar) QM ");
    out.println("integerValue = ( [\"-\"/\"+\"] digitNonZero *digit ) / zero");
    out.println("decimalValue = integerValue \".\" 1*digit");
    out.println("nonNegativeIntegerValue = (digitNonZero *digit ) / zero");
    out.println("sctId = digitNonZero 5*17( digit )");
    // Whitespace and core terminals.
    out.println("ws = *( SP / HTAB / CR / LF ) ; optional white space");
    out.println("mws = 1*( SP / HTAB / CR / LF ) ; mandatory white space");
    out.println("SP = %x20 ; space");
    out.println("HTAB = %x09 ; tab");
    out.println("CR = %x0D ; carriage return");
    out.println("LF = %x0A ; line feed");
    out.println("QM = %x22 ; quotation mark");
    out.println("BS = %x5C ; back slash");
    out.println("digit = %x30-39");
    out.println("zero = %x30");
    out.println("digitNonZero = %x31-39 ");
    // Character classes, including multi-byte UTF-8 sequences.
    out.println("nonwsNonPipe = %x21-7B / %x7D-7E / UTF8-2 / UTF8-3 / UTF8-4");
    out.println("anyNonEscapedChar = HTAB / CR / LF / %x20-21 / %x23-5B / %x5D-7E / UTF8-2 / UTF8-3 / UTF8-4");
    out.println("escapedChar = BS QM / BS BS");
    out.println("UTF8-2 = %xC2-DF UTF8-tail");
    out.println("UTF8-3 = %xE0 %xA0-BF UTF8-tail / %xE1-EC 2( UTF8-tail ) / %xED %x80-9F UTF8-tail / %xEE-EF 2( UTF8-tail )");
    out.println("UTF8-4 = %xF0 %x90-BF 2( UTF8-tail ) / %xF1-F3 3( UTF8-tail ) / %xF4 %x80-8F 2( UTF8-tail )");
    out.println("UTF8-tail = %x80-BF");
}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.lightsail.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request to set the access that a Lightsail resource (an instance) has to a bucket.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/SetResourceAccessForBucket"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SetResourceAccessForBucketRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the Lightsail instance for which to set bucket access. The instance must be in a running or stopped
     * state.
     */
    private String resourceName;

    /**
     * The name of the bucket for which to set access to another Lightsail resource.
     */
    private String bucketName;

    /**
     * The access setting. {@code allow} allows access to the bucket and its objects; {@code deny} denies access and is
     * used to remove access for a resource previously set to {@code allow}.
     */
    private String access;

    /**
     * Sets the name of the Lightsail instance for which to set bucket access.
     *
     * @param resourceName
     *        the instance name; the instance must be in a running or stopped state
     */
    public void setResourceName(String resourceName) {
        this.resourceName = resourceName;
    }

    /**
     * @return the name of the Lightsail instance for which to set bucket access; the instance must be in a running or
     *         stopped state
     */
    public String getResourceName() {
        return this.resourceName;
    }

    /**
     * Fluent variant of {@link #setResourceName(String)}.
     *
     * @param resourceName
     *        the instance name; the instance must be in a running or stopped state
     * @return this request, so that method calls can be chained together
     */
    public SetResourceAccessForBucketRequest withResourceName(String resourceName) {
        setResourceName(resourceName);
        return this;
    }

    /**
     * Sets the name of the bucket for which to set access to another Lightsail resource.
     *
     * @param bucketName
     *        the bucket name
     */
    public void setBucketName(String bucketName) {
        this.bucketName = bucketName;
    }

    /**
     * @return the name of the bucket for which to set access to another Lightsail resource
     */
    public String getBucketName() {
        return this.bucketName;
    }

    /**
     * Fluent variant of {@link #setBucketName(String)}.
     *
     * @param bucketName
     *        the bucket name
     * @return this request, so that method calls can be chained together
     */
    public SetResourceAccessForBucketRequest withBucketName(String bucketName) {
        setBucketName(bucketName);
        return this;
    }

    /**
     * Sets the access setting: {@code allow} allows access to the bucket and its objects; {@code deny} denies access
     * and removes access for a resource previously set to {@code allow}.
     *
     * @param access
     *        the access setting
     * @see ResourceBucketAccess
     */
    public void setAccess(String access) {
        this.access = access;
    }

    /**
     * @return the access setting ({@code allow} or {@code deny})
     * @see ResourceBucketAccess
     */
    public String getAccess() {
        return this.access;
    }

    /**
     * Fluent variant of {@link #setAccess(String)}.
     *
     * @param access
     *        the access setting
     * @return this request, so that method calls can be chained together
     * @see ResourceBucketAccess
     */
    public SetResourceAccessForBucketRequest withAccess(String access) {
        setAccess(access);
        return this;
    }

    /**
     * Fluent variant of {@link #setAccess(String)} accepting the typed enum value.
     *
     * @param access
     *        the access setting
     * @return this request, so that method calls can be chained together
     * @see ResourceBucketAccess
     */
    public SetResourceAccessForBucketRequest withAccess(ResourceBucketAccess access) {
        this.access = access.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceName() != null)
            sb.append("ResourceName: ").append(getResourceName()).append(",");
        if (getBucketName() != null)
            sb.append("BucketName: ").append(getBucketName()).append(",");
        if (getAccess() != null)
            sb.append("Access: ").append(getAccess());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so this also rejects null.
        if (!(obj instanceof SetResourceAccessForBucketRequest))
            return false;
        SetResourceAccessForBucketRequest other = (SetResourceAccessForBucketRequest) obj;
        // Accessed through the getters, as the generated original was, so a
        // subclass overriding them keeps the same comparison semantics.
        return Objects.equals(getResourceName(), other.getResourceName())
                && Objects.equals(getBucketName(), other.getBucketName())
                && Objects.equals(getAccess(), other.getAccess());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the previous hand-rolled loop, so hash values are unchanged.
        return Objects.hash(getResourceName(), getBucketName(), getAccess());
    }

    @Override
    public SetResourceAccessForBucketRequest clone() {
        return (SetResourceAccessForBucketRequest) super.clone();
    }

}
/******************************************************************************* * Copyright (c) 2009 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Zend Technologies *******************************************************************************/ package org.eclipse.php.internal.debug.core.xdebug.dbgp; import org.eclipse.core.runtime.Preferences; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.php.internal.debug.core.PHPDebugCoreMessages; import org.eclipse.php.internal.debug.core.PHPDebugPlugin; import org.eclipse.php.internal.debug.core.preferences.AbstractDebuggerConfigurationDialog; import org.eclipse.php.internal.debug.core.xdebug.XDebugPreferenceMgr; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; /** * An XDebug configuration dialog for editing/viewing the XDebug debugger * configurations. 
 *
 * @author Shalom Gibly, Dave Kelsey
 */
public class XDebugConfigurationDialog extends
		AbstractDebuggerConfigurationDialog {

	// Selection listener shared by every combo box created in this dialog.
	private ComboListener comboListener;

	// general options
	private Text portTextBox;
	private Button showGlobals;
	private Spinner variableDepth;
	private Spinner maxChildren;
	private Button useMultiSession;
	private Combo acceptRemoteSession;

	// output capture options
	private Combo captureStdout;
	private Combo captureStderr;

	// proxy options
	private Button useProxy;
	private Text idekeyTextBox;
	private Text proxyTextBox;

	// Configuration being edited/viewed. NOTE(review): assigned in the
	// constructor but not read in the code visible here — confirm it is used
	// elsewhere in the class before removing.
	private XDebugDebuggerConfiguration xdebugDebuggerConfiguration;

	/**
	 * Constructs a new XDebug configuration dialog.
	 *
	 * @param xdebugDebuggerConfiguration
	 *            the debugger configuration to edit/view
	 * @param parentShell
	 *            the parent shell; the dialog is made resizable
	 */
	public XDebugConfigurationDialog(
			XDebugDebuggerConfiguration xdebugDebuggerConfiguration,
			Shell parentShell) {
		super(parentShell);
		setShellStyle(getShellStyle() | SWT.RESIZE);
		this.xdebugDebuggerConfiguration = xdebugDebuggerConfiguration;
	}

	/**
	 * Enables or disables the proxy-related fields (IDE key and proxy address)
	 * to match the "use proxy" checkbox state.
	 *
	 * @param selection
	 *            whether the proxy fields should be enabled
	 */
	private void toggleProxyFields(boolean selection) {
		idekeyTextBox.setEnabled(selection);
		proxyTextBox.setEnabled(selection);
	}

	/**
	 * Builds the dialog body: three subsections (general, output capture,
	 * proxy) populated with controls keyed to XDebug preference names, then
	 * loads the current preference values into the controls.
	 */
	protected Control createDialogArea(Composite parent) {
		comboListener = new ComboListener();
		parent = (Composite) super.createDialogArea(parent);
		setTitle(PHPDebugCoreMessages.XDebugConfigurationDialog_mainTitle);
		Composite[] subsections = createSubsections(
				parent,
				PHPDebugCoreMessages.XDebugConfigurationDialog_generalGroup,
				PHPDebugCoreMessages.XDebugConfigurationDialog_captureGroup,
				PHPDebugCoreMessages.XDebugConfigurationDialog_proxyGroup);
		// main
		Composite mainSubSection = subsections[0];
		addLabelControl(mainSubSection,
				PHPDebugCoreMessages.DebuggerConfigurationDialog_debugPort,
				XDebugPreferenceMgr.XDEBUG_PREF_PORT);
		portTextBox = addNumTextField(mainSubSection,
				XDebugPreferenceMgr.XDEBUG_PREF_PORT, 5, 2, false);
		showGlobals = addCheckBox(
				mainSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_showSuperGlobals,
				XDebugPreferenceMgr.XDEBUG_PREF_SHOWSUPERGLOBALS, 0);
		addLabelControl(mainSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_maxArrayDepth,
				XDebugPreferenceMgr.XDEBUG_PREF_ARRAYDEPTH);
		variableDepth = addVariableLevel(mainSubSection,
				XDebugPreferenceMgr.XDEBUG_PREF_ARRAYDEPTH, 1, 150, 2);
		addLabelControl(mainSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_maxChildren,
				XDebugPreferenceMgr.XDEBUG_PREF_CHILDREN);
		maxChildren = addVariableLevel(mainSubSection,
				XDebugPreferenceMgr.XDEBUG_PREF_CHILDREN, 1, 500, 2);
		useMultiSession = addCheckBox(mainSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_useMultisession,
				XDebugPreferenceMgr.XDEBUG_PREF_MULTISESSION, 0);
		acceptRemoteSession = addComboField(mainSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_remoteSession,
				XDebugPreferenceMgr.remoteSessionOptions);
		// output capture
		Composite captureSubSection = subsections[1];
		captureStdout = addComboField(captureSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_captureStdout,
				XDebugPreferenceMgr.captureOutputOptions);
		captureStderr = addComboField(captureSubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_captureStderr,
				XDebugPreferenceMgr.captureOutputOptions);
		// proxy
		Composite proxySubSection = subsections[2];
		useProxy = addCheckBox(proxySubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_useProxy,
				XDebugPreferenceMgr.XDEBUG_PREF_USEPROXY, 0);
		// Keep the proxy fields' enabled state in sync with the checkbox.
		useProxy.addSelectionListener(new SelectionListener() {
			public void widgetSelected(SelectionEvent e) {
				toggleProxyFields(useProxy.getSelection());
			}

			public void widgetDefaultSelected(SelectionEvent e) {
				toggleProxyFields(useProxy.getSelection());
			}
		});
		addLabelControl(proxySubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_idekey,
				XDebugPreferenceMgr.XDEBUG_PREF_IDEKEY);
		idekeyTextBox = addATextField(proxySubSection,
				XDebugPreferenceMgr.XDEBUG_PREF_IDEKEY, 100, 2);
		addLabelControl(proxySubSection,
				PHPDebugCoreMessages.XDebugConfigurationDialog_proxy,
				XDebugPreferenceMgr.XDEBUG_PREF_PROXY);
		proxyTextBox = addATextField(proxySubSection,
				XDebugPreferenceMgr.XDEBUG_PREF_PROXY, 100, 2);
		GridData gridData = (GridData) proxyTextBox.getLayoutData();
		gridData.widthHint = convertWidthInCharsToPixels(90);
		internalInitializeValues(); // Initialize the dialog's values.
		return parent;
	}

	/**
	 * Adds a text field whose content is validated as a number on every
	 * modification.
	 *
	 * @param parent
	 *            composite to add the field to
	 * @param key
	 *            preference key attached to the field
	 * @param textLimit
	 *            maximum number of characters
	 * @param horizontalIndent
	 *            layout indent
	 * @param isTimeout
	 *            passed to the validate listener to select the validation rule
	 * @return the created text field
	 */
	private Text addNumTextField(Composite parent, String key, int textLimit,
			int horizontalIndent, boolean isTimeout) {
		Text text = super
				.addTextField(parent, key, textLimit, horizontalIndent);
		text.addModifyListener(new NumFieldValidateListener(isTimeout));
		return text;
	}

	/**
	 * Adds a single-line bordered text field, tagged with the given preference
	 * key and spanning two grid columns.
	 *
	 * @param parent
	 *            composite to add the field to
	 * @param key
	 *            preference key stored as the widget's data
	 * @param minWidth
	 *            minimum layout width in pixels
	 * @param horizontalIndent
	 *            layout indent
	 * @return the created text field
	 */
	private Text addATextField(Composite parent, String key, int minWidth,
			int horizontalIndent) {
		Text textBox = new Text(parent, SWT.BORDER | SWT.SINGLE);
		textBox.setData(key);
		GridData data = new GridData(GridData.FILL_HORIZONTAL);
		data.horizontalIndent = horizontalIndent;
		data.horizontalSpan = 2;
		data.grabExcessHorizontalSpace = true;
		data.minimumWidth = minWidth;
		textBox.setLayoutData(data);
		return textBox;
	}

	/**
	 * Adds a label followed by a read-only drop-down combo populated with the
	 * given options; the shared {@code comboListener} is attached to it.
	 *
	 * @param parent
	 *            composite to add the combo to
	 * @param text
	 *            label text
	 * @param options
	 *            the combo items
	 * @return the created combo
	 */
	private Combo addComboField(Composite parent, String text, String[] options) {
		addLabelControl(parent, text, null);
		Combo comboBox = new Combo(parent, SWT.DROP_DOWN | SWT.READ_ONLY);
		GridData data = new GridData(GridData.FILL_HORIZONTAL);
		data.horizontalIndent = 1;
		data.horizontalSpan = 2;
		data.grabExcessHorizontalSpace = true;
		comboBox.setLayoutData(data);
		comboBox.setItems(options);
		comboBox.addSelectionListener(comboListener);
		return comboBox;
	}

	/**
	 * Creates a subsection group.
* * @param parent * @param label * @return */ protected Composite[] createSubsections(Composite parent, String label, String label2, String label3) { // A cosmetic composite that will add a basic indent parent = new Composite(parent, SWT.NONE); parent.setLayout(new GridLayout(1, true)); GridData data = new GridData(GridData.FILL_BOTH); parent.setLayoutData(data); // subsection 1 Group group = new Group(parent, SWT.SHADOW_NONE); group.setText(label); data = new GridData(SWT.FILL, SWT.FILL, true, true); group.setLayoutData(data); GridLayout layout = new GridLayout(); layout.numColumns = 3; group.setLayout(layout); // subsection 2 Group group2 = new Group(parent, SWT.SHADOW_NONE); group2.setText(label2); data = new GridData(SWT.FILL, SWT.FILL, true, true); group2.setLayoutData(data); GridLayout layout2 = new GridLayout(); layout2.numColumns = 3; group2.setLayout(layout2); // subsection 3 Group group3 = new Group(parent, SWT.SHADOW_NONE); group3.setText(label3); data = new GridData(SWT.FILL, SWT.FILL, true, true); group3.setLayoutData(data); GridLayout layout3 = new GridLayout(); layout3.numColumns = 2; group3.setLayout(layout3); return new Group[] { group, group2, group3 }; } /* * (non-Javadoc) * * @see org.eclipse.jface.dialogs.Dialog#okPressed() */ protected void okPressed() { // TODO: move to preference manager Preferences prefs = XDebugPreferenceMgr.getPreferences(); // general prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_PORT, portTextBox .getText()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_SHOWSUPERGLOBALS, showGlobals.getSelection()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_ARRAYDEPTH, variableDepth.getSelection()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_CHILDREN, maxChildren .getSelection()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_MULTISESSION, useMultiSession.getSelection()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_REMOTESESSION, acceptRemoteSession.getSelectionIndex()); // capture output 
prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_CAPTURESTDOUT, captureStdout.getSelectionIndex()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_CAPTURESTDERR, captureStderr.getSelectionIndex()); // proxy prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_USEPROXY, useProxy .getSelection()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_IDEKEY, idekeyTextBox .getText()); prefs.setValue(XDebugPreferenceMgr.XDEBUG_PREF_PROXY, proxyTextBox .getText()); DBGpProxyHandler.instance.configure(); PHPDebugPlugin.getDefault().savePluginPreferences(); // save super.okPressed(); } // Initialize the dialog's values. private void internalInitializeValues() { // TODO: move to preference manager Preferences prefs = XDebugPreferenceMgr.getPreferences(); int port = prefs.getInt(XDebugPreferenceMgr.XDEBUG_PREF_PORT); if (0 == port) { XDebugPreferenceMgr.setDefaults(); port = prefs.getInt(XDebugPreferenceMgr.XDEBUG_PREF_PORT); } portTextBox.setText(Integer.toString(port)); showGlobals.setSelection(prefs .getBoolean(XDebugPreferenceMgr.XDEBUG_PREF_SHOWSUPERGLOBALS)); useMultiSession.setSelection(prefs .getBoolean(XDebugPreferenceMgr.XDEBUG_PREF_MULTISESSION)); variableDepth.setSelection(prefs .getInt(XDebugPreferenceMgr.XDEBUG_PREF_ARRAYDEPTH)); maxChildren.setSelection(prefs .getInt(XDebugPreferenceMgr.XDEBUG_PREF_CHILDREN)); acceptRemoteSession.select(prefs .getInt(XDebugPreferenceMgr.XDEBUG_PREF_REMOTESESSION)); // capture output captureStdout.select(prefs .getInt(XDebugPreferenceMgr.XDEBUG_PREF_CAPTURESTDOUT)); captureStderr.select(prefs .getInt(XDebugPreferenceMgr.XDEBUG_PREF_CAPTURESTDERR)); // proxy defaults boolean useProxyState = prefs .getBoolean(XDebugPreferenceMgr.XDEBUG_PREF_USEPROXY); useProxy.setSelection(useProxyState); String ideKey = prefs.getString(XDebugPreferenceMgr.XDEBUG_PREF_IDEKEY); if (ideKey == null || ideKey.length() == 0) { ideKey = DBGpProxyHandler.instance.generateIDEKey(); } idekeyTextBox.setText(ideKey); proxyTextBox.setText(prefs 
.getString(XDebugPreferenceMgr.XDEBUG_PREF_PROXY)); toggleProxyFields(useProxyState); } /** * add a spinner control * * @param parent * @param key * @param min * @param max * @param horizontalIndent * @return */ private Spinner addVariableLevel(Composite parent, String key, int min, int max, int horizontalIndent) { Spinner spin = new Spinner(parent, SWT.VERTICAL); spin.setData(key); spin.setMinimum(min); spin.setMaximum(max); GridData data = new GridData(); data.horizontalIndent = horizontalIndent; data.horizontalSpan = 2; spin.setLayoutData(data); spin.setIncrement(1); spin.setPageIncrement(3); return spin; } class ComboListener implements SelectionListener { public void widgetDefaultSelected(SelectionEvent e) { // TODO Auto-generated method stub Object source = e.getSource(); if (source == acceptRemoteSession) { } else if (source == captureStdout) { } else if (source == captureStderr) { } } public void widgetSelected(SelectionEvent e) { // TODO Auto-generated method stub Object source = e.getSource(); if (source == acceptRemoteSession) { } else if (source == captureStdout) { } else if (source == captureStderr) { } } } /** * numeric field validator class * */ class NumFieldValidateListener implements ModifyListener { private boolean timeoutField; NumFieldValidateListener(boolean isTimeout) { timeoutField = isTimeout; } public void modifyText(ModifyEvent e) { String errorMessage = null; boolean valid = true; String value = ((Text) e.widget).getText(); try { Integer iValue = new Integer(value); int i = iValue.intValue(); if (!timeoutField) { if (i <= 0 || i > 65535) { valid = false; errorMessage = PHPDebugCoreMessages.XDebugConfigurationDialog_invalidPortRange; } } else { if (i < 10 || i > 100000) { valid = false; errorMessage = PHPDebugCoreMessages.XDebugConfigurationDialog_invalidTimeout; } } } catch (NumberFormatException e1) { valid = false; if (!timeoutField) { errorMessage = PHPDebugCoreMessages.DebuggerConfigurationDialog_invalidPort; } else { errorMessage = 
PHPDebugCoreMessages.XDebugConfigurationDialog_invalidTimeoutValue; } } setErrorMessage(errorMessage); Button bt = getButton(IDialogConstants.OK_ID); if (bt != null) { bt.setEnabled(valid); } } } }
/* Copyright 2011-2012 Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.opera.core.systems; import com.google.common.hash.Hashing; import com.google.common.io.Files; import com.opera.core.systems.model.ScreenCaptureReply; import com.opera.core.systems.runner.OperaRunnerException; import com.opera.core.systems.runner.launcher.OperaLauncherRunner; import com.opera.core.systems.testing.Ignore; import com.opera.core.systems.testing.NoDriver; import com.opera.core.systems.testing.OperaDriverTestCase; import com.opera.core.systems.testing.drivers.TestDriverBuilder; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.openqa.selenium.Platform; import org.openqa.selenium.net.PortProber; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import static com.opera.core.systems.OperaProduct.DESKTOP; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static 
org.junit.matchers.JUnitMatchers.containsString;
import static org.junit.matchers.JUnitMatchers.hasItem;

/**
 * Integration tests for {@link OperaLauncherRunner}: construction, start/stop
 * life cycle, shutdown semantics, launcher argument building, screen capture
 * and logging level translation.
 */
@NoDriver
public class OperaLauncherRunnerTest extends OperaDriverTestCase {

  // Saved so resetEnvironmentPath() can restore the env var after each test.
  public static final String OLD_OPERA_PATH =
      System.getenv(OperaBinary.OPERA_PATH_ENV_VAR);

  public Logger logger = Logger.getLogger(getClass().getName());
  public OperaSettings settings;
  public TestOperaLauncherRunner runner;

  @Before
  public void beforeEach() {
    settings = new OperaSettings();
    settings.setRunner(OperaLauncherRunner.class);
    settings.setBinary(TestDriverBuilder.detect().getSettings().getBinary());
    settings.logging().setLevel(Level.FINE);
  }

  @After
  public void afterEach() {
    // Best effort: stop any Opera the test left running, then always release
    // the runner so one test's leak cannot poison the next.
    try {
      if (runner != null && runner.isOperaRunning()) {
        runner.stopOpera();
        assertFalse(runner.isOperaRunning());
      }
    } catch (Exception e) {
      logger.warning("Got exception while attempting to stop Opera: " + e);
    } finally {
      if (runner != null) {
        runner.shutdown();
      }
      runner = null;
      settings = null;
    }
  }

  @After
  public void resetEnvironmentPath() {
    environment.set(OperaBinary.OPERA_PATH_ENV_VAR, OLD_OPERA_PATH);
  }

  @Test
  public void constructor() {
    runner = new TestOperaLauncherRunner();
    assertNotNull(runner);
  }

  @Test
  public void constructorWithSettingsBinary() {
    settings.setBinary(OperaBinary.find());
    runner = new TestOperaLauncherRunner(settings);
    assertNotNull(runner);
  }

  @Test
  public void verifyDefaultStateOfOperaRunning() {
    runner = new TestOperaLauncherRunner(settings);
    assertFalse(runner.isOperaRunning());
  }

  @Test
  @Ignore
  public void launcherInDefaultLocationIsOverwritten()
      throws IOException, NoSuchAlgorithmException {
    OperaDriverTestCase.removeDriver();
    File outdatedLauncher = resources.executableBinary();

    // Plant a stale launcher binary in the default location...
    try {
      Files.copy(outdatedLauncher, OperaLauncherRunner.LAUNCHER_DEFAULT_LOCATION);
    } catch (FileNotFoundException e) {
      fail("Opera instance from previous was not shutdown, and leaked over into this test: "
          + e.getMessage());
    }

    // ...and verify that constructing a runner replaces it.
    try {
      runner = new TestOperaLauncherRunner(settings);
      assertFalse("launcher should have been replaced by extracted launcher",
                  Arrays.equals(md5(outdatedLauncher),
                                md5(OperaLauncherRunner.LAUNCHER_DEFAULT_LOCATION)));
    } catch (OperaRunnerException e) {
      if (e.getMessage().contains("Timeout")) {
        fail("launcher was not replaced");
      }
    }
  }

  @Test
  public void defaultProductIsDesktop() {
    assertEquals(OperaProduct.DESKTOP, settings.getProduct());
  }

  @Test
  public void unableToFindProductForGogi() {
    environment.unset(OperaBinary.OPERA_PATH_ENV_VAR);
    settings.setBinary(null);
    settings.setProduct(OperaProduct.CORE);

    try {
      runner = new TestOperaLauncherRunner(settings);
      fail("Expected exception");
    } catch (RuntimeException e) {
      assertThat(e, is(instanceOf(OperaRunnerException.class)));
      assertThat(e.getMessage(),
                 containsString("Unable to find executable for product "
                     + OperaProduct.CORE.toString()));
    }
  }

  @Test
  public void profileArgumentNotSetIfProductIsAll() {
    settings.setProduct(OperaProduct.ALL);
    assertThat(TestOperaLauncherRunner.buildArguments(settings),
               hasItem(not("-profile")));
  }

  @Test
  public void profileArgumentSetIfProductIsSpecified() {
    OperaProduct product = OperaProduct.DESKTOP;
    settings.setProduct(product);
    List<String> arguments = TestOperaLauncherRunner.buildArguments(settings);
    assertThat(arguments, hasItem("-profile"));
    assertThat(arguments, hasItem(product.getDescriptionString()));
  }

  @Test
  public void profileArgumentNotSetIfProductIsCore() {
    settings.setProduct(OperaProduct.CORE);
    settings.setBinary(OperaBinary.find(OperaProduct.ALL));
    assertThat(TestOperaLauncherRunner.buildArguments(settings),
               hasItem(not("-profile")));
  }

  @Test
  public void testDefaultCrashedState() {
    runner = new TestOperaLauncherRunner(settings);
    assertFalse(runner.hasOperaCrashed());
  }

  @Test
  public void testDefaultIsOperaRunning() {
    runner = new TestOperaLauncherRunner(settings);
    assertFalse(runner.isOperaRunning());
  }

  @Test
  public void testStartAndStopOpera() {
    runner = new TestOperaLauncherRunner(settings);
    runner.startOpera();
    assertTrue(runner.isOperaRunning());
    runner.stopOpera();
    assertFalse(runner.isOperaRunning());
  }

  @Test
  public void startAfterShutdownShouldThrow() {
    runner = new TestOperaLauncherRunner(settings);
    runner.startOpera();
    assertTrue(runner.isOperaRunning());
    runner.shutdown();
    assertFalse(runner.isOperaRunning());

    try {
      runner.startOpera();
      fail("Expected OperaRunnerException");
    } catch (RuntimeException e) {
      assertThat(e, is(instanceOf(OperaRunnerException.class)));
      assertThat(e.getMessage(), containsString("launcher was shutdown"));
    }
  }

  @Test(expected = OperaRunnerException.class)
  public void stopAfterShutdownShouldThrow() {
    runner = new TestOperaLauncherRunner(settings);
    runner.shutdown();
    runner.stopOpera();
  }

  @Test
  public void shutdownShouldNotThrow() {
    runner = new TestOperaLauncherRunner(settings);
    runner.shutdown();
  }

  @Test
  public void shutdownTwiceShouldNotThrow() {
    // shutdown() must be idempotent.
    runner = new TestOperaLauncherRunner(settings);
    runner.shutdown();
    runner.shutdown();
  }

  @Test
  public void constructorWithSettingsArguments() {
    runner = new TestOperaLauncherRunner(settings);
    runner.startOpera();
    assertTrue(runner.isOperaRunning());
  }

  @Test
  @Ignore(products = DESKTOP, value = "mzajaczkowski_watir_1_cleaned contains fix for this")
  public void testStartAndStopOperaTenTimes() {
    runner = new TestOperaLauncherRunner(settings);
    for (int i = 0; i < 10; i++) {
      runner.startOpera();
      assertTrue(runner.isOperaRunning());
      runner.stopOpera();
      assertFalse(runner.isOperaRunning());
    }
    runner.shutdown();
  }

  @Test
  public void badLauncherShouldThrow() throws IOException {
    assertTrue("Imposter launcher exists", resources.executableBinary().exists());
    settings.setLauncher(resources.executableBinary());

    try {
      runner = new TestOperaLauncherRunner(settings);
      fail("Expected OperaRunnerException");
    } catch (RuntimeException e) {
      assertThat(e, is(instanceOf(OperaRunnerException.class)));
      // Failure mode differs per platform.
      if (Platform.getCurrent().is(Platform.WINDOWS)) {
        assertThat(e.getMessage(), containsString("exited immediately"));
      } else {
        assertThat(e.getMessage(),
                   containsString("Timeout waiting for launcher to connect"));
      }
    }
  }

  @Test
  public void isOperaRunning() {
    runner = new TestOperaLauncherRunner(settings);
    assertFalse(runner.isOperaRunning());
    runner.startOpera();
    assertTrue(runner.isOperaRunning());
  }

  @Test
  public void isOperaRunningShouldNotThrowAfterShutdown() {
    runner = new TestOperaLauncherRunner(settings);
    runner.shutdown();
    assertFalse(runner.isOperaRunning());
  }

  @Test
  // TODO(andreastt): Trigger something which actually generates a crashlog
  public void testGetOperaDefaultCrashlog() {
    runner = new TestOperaLauncherRunner(settings);
    runner.startOpera();
    String crashlog = runner.getOperaCrashlog();
    assertNull(crashlog);
  }

  @Test
  public void saveScreenshot() {
    runner = new TestOperaLauncherRunner(settings);
    ScreenCaptureReply screenshot = runner.captureScreen();
    assertNotNull(screenshot);
  }

  @Test
  public void saveScreenshotAfterShutdownShouldThrow() {
    runner = new TestOperaLauncherRunner(settings);
    runner.shutdown();

    try {
      runner.captureScreen();
      fail("Expected OperaRunnerException");
    } catch (RuntimeException e) {
      assertThat(e, is(instanceOf(OperaRunnerException.class)));
      assertThat(e.getMessage(), containsString("launcher was shutdown"));
    }
  }

  @Test
  public void displayArgumentDoesContainColon() {
    int display = 42;
    settings.setDisplay(display);

    List<String> arguments = TestOperaLauncherRunner.buildArguments(settings);

    String displayArgument = null;
    String displayArgumentValue = null;
    for (int i = 0; i < arguments.size(); ++i) {
      String argument = arguments.get(i);
      if (argument.startsWith("-display")) {
        displayArgument = argument;
        // FIX: fail cleanly instead of erroring with IndexOutOfBoundsException
        // when "-display" is the last launcher argument.
        if (i + 1 >= arguments.size()) {
          fail("-display was the last launcher argument; no value followed it");
        }
        displayArgumentValue = arguments.get(i + 1);
      }
    }

    if (displayArgument == null) {
      fail("List of launcher arguments did not contain -display");
    }

    assertThat(displayArgument, containsString("display"));
    assertThat(displayArgumentValue, containsString(":"));
    assertThat(displayArgumentValue, containsString(String.valueOf(display)));
  }

  @Test
  public void testLoggingLevel() {
    assertEquals(Level.SEVERE,
                 TestOperaLauncherRunner.toLauncherLoggingLevel(Level.SEVERE));
  }

  @Test
  public void testLoggingLevelToAll() {
    assertEquals(Level.FINEST,
                 TestOperaLauncherRunner.toLauncherLoggingLevel(Level.ALL));
  }

  @Test
  public void testLoggingLevelToConfig() {
    assertEquals(Level.FINE,
                 TestOperaLauncherRunner.toLauncherLoggingLevel(Level.CONFIG));
  }

  @Test
  public void testLoggingLevelToFiner() {
    assertEquals(Level.FINE,
                 TestOperaLauncherRunner.toLauncherLoggingLevel(Level.FINER));
  }

  @Test
  public void testLoggingLevelToOff() {
    assertEquals(Level.OFF,
                 TestOperaLauncherRunner.toLauncherLoggingLevel(Level.OFF));
  }

  /**
   * Get the MD5 hash of the given file.
   *
   * @param file file to compute a hash on
   * @return a byte array of the MD5 hash
   * @throws IOException if the file cannot be found or read
   * @throws NoSuchAlgorithmException retained for signature compatibility;
   *         Guava's {@code Files.hash} does not actually throw it
   */
  private static byte[] md5(File file) throws NoSuchAlgorithmException, IOException {
    return Files.hash(file, Hashing.md5()).asBytes();
  }

  /**
   * Exposes the protected static helpers of {@link OperaLauncherRunner} to the
   * tests above.
   */
  private static class TestOperaLauncherRunner extends OperaLauncherRunner {

    public TestOperaLauncherRunner() {
      super();
    }

    public TestOperaLauncherRunner(OperaSettings settings) {
      super(settings);
    }

    public static List<String> buildArguments(OperaSettings settings) {
      return OperaLauncherRunner.buildArguments(settings, PortProber.findFreePort());
    }

    public static Level toLauncherLoggingLevel(Level javaLevel) {
      return OperaLauncherRunner.toLauncherLoggingLevel(javaLevel);
    }

  }

}
package lightningmats.entity.chest; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import java.util.Iterator; import java.util.Random; import net.minecraft.block.Block; import net.minecraft.block.BlockContainer; import net.minecraft.block.material.Material; import net.minecraft.client.renderer.texture.IIconRegister; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.passive.EntityOcelot; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.Container; import net.minecraft.inventory.IInventory; import net.minecraft.inventory.InventoryLargeChest; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.tileentity.TileEntity; import net.minecraft.tileentity.TileEntityChest; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.MathHelper; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import static net.minecraftforge.common.util.ForgeDirection.*; public class LightningChest extends BlockContainer { private final Random field_149955_b = new Random(); public final int field_149956_a; private static final String __OBFID = "CL_00000214"; public LightningChest(int p_i45397_1_) { super(Material.wood); this.field_149956_a = p_i45397_1_; this.setCreativeTab(CreativeTabs.tabDecorations); this.setBlockBounds(0.0625F, 0.0F, 0.0625F, 0.9375F, 0.875F, 0.9375F); } /** * Is this block (a) opaque and (b) a full 1m cube? This determines whether or not to render the shared face of two * adjacent blocks and also whether the player can attach torches, redstone wire, etc to this block. 
*/ public boolean isOpaqueCube() { return false; } /** * If this block doesn't render as an ordinary block it will return False (examples: signs, buttons, stairs, etc) */ public boolean renderAsNormalBlock() { return false; } /** * The type of render function that is called for this block */ public int getRenderType() { return 22; } /** * Updates the blocks bounds based on its current state. Args: world, x, y, z */ public void setBlockBoundsBasedOnState(IBlockAccess p_149719_1_, int p_149719_2_, int p_149719_3_, int p_149719_4_) { this.setBlockBounds(0.0625F, 0.0F, 0.0625F, 0.9375F, 0.875F, 0.9375F); } /** * Called whenever the block is added into the world. Args: world, x, y, z */ public void onBlockAdded(World p_149726_1_, int p_149726_2_, int p_149726_3_, int p_149726_4_) { super.onBlockAdded(p_149726_1_, p_149726_2_, p_149726_3_, p_149726_4_); this.func_149954_e(p_149726_1_, p_149726_2_, p_149726_3_, p_149726_4_); Block block = p_149726_1_.getBlock(p_149726_2_, p_149726_3_, p_149726_4_ - 1); Block block1 = p_149726_1_.getBlock(p_149726_2_, p_149726_3_, p_149726_4_ + 1); Block block2 = p_149726_1_.getBlock(p_149726_2_ - 1, p_149726_3_, p_149726_4_); Block block3 = p_149726_1_.getBlock(p_149726_2_ + 1, p_149726_3_, p_149726_4_); } /** * Called when the block is placed in the world. 
*/ public void onBlockPlacedBy(World p_149689_1_, int p_149689_2_, int p_149689_3_, int p_149689_4_, EntityLivingBase p_149689_5_, ItemStack p_149689_6_) { Block block = p_149689_1_.getBlock(p_149689_2_, p_149689_3_, p_149689_4_ - 1); Block block1 = p_149689_1_.getBlock(p_149689_2_, p_149689_3_, p_149689_4_ + 1); Block block2 = p_149689_1_.getBlock(p_149689_2_ - 1, p_149689_3_, p_149689_4_); Block block3 = p_149689_1_.getBlock(p_149689_2_ + 1, p_149689_3_, p_149689_4_); byte b0 = 0; int l = MathHelper.floor_double((double)(p_149689_5_.rotationYaw * 4.0F / 360.0F) + 0.5D) & 3; if (l == 0) { b0 = 2; } if (l == 1) { b0 = 5; } if (l == 2) { b0 = 3; } if (l == 3) { b0 = 4; } if (p_149689_6_.hasDisplayName()) { ((TileEntityLightningChest)p_149689_1_.getTileEntity(p_149689_2_, p_149689_3_, p_149689_4_)).func_145976_a(p_149689_6_.getDisplayName()); } } public void func_149954_e(World p_149954_1_, int p_149954_2_, int p_149954_3_, int p_149954_4_) { if (!p_149954_1_.isRemote) { Block block = p_149954_1_.getBlock(p_149954_2_, p_149954_3_, p_149954_4_ - 1); Block block1 = p_149954_1_.getBlock(p_149954_2_, p_149954_3_, p_149954_4_ + 1); Block block2 = p_149954_1_.getBlock(p_149954_2_ - 1, p_149954_3_, p_149954_4_); Block block3 = p_149954_1_.getBlock(p_149954_2_ + 1, p_149954_3_, p_149954_4_); boolean flag = true; int l; Block block4; int i1; Block block5; boolean flag1; byte b0; int j1; if (block != this && block1 != this) { if (block2 != this && block3 != this) { b0 = 3; if (block.func_149730_j() && !block1.func_149730_j()) { b0 = 3; } if (block1.func_149730_j() && !block.func_149730_j()) { b0 = 2; } if (block2.func_149730_j() && !block3.func_149730_j()) { b0 = 5; } if (block3.func_149730_j() && !block2.func_149730_j()) { b0 = 4; } } else { l = block2 == this ? p_149954_2_ - 1 : p_149954_2_ + 1; block4 = p_149954_1_.getBlock(l, p_149954_3_, p_149954_4_ - 1); i1 = block2 == this ? 
p_149954_2_ - 1 : p_149954_2_ + 1; block5 = p_149954_1_.getBlock(i1, p_149954_3_, p_149954_4_ + 1); b0 = 3; flag1 = true; if (block2 == this) { j1 = p_149954_1_.getBlockMetadata(p_149954_2_ - 1, p_149954_3_, p_149954_4_); } else { j1 = p_149954_1_.getBlockMetadata(p_149954_2_ + 1, p_149954_3_, p_149954_4_); } if (j1 == 2) { b0 = 2; } if ((block.func_149730_j() || block4.func_149730_j()) && !block1.func_149730_j() && !block5.func_149730_j()) { b0 = 3; } if ((block1.func_149730_j() || block5.func_149730_j()) && !block.func_149730_j() && !block4.func_149730_j()) { b0 = 2; } } } else { l = block == this ? p_149954_4_ - 1 : p_149954_4_ + 1; block4 = p_149954_1_.getBlock(p_149954_2_ - 1, p_149954_3_, l); i1 = block == this ? p_149954_4_ - 1 : p_149954_4_ + 1; block5 = p_149954_1_.getBlock(p_149954_2_ + 1, p_149954_3_, i1); b0 = 5; flag1 = true; if (block == this) { j1 = p_149954_1_.getBlockMetadata(p_149954_2_, p_149954_3_, p_149954_4_ - 1); } else { j1 = p_149954_1_.getBlockMetadata(p_149954_2_, p_149954_3_, p_149954_4_ + 1); } if (j1 == 4) { b0 = 4; } if ((block2.func_149730_j() || block4.func_149730_j()) && !block3.func_149730_j() && !block5.func_149730_j()) { b0 = 5; } if ((block3.func_149730_j() || block5.func_149730_j()) && !block2.func_149730_j() && !block4.func_149730_j()) { b0 = 4; } } p_149954_1_.setBlockMetadataWithNotify(p_149954_2_, p_149954_3_, p_149954_4_, b0, 3); } } /** * Checks to see if its valid to put this block at the specified coordinates. Args: world, x, y, z */ private boolean func_149952_n(World p_149952_1_, int p_149952_2_, int p_149952_3_, int p_149952_4_) { return p_149952_1_.getBlock(p_149952_2_, p_149952_3_, p_149952_4_) != this ? false : (p_149952_1_.getBlock(p_149952_2_ - 1, p_149952_3_, p_149952_4_) == this ? true : (p_149952_1_.getBlock(p_149952_2_ + 1, p_149952_3_, p_149952_4_) == this ? true : (p_149952_1_.getBlock(p_149952_2_, p_149952_3_, p_149952_4_ - 1) == this ? 
true : p_149952_1_.getBlock(p_149952_2_, p_149952_3_, p_149952_4_ + 1) == this))); } /** * Lets the block know when one of its neighbor changes. Doesn't know which neighbor changed (coordinates passed are * their own) Args: x, y, z, neighbor Block */ public void onNeighborBlockChange(World p_149695_1_, int p_149695_2_, int p_149695_3_, int p_149695_4_, Block p_149695_5_) { super.onNeighborBlockChange(p_149695_1_, p_149695_2_, p_149695_3_, p_149695_4_, p_149695_5_); TileEntityLightningChest TileEntityChest1 = (TileEntityLightningChest)p_149695_1_.getTileEntity(p_149695_2_, p_149695_3_, p_149695_4_); if (TileEntityChest1 != null) { TileEntityChest1.updateContainingBlockInfo(); } } public void breakBlock(World p_149749_1_, int p_149749_2_, int p_149749_3_, int p_149749_4_, Block p_149749_5_, int p_149749_6_) { TileEntityLightningChest TileEntityChest1 = (TileEntityLightningChest)p_149749_1_.getTileEntity(p_149749_2_, p_149749_3_, p_149749_4_); if (TileEntityChest1 != null) { for (int i1 = 0; i1 < TileEntityChest1.getSizeInventory(); ++i1) { ItemStack itemstack = TileEntityChest1.getStackInSlot(i1); if (itemstack != null) { float f = this.field_149955_b.nextFloat() * 0.8F + 0.1F; float f1 = this.field_149955_b.nextFloat() * 0.8F + 0.1F; EntityItem entityitem; for (float f2 = this.field_149955_b.nextFloat() * 0.8F + 0.1F; itemstack.stackSize > 0; p_149749_1_.spawnEntityInWorld(entityitem)) { int j1 = this.field_149955_b.nextInt(21) + 10; if (j1 > itemstack.stackSize) { j1 = itemstack.stackSize; } itemstack.stackSize -= j1; entityitem = new EntityItem(p_149749_1_, (double)((float)p_149749_2_ + f), (double)((float)p_149749_3_ + f1), (double)((float)p_149749_4_ + f2), new ItemStack(itemstack.getItem(), j1, itemstack.getItemDamage())); float f3 = 0.05F; entityitem.motionX = (double)((float)this.field_149955_b.nextGaussian() * f3); entityitem.motionY = (double)((float)this.field_149955_b.nextGaussian() * f3 + 0.2F); entityitem.motionZ = 
(double)((float)this.field_149955_b.nextGaussian() * f3); if (itemstack.hasTagCompound()) { entityitem.getEntityItem().setTagCompound((NBTTagCompound)itemstack.getTagCompound().copy()); } } } } p_149749_1_.func_147453_f(p_149749_2_, p_149749_3_, p_149749_4_, p_149749_5_); } super.breakBlock(p_149749_1_, p_149749_2_, p_149749_3_, p_149749_4_, p_149749_5_, p_149749_6_); } /** * Called upon block activation (right click on the block.) */ public boolean onBlockActivated(World p_149727_1_, int p_149727_2_, int p_149727_3_, int p_149727_4_, EntityPlayer p_149727_5_, int p_149727_6_, float p_149727_7_, float p_149727_8_, float p_149727_9_) { if (p_149727_1_.isRemote) { return true; } else { IInventory iinventory = this.func_149951_m(p_149727_1_, p_149727_2_, p_149727_3_, p_149727_4_); if (iinventory != null) { p_149727_5_.displayGUIChest(iinventory); } return true; } } public IInventory func_149951_m(World p_149951_1_, int p_149951_2_, int p_149951_3_, int p_149951_4_) { Object object = (TileEntityLightningChest)p_149951_1_.getTileEntity(p_149951_2_, p_149951_3_, p_149951_4_); if (object == null) { return null; } else if (p_149951_1_.isSideSolid(p_149951_2_, p_149951_3_ + 1, p_149951_4_, DOWN)) { return null; } else if (func_149953_o(p_149951_1_, p_149951_2_, p_149951_3_, p_149951_4_)) { return null; } else if (p_149951_1_.getBlock(p_149951_2_ - 1, p_149951_3_, p_149951_4_) == this && (p_149951_1_.isSideSolid(p_149951_2_ - 1, p_149951_3_ + 1, p_149951_4_, DOWN) || func_149953_o(p_149951_1_, p_149951_2_ - 1, p_149951_3_, p_149951_4_))) { return null; } else if (p_149951_1_.getBlock(p_149951_2_ + 1, p_149951_3_, p_149951_4_) == this && (p_149951_1_.isSideSolid(p_149951_2_ + 1, p_149951_3_ + 1, p_149951_4_, DOWN) || func_149953_o(p_149951_1_, p_149951_2_ + 1, p_149951_3_, p_149951_4_))) { return null; } else if (p_149951_1_.getBlock(p_149951_2_, p_149951_3_, p_149951_4_ - 1) == this && (p_149951_1_.isSideSolid(p_149951_2_, p_149951_3_ + 1, p_149951_4_ - 1, DOWN) || 
func_149953_o(p_149951_1_, p_149951_2_, p_149951_3_, p_149951_4_ - 1))) { return null; } else if (p_149951_1_.getBlock(p_149951_2_, p_149951_3_, p_149951_4_ + 1) == this && (p_149951_1_.isSideSolid(p_149951_2_, p_149951_3_ + 1, p_149951_4_ + 1, DOWN) || func_149953_o(p_149951_1_, p_149951_2_, p_149951_3_, p_149951_4_ + 1))) { return null; } return (IInventory)object; } /** * Returns a new instance of a block's tile entity class. Called on placing the block. */ public TileEntity createNewTileEntity(World p_149915_1_, int p_149915_2_) { TileEntityLightningChest TileEntityChest1 = new TileEntityLightningChest(); return TileEntityChest1; } /** * Can this block provide power. Only wire currently seems to have this change based on its state. */ public boolean canProvidePower() { return this.field_149956_a == 1; } public int isProvidingWeakPower(IBlockAccess p_149709_1_, int p_149709_2_, int p_149709_3_, int p_149709_4_, int p_149709_5_) { if (!this.canProvidePower()) { return 0; } else { int i1 = ((TileEntityLightningChest)p_149709_1_.getTileEntity(p_149709_2_, p_149709_3_, p_149709_4_)).numPlayersUsing; return MathHelper.clamp_int(i1, 0, 15); } } public int isProvidingStrongPower(IBlockAccess p_149748_1_, int p_149748_2_, int p_149748_3_, int p_149748_4_, int p_149748_5_) { return p_149748_5_ == 1 ? 
this.isProvidingWeakPower(p_149748_1_, p_149748_2_, p_149748_3_, p_149748_4_, p_149748_5_) : 0; } private static boolean func_149953_o(World p_149953_0_, int p_149953_1_, int p_149953_2_, int p_149953_3_) { Iterator iterator = p_149953_0_.getEntitiesWithinAABB(EntityOcelot.class, AxisAlignedBB.getBoundingBox((double)p_149953_1_, (double)(p_149953_2_ + 1), (double)p_149953_3_, (double)(p_149953_1_ + 1), (double)(p_149953_2_ + 2), (double)(p_149953_3_ + 1))).iterator(); EntityOcelot entityocelot; do { if (!iterator.hasNext()) { return false; } Entity entity = (Entity)iterator.next(); entityocelot = (EntityOcelot)entity; } while (!entityocelot.isSitting()); return true; } /** * If this returns true, then comparators facing away from this block will use the value from * getComparatorInputOverride instead of the actual redstone signal strength. */ public boolean hasComparatorInputOverride() { return true; } /** * If hasComparatorInputOverride returns true, the return value from this is used instead of the redstone signal * strength when this block inputs to a comparator. */ public int getComparatorInputOverride(World p_149736_1_, int p_149736_2_, int p_149736_3_, int p_149736_4_, int p_149736_5_) { return Container.calcRedstoneFromInventory(this.func_149951_m(p_149736_1_, p_149736_2_, p_149736_3_, p_149736_4_)); } @SideOnly(Side.CLIENT) public void registerBlockIcons(IIconRegister p_149651_1_) { this.blockIcon = p_149651_1_.registerIcon("planks_oak"); } }
// HTMLParser Library $Name: v1_5 $ - A java-based parser for HTML
// http://sourceforge.org/projects/htmlparser
// Copyright (C) 2004 Derrick Oswald
//
// Revision Control Information
//
// $Source: /cvsroot/htmlparser/htmlparser/src/org/htmlparser/scanners/ScriptDecoder.java,v $
// $Author: derrickoswald $
// $Date: 2005/05/15 11:49:04 $
// $Revision: 1.4 $
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
//

package org.htmlparser.scanners;

import org.htmlparser.lexer.Cursor;
import org.htmlparser.lexer.Page;
import org.htmlparser.util.ParserException;

/**
 * Decode script.
 * Script obfuscated by the <A href="http://www.microsoft.com/downloads/details.aspx?FamilyId=E7877F67-C447-4873-B1B0-21F0626A6329&displaylang=en" target="_parent">Windows Script Encoder</A>
 * provided by Microsoft, is converted to plaintext. This code is based loosely
 * on example code provided by MrBrownstone with changes by Joe Steele, see
 * <A href="http://www.virtualconspiracy.com/download/scrdec14.c" target="_parent">scrdec14.c</A>.
 */
public class ScriptDecoder
{
    /**
     * Termination state.
     */
    public static final int STATE_DONE = 0;

    /**
     * State on entry.
     */
    public static final int STATE_INITIAL = 1;

    /**
     * State while reading the encoded length.
     */
    protected static final int STATE_LENGTH = 2;

    /**
     * State when reading up to decoded text.
     */
    protected static final int STATE_PREFIX = 3;

    /**
     * State while decoding.
     */
    protected static final int STATE_DECODE = 4;

    /**
     * State when reading an escape sequence.
     */
    protected static final int STATE_ESCAPE = 5;

    /**
     * State when reading the checksum.
     */
    protected static final int STATE_CHECKSUM = 6;

    /**
     * State while exiting.
     */
    protected static final int STATE_FINAL = 7;

    /**
     * The state to enter when decrypting is complete.
     * If this is STATE_DONE, the decryption will return with any characters
     * following the encoded text still unconsumed.
     * Otherwise, if this is STATE_INITIAL, the input will be exhausted and
     * all following characters will be contained in the return value
     * of the <code>Decode()</code> method.
     */
    public static int LAST_STATE = STATE_DONE;

    /**
     * Table of lookup choice.
     * The decoding cycles between three flavours determined
     * by this sequence of 64 choices, corresponding to the
     * first dimension of the lookup table.
     */
    protected static byte mEncodingIndex[] =
    {
        1, 2, 0, 1, 2, 0, 2, 0,
        0, 2, 0, 2, 1, 0, 2, 0,
        1, 0, 2, 0, 1, 1, 2, 0,
        0, 2, 1, 0, 2, 0, 0, 2,
        1, 1, 0, 2, 0, 2, 0, 1,
        0, 1, 1, 2, 0, 1, 0, 2,
        1, 0, 2, 0, 1, 1, 2, 0,
        0, 1, 1, 2, 0, 1, 0, 2,
    };

    /**
     * Two dimensional lookup table.
     * The decoding uses this table to determine the plaintext for
     * characters that aren't mEscaped.
     */
    // NOTE(review): three entries in each row appear as quoted non-printing
    // characters ('') — presumably control/high bytes from the original
    // scrdec table; verify against the upstream htmlparser source.
    protected static char mLookupTable[][] =
    {
        {
            '{', '2', '0', '!', ')', '[', '8', '3', '=', 'X', ':', '5',
            'e', '9', '\\', 'V', 's', 'f', 'N', 'E', 'k', 'b', 'Y', 'x',
            '^', '}', 'J', 'm', 'q', 0, '`', 0, 'S', 0, 'B', '\'',
            'H', 'r', 'u', '1', '7', 'M', 'R', '"', 'T', 'j', 'G', 'd',
            '-', ' ', '', '.', 'L', ']', '~', 'l', 'o', 'y', 't', 'C',
            '&', 'v', '%', '$', '+', '(', '#', 'A', '4', '\t', '*', 'D',
            '?', 'w', ';', 'U', 'i', 'a', 'c', 'P', 'g', 'Q', 'I', 'O',
            'F', 'h', '|', '6', 'p', 'n', 'z', '/', '_', 'K', 'Z', ',',
            'W',
        },
        {
            'W', '.', 'G', 'z', 'V', 'B', 'j', '/', '&', 'I', 'A', '4',
            '2', '[', 'v', 'r', 'C', '8', '9', 'p', 'E', 'h', 'q', 'O',
            '\t', 'b', 'D', '#', 'u', 0, '~', 0, '^', 0, 'w', 'J',
            'a', ']', '"', 'K', 'o', 'N', ';', 'L', 'P', 'g', '*', '}',
            't', 'T', '+', '-', ',', '0', 'n', 'k', 'f', '5', '%', '!',
            'd', 'M', 'R', 'c', '?', '{', 'x', ')', '(', 's', 'Y', '3',
            '', 'm', 'U', 'S', '|', ':', '_', 'e', 'F', 'X', '1', 'i',
            'l', 'Z', 'H', '\'', '\\', '=', '$', 'y', '7', '`', 'Q', ' ',
            '6',
        },
        {
            'n', '-', 'u', 'R', '`', 'q', '^', 'I', '\\', 'b', '}', ')',
            '6', ' ', '|', 'z', '', 'k', 'c', '3', '+', 'h', 'Q', 'f',
            'v', '1', 'd', 'T', 'C', 0, ':', 0, '~', 0, 'E', ',',
            '*', 't', '\'', '7', 'D', 'y', 'Y', '/', 'o', '&', 'r', 'j',
            '9', '{', '?', '8', 'w', 'g', 'S', 'G', '4', 'x', ']', '0',
            '#', 'Z', '[', 'l', 'H', 'U', 'p', 'i', '.', 'L', '!', '$',
            'N', 'P', '\t', 'V', 's', '5', 'a', 'K', 'X', ';', 'W', '"',
            'm', 'M', '%', '(', 'F', 'J', '2', 'A', '=', '_', 'O', 'B',
            'e',
        },
    };

    /**
     * The base 64 decoding table.
     * This array determines the value of decoded base 64 elements.
     */
    protected static int mDigits[];

    static
    {
        // Table covers char codes 0..'z' (0x7a); '+' and '/' are assigned via
        // character literals '>' (62) and '?' (63).
        mDigits = new int[0x7b];
        for (int i = 0; i < 26; i++)
        {
            mDigits['A' + i] = i;
            mDigits['a' + i] = i + 26;
        }
        for (int i = 0; i < 10; i++)
            mDigits['0' + i] = i + 52;
        mDigits[0x2b] = '>';
        mDigits[0x2f] = '?';
    }

    /**
     * The leader.
     * The prefix to the encoded script is #@~^nnnnnn== where the n are the
     * length digits in base64.
     */
    protected static char mLeader[] =
    {
        '#', '@', '~', '^',
    };

    /**
     * The prefix.
     * The prefix separates the encoded text from the length.
     */
    protected static char mPrefix[] =
    {
        '=', '=',
    };

    /**
     * The trailer.
     * The suffix to the encoded script is nnnnnn==^#~@ where the n are the
     * checksum digits in base64. These characters are the part after the checksum.
     */
    protected static char mTrailer[] =
    {
        '=', '=', '^', '#', '~', '@',
    };

    /**
     * Escape sequence characters.
     */
    protected static char mEscapes[] =
    {
        '#', '&', '!', '*', '$',
    };

    /**
     * The escaped characters corresponding to the each escape sequence.
     */
    protected static char mEscaped[] = //"\r\n<>@";
    {
        '\r', '\n', '<', '>', '@',
    };

    /**
     * Extract the base 64 encoded number.
     * This is a very limited subset of base 64 encoded characters.
     * Six characters are expected. These are translated into a single long
     * value. For a more complete base 64 codec see for example the base64
     * package of <A href="http://sourceforge.net/projects/iharder/" target="_parent">iHarder.net</A>
     * @param p Six base 64 encoded digits.
     * @return The value of the decoded number.
     */
    protected static long decodeBase64 (char[] p)
    {
        long ret;

        ret = 0;
        ret += (mDigits[p[0]] << 2);
        ret += (mDigits[p[1]] >> 4);
        ret += (mDigits[p[1]] & 0xf) << 12;
        ret += ((mDigits[p[2]] >> 2) << 8);
        ret += ((mDigits[p[2]] & 0x3) << 22);
        ret += (mDigits[p[3]] << 16);
        ret += ((mDigits[p[4]] << 2) << 24);
        ret += ((mDigits[p[5]] >> 4) << 24);

        return (ret);
    }

    /**
     * Decode script encoded by the Microsoft obfuscator.
     * @param page The source for encoded text.
     * @param cursor The position at which to start decoding.
     * This is advanced to the end of the encoded text.
     * @return The plaintext.
     * @exception ParserException If an error is discovered while decoding.
*/ public static String Decode (Page page, Cursor cursor) throws ParserException { int state; int substate_initial; int substate_length; int substate_prefix; int substate_checksum; int substate_final; long checksum; long length; char buffer[]; buffer = new char[6]; int index; char character; int input_character; boolean found; StringBuilder ret; ret = new StringBuilder (1024); state = STATE_INITIAL; substate_initial = 0; substate_length = 0; substate_prefix = 0; substate_checksum = 0; substate_final = 0; length = 0L; checksum = 0L; index = 0; while (STATE_DONE != state) { input_character = page.getCharacter (cursor); character = (char)input_character; if (Page.EOF == input_character) { if ( (STATE_INITIAL != state) || (0 != substate_initial) || (0 != substate_length) || (0 != substate_prefix) || (0 != substate_checksum) || (0 != substate_final)) throw new ParserException ("illegal state for exit"); state = STATE_DONE; } else switch (state) { case STATE_INITIAL: if (character == mLeader[substate_initial]) { substate_initial++; if (substate_initial == mLeader.length) { substate_initial = 0; state = STATE_LENGTH; } } else { // oops, flush for (int k = 0; 0 < substate_initial; k++) { ret.append (mLeader[k++]); substate_initial--; } ret.append (character); } break; case STATE_LENGTH: buffer[substate_length] = character; substate_length++; if (substate_length >= buffer.length) { length = decodeBase64 (buffer); if (0 > length) throw new ParserException ("illegal length: " + length); substate_length = 0; state = STATE_PREFIX; } break; case STATE_PREFIX: if (character == mPrefix[substate_prefix]) substate_prefix++; else throw new ParserException ("illegal character encountered: " + (int)character + " ('" + character + "')"); if (substate_prefix >= mPrefix.length) { substate_prefix = 0; state = STATE_DECODE; } break; case STATE_DECODE: if ('@' == character) state = STATE_ESCAPE; else { if (input_character < 0x80) { if (input_character == '\t') input_character = 0; else if 
(input_character >= ' ') input_character -= ' ' - 1; else throw new ParserException ("illegal encoded character: " + input_character + " ('" + character + "')"); char ch = mLookupTable[mEncodingIndex[index % 64]][input_character]; ret.append (ch); checksum += ch; index++; } else ret.append (character); } length--; if (0 == length) { index = 0; state = STATE_CHECKSUM; } break; case STATE_ESCAPE: found = false; for (int i = 0; i < mEscapes.length; i++) if (character == mEscapes[i]) { found = true; character = mEscaped[i]; } if (!found) throw new ParserException ("unexpected escape character: " + (int)character + " ('" + character + "')"); ret.append (character); checksum += character; index++; state = STATE_DECODE; length--; if (0 == length) { index = 0; state = STATE_CHECKSUM; } break; case STATE_CHECKSUM: buffer[substate_checksum] = character; substate_checksum++; if (substate_checksum >= buffer.length) { long check = decodeBase64 (buffer); if (check != checksum) throw new ParserException ("incorrect checksum, expected " + check + ", calculated " + checksum); checksum = 0; substate_checksum = 0; state = STATE_FINAL; } break; case STATE_FINAL: if (character == mTrailer[substate_final]) substate_final++; else throw new ParserException ("illegal character encountered: " + (int)character + " ('" + character + "')"); if (substate_final >= mTrailer.length) { substate_final = 0; state = LAST_STATE; } break; default: throw new ParserException ("invalid state: " + state); } } return (ret.toString ()); } // /** // * Example mainline for decrypting script. // * Change a file with encrypted script into one without. // * <em>WARNING: This does not preserve DOS type line endings.</em> // * @param args Command line arguments. Two file names, input and output. // * Optionally, the character set to use as a third argument. // * @exception IOException If the input file doesn't exist, or the output // * file cannot be created. 
// * @exception ParserException If there is a decryption problem. // */ // public static void main (String[] args) // throws // IOException, // ParserException // { // String charset; // FileInputStream in; // Page page; // Cursor cursor; // String string; // int ret; // // if (args.length < 2) // { // System.out.println ("Usage: java org.htmlparser.scanners.ScriptDecoder <infile> <outfile> [charset]"); // ret = 1; // } // else // { // if (2 < args.length) // charset = args[2]; // else // charset = "ISO-8859-1"; // in = new FileInputStream (args[0]); // page = new Page (in, charset); // cursor = new Cursor (page, 0); // ScriptDecoder.LAST_STATE = STATE_INITIAL; // string = ScriptDecoder.Decode (page, cursor); // in.close (); // // FileOutputStream outfile = new FileOutputStream (args[1]); // outfile.write (string.getBytes (charset)); // outfile.close (); // ret = (0 != string.length ()) ? 0 : 1; // } // // System.exit (ret); // } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.cdi.transaction;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;

import javax.transaction.TransactionRolledbackException;

import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.Navigate;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.impl.AsyncCallbackToCompletableFutureAdapter;
import org.apache.camel.processor.ErrorHandlerSupport;
import org.apache.camel.processor.exceptionpolicy.ExceptionPolicyStrategy;
import org.apache.camel.spi.ShutdownPrepared;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.support.ServiceHelper;

/**
 * Does transactional execution according given policy. This class is based on
 * {@link org.apache.camel.spring.spi.TransactionErrorHandler} excluding
 * redelivery functionality. In the Spring implementation redelivering is done
 * within the transaction which is not appropriate in JTA since every error
 * breaks the current transaction.
 */
public class TransactionErrorHandler extends ErrorHandlerSupport
        implements AsyncProcessor, ShutdownPrepared, Navigate<Processor> {

    /** The wrapped processor invoked inside (or under) the transaction. */
    protected final Processor output;

    // Set to true by prepareShutdown() and reset in doStart(); not read within
    // this class (presumably consulted by subclasses or the routing engine).
    protected volatile boolean preparingShutdown;

    private ExceptionPolicyStrategy exceptionPolicy;
    private JtaTransactionPolicy transactionPolicy;
    /** Identity key marking exchanges already transacted by this handler. */
    private final String transactionKey;
    /** Level at which transaction rollbacks are logged (OFF disables logging). */
    private final LoggingLevel rollbackLoggingLevel;

    /**
     * Creates the transaction error handler.
     *
     * @param camelContext
     *            the camel context
     * @param output
     *            outer processor that should use this default error handler
     * @param exceptionPolicyStrategy
     *            strategy for onException handling
     * @param transactionPolicy
     *            the transaction policy
     * @param executorService
     *            the {@link java.util.concurrent.ScheduledExecutorService} to
     *            be used for redelivery thread pool. Can be <tt>null</tt>.
     * @param rollbackLoggingLevel
     *            logging level to use for logging transaction rollback occurred
     */
    public TransactionErrorHandler(CamelContext camelContext, Processor output,
            ExceptionPolicyStrategy exceptionPolicyStrategy, JtaTransactionPolicy transactionPolicy,
            ScheduledExecutorService executorService, LoggingLevel rollbackLoggingLevel) {
        this.output = output;
        this.transactionPolicy = transactionPolicy;
        this.rollbackLoggingLevel = rollbackLoggingLevel;
        this.transactionKey = ObjectHelper.getIdentityHashCode(transactionPolicy);
        setExceptionPolicy(exceptionPolicyStrategy);
    }

    /**
     * Processes the exchange, wrapping it in a JTA transaction unless the
     * current unit of work is already transacted by this handler's policy.
     */
    public void process(Exchange exchange) throws Exception {
        // we have to run this synchronously as a JTA Transaction does *not*
        // support using multiple threads to span a transaction
        if (exchange.getUnitOfWork().isTransactedBy(transactionKey)) {
            // already transacted by this transaction template
            // so lets just let the error handler process it
            processByErrorHandler(exchange);
        } else {
            // not yet wrapped in transaction so lets do that
            // and then have it invoke the error handler from within that
            // transaction
            processInTransaction(exchange);
        }
    }

    /**
     * Async-protocol entry point; always completes synchronously because a JTA
     * transaction cannot span multiple threads. Any failure is stored on the
     * exchange rather than thrown.
     */
    public boolean process(Exchange exchange, AsyncCallback callback) {
        // invoke this synchronous method as JTA Transaction does *not*
        // support using multiple threads to span a transaction
        try {
            process(exchange);
        } catch (Throwable e) {
            exchange.setException(e);
        }

        // notify callback we are done synchronously
        callback.done(true);
        return true;
    }

    @Override
    public CompletableFuture<Exchange> processAsync(Exchange exchange) {
        AsyncCallbackToCompletableFutureAdapter<Exchange> callback = new AsyncCallbackToCompletableFutureAdapter<>(exchange);
        process(exchange, callback);
        return callback.getFuture();
    }

    /**
     * Runs the exchange inside a new transaction boundary keyed by
     * {@link #transactionKey}, logging begin/commit/rollback as configured.
     */
    protected void processInTransaction(final Exchange exchange) throws Exception {
        // is the exchange redelivered, for example JMS brokers support such
        // details
        Boolean externalRedelivered = exchange.isExternalRedelivered();
        final String redelivered = externalRedelivered != null ? externalRedelivered.toString() : "unknown";
        final String ids = ExchangeHelper.logIds(exchange);

        try {
            // mark the beginning of this transaction boundary
            exchange.getUnitOfWork().beginTransactedBy(transactionKey);

            // do in transaction
            logTransactionBegin(redelivered, ids);
            doInTransactionTemplate(exchange);
            logTransactionCommit(redelivered, ids);
        } catch (TransactionRolledbackException e) {
            // do not set as exception, as its just a dummy exception to force
            // spring TX to rollback
            logTransactionRollback(redelivered, ids, null, true);
        } catch (Throwable e) {
            exchange.setException(e);
            logTransactionRollback(redelivered, ids, e, false);
        } finally {
            // mark the end of this transaction boundary
            exchange.getUnitOfWork().endTransactedBy(transactionKey);
        }

        // if it was a local rollback only then remove its marker so outer
        // transaction wont see the marker
        Boolean onlyLast = (Boolean) exchange.removeProperty(Exchange.ROLLBACK_ONLY_LAST);
        if (onlyLast != null && onlyLast) {
            // we only want this logged at debug level
            if (log.isDebugEnabled()) {
                // log exception if there was a cause exception so we have the
                // stack trace
                Exception cause = exchange.getException();
                if (cause != null) {
                    log.debug("Transaction rollback ({}) redelivered({}) for {} "
                            + "due exchange was marked for rollbackOnlyLast and caught: ",
                            transactionKey, redelivered, ids, cause);
                } else {
                    log.debug("Transaction rollback ({}) redelivered({}) for {} "
                            + "due exchange was marked for rollbackOnlyLast",
                            transactionKey, redelivered, ids);
                }
            }
            // remove caused exception due we was marked as rollback only last
            // so by removing the exception, any outer transaction will not be
            // affected
            exchange.setException(null);
        }
    }

    public void setTransactionPolicy(JtaTransactionPolicy transactionPolicy) {
        this.transactionPolicy = transactionPolicy;
    }

    /**
     * Runs the exchange under the transaction policy, re-throwing the
     * exchange's failure (or a dummy {@link TransactionRolledbackException})
     * to force the transaction manager to roll back.
     */
    protected void doInTransactionTemplate(final Exchange exchange) throws Throwable {

        // spring transaction template is working best with rollback if you
        // throw it a runtime exception
        // otherwise it may not rollback messages send to JMS queues etc.
        transactionPolicy.run(new JtaTransactionPolicy.Runnable() {

            @Override
            public void run() throws Throwable {
                // wrapper exception to throw if the exchange failed
                // IMPORTANT: Must be a runtime exception to let Spring regard
                // it as to do "rollback"
                Throwable rce;

                // and now let process the exchange by the error handler
                processByErrorHandler(exchange);

                // after handling and still an exception or marked as rollback
                // only then rollback
                if (exchange.getException() != null || exchange.isRollbackOnly()) {

                    // wrap exception in transacted exception
                    if (exchange.getException() != null) {
                        rce = exchange.getException();
                    } else {
                        // create dummy exception to force spring transaction
                        // manager to rollback
                        rce = new TransactionRolledbackException();
                    }

                    // throw runtime exception to force rollback (which works
                    // best to rollback with Spring transaction manager)
                    if (log.isTraceEnabled()) {
                        log.trace("Throwing runtime exception to force transaction to rollback on {}",
                                transactionPolicy);
                    }
                    throw rce;
                }
            }
        });
    }

    /**
     * Processes the {@link Exchange} using the error handler.
     * <p/>
     * This implementation will invoke ensure this occurs synchronously, that
     * means if the async routing engine did kick in, then this implementation
     * will wait for the task to complete before it continues.
     *
     * @param exchange
     *            the exchange
     */
    protected void processByErrorHandler(final Exchange exchange) {
        try {
            output.process(exchange);
        } catch (Throwable e) {
            throw new RuntimeCamelException(e);
        }
    }

    /**
     * Logs the transaction begin
     */
    private void logTransactionBegin(String redelivered, String ids) {
        if (log.isDebugEnabled()) {
            log.debug("Transaction begin ({}) redelivered({}) for {})", transactionKey, redelivered, ids);
        }
    }

    /**
     * Logs the transaction commit
     */
    private void logTransactionCommit(String redelivered, String ids) {
        if ("true".equals(redelivered)) {
            // okay its a redelivered message so log at INFO level if
            // rollbackLoggingLevel is INFO or higher
            // this allows people to know that the redelivered message was
            // committed this time
            if (rollbackLoggingLevel == LoggingLevel.INFO || rollbackLoggingLevel == LoggingLevel.WARN
                    || rollbackLoggingLevel == LoggingLevel.ERROR) {
                log.info("Transaction commit ({}) redelivered({}) for {})", transactionKey, redelivered, ids);
                // return after we have logged
                return;
            }
        }

        // log non redelivered by default at DEBUG level
        log.debug("Transaction commit ({}) redelivered({}) for {})", transactionKey, redelivered, ids);
    }

    /**
     * Logs the transaction rollback.
     */
    private void logTransactionRollback(String redelivered, String ids, Throwable e, boolean rollbackOnly) {
        if (rollbackLoggingLevel == LoggingLevel.OFF) {
            return;
        } else if (rollbackLoggingLevel == LoggingLevel.ERROR && log.isErrorEnabled()) {
            if (rollbackOnly) {
                log.error("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                        transactionKey, redelivered, ids);
            } else {
                log.error("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                        transactionKey, redelivered, ids, e.getMessage());
            }
        } else if (rollbackLoggingLevel == LoggingLevel.WARN && log.isWarnEnabled()) {
            if (rollbackOnly) {
                log.warn("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                        transactionKey, redelivered, ids);
            } else {
                log.warn("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                        transactionKey, redelivered, ids, e.getMessage());
            }
        } else if (rollbackLoggingLevel == LoggingLevel.INFO && log.isInfoEnabled()) {
            if (rollbackOnly) {
                log.info("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                        transactionKey, redelivered, ids);
            } else {
                log.info("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                        transactionKey, redelivered, ids, e.getMessage());
            }
        } else if (rollbackLoggingLevel == LoggingLevel.DEBUG && log.isDebugEnabled()) {
            if (rollbackOnly) {
                log.debug("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                        transactionKey, redelivered, ids);
            } else {
                log.debug("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                        transactionKey, redelivered, ids, e.getMessage());
            }
        } else if (rollbackLoggingLevel == LoggingLevel.TRACE && log.isTraceEnabled()) {
            if (rollbackOnly) {
                log.trace("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                        transactionKey, redelivered, ids);
            } else {
                log.trace("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                        transactionKey, redelivered, ids, e.getMessage());
            }
        }
    }

    public void setExceptionPolicy(ExceptionPolicyStrategy exceptionPolicy) {
        this.exceptionPolicy = exceptionPolicy;
    }

    public ExceptionPolicyStrategy getExceptionPolicy() {
        return exceptionPolicy;
    }

    @Override
    public Processor getOutput() {
        return output;
    }

    @Override
    protected void doStart() throws Exception {
        ServiceHelper.startService(output);
        preparingShutdown = false;
    }

    @Override
    protected void doStop() throws Exception {
        // noop, do not stop any services which we only do when shutting down
        // as the error handler can be context scoped, and should not stop in
        // case a route stops
    }

    @Override
    protected void doShutdown() throws Exception {
        ServiceHelper.stopAndShutdownServices(output);
    }

    @Override
    public boolean supportTransacted() {
        return true;
    }

    public boolean hasNext() {
        return output != null;
    }

    @Override
    public List<Processor> next() {
        if (!hasNext()) {
            return null;
        }
        List<Processor> answer = new ArrayList<>(1);
        answer.add(output);
        return answer;
    }

    @Override
    public void prepareShutdown(boolean suspendOnly, boolean forced) {
        // prepare for shutdown, eg do not allow redelivery if configured
        log.trace("Prepare shutdown on error handler {}", this);
        preparingShutdown = true;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.geronimo.tomcat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import javax.net.ssl.KeyManagerFactory;

import org.apache.catalina.connector.Connector;
import org.apache.geronimo.crypto.KeystoreUtil;
import org.apache.geronimo.gbean.AbstractName;
import org.apache.geronimo.gbean.AbstractNameQuery;
import org.apache.geronimo.gbean.GBeanData;
import org.apache.geronimo.gbean.GBeanInfo;
import org.apache.geronimo.gbean.GBeanInfoBuilder;
import org.apache.geronimo.gbean.ReferencePatterns;
import org.apache.geronimo.gbean.annotation.AnnotationGBeanInfoFactory;
import org.apache.geronimo.gbean.annotation.ParamReference;
import org.apache.geronimo.gbean.annotation.ParamSpecial;
import org.apache.geronimo.gbean.annotation.SpecialAttributeType;
import org.apache.geronimo.kernel.GBeanNotFoundException;
import org.apache.geronimo.kernel.InternalKernelException;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.proxy.ProxyManager;
import org.apache.geronimo.management.geronimo.NetworkConnector;
import org.apache.geronimo.management.geronimo.WebAccessLog;
import org.apache.geronimo.management.geronimo.WebContainer;
import org.apache.geronimo.management.geronimo.WebManager;
import org.apache.geronimo.system.serverinfo.ServerInfo;
import org.apache.geronimo.tomcat.connector.AJP13ConnectorGBean;
import org.apache.geronimo.tomcat.connector.ConnectorGBean;
import org.apache.geronimo.tomcat.connector.Http11APRConnectorGBean;
import org.apache.geronimo.tomcat.connector.Http11ConnectorGBean;
import org.apache.geronimo.tomcat.connector.Http11NIOConnectorGBean;
import org.apache.geronimo.tomcat.connector.Https11APRConnectorGBean;
import org.apache.geronimo.tomcat.connector.Https11ConnectorGBean;
import org.apache.geronimo.tomcat.connector.Https11NIOConnectorGBean;
import org.apache.geronimo.tomcat.connector.TomcatWebConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tomcat implementation of the WebManager management API.  Knows how to
 * manipulate other Tomcat objects for management purposes.
 *
 * @version $Rev$ $Date$
 */
public class TomcatManagerImpl implements WebManager {
    private static final Logger log = LoggerFactory.getLogger(TomcatManagerImpl.class);
    private final Kernel kernel;
    private TomcatServerConfigManager tomcatServerConfigManager;

    // One ConnectorType per supported Tomcat protocol/implementation pairing;
    // display names come from the localized Messages bundle.
    private static final ConnectorType HTTP_BIO = new ConnectorType(Messages.getString("TomcatManagerImpl.0")); //$NON-NLS-1$
    private static final ConnectorType HTTPS_BIO = new ConnectorType(Messages.getString("TomcatManagerImpl.1")); //$NON-NLS-1$
    private static final ConnectorType HTTP_NIO = new ConnectorType(Messages.getString("TomcatManagerImpl.2")); //$NON-NLS-1$
    private static final ConnectorType HTTPS_NIO = new ConnectorType(Messages.getString("TomcatManagerImpl.3")); //$NON-NLS-1$
    private static final ConnectorType HTTP_APR = new ConnectorType(Messages.getString("TomcatManagerImpl.4")); //$NON-NLS-1$
    private static final ConnectorType HTTPS_APR = new ConnectorType(Messages.getString("TomcatManagerImpl.5")); //$NON-NLS-1$
    private static final ConnectorType AJP = new ConnectorType(Messages.getString("TomcatManagerImpl.6")); //$NON-NLS-1$
    private static List<ConnectorType> CONNECTOR_TYPES = Arrays.asList(
            HTTP_BIO,
            HTTPS_BIO,
            HTTP_NIO,
            HTTPS_NIO,
            HTTP_APR,
            HTTPS_APR,
            AJP
            );
    // Subset offered when the native APR library is unavailable; not referenced
    // in the visible portion of this class — presumably used further below.
    private static List<ConnectorType> NON_APR_CONNECTOR_TYPES = Arrays.asList(
            HTTP_BIO,
            HTTPS_BIO,
            HTTP_NIO,
            HTTPS_NIO,
            AJP
            );

    // Editable attribute templates (with defaults) per connector type.
    private static Map<ConnectorType, List<ConnectorAttribute>> CONNECTOR_ATTRIBUTES = new HashMap<ConnectorType, List<ConnectorAttribute>>();

    static {
        //******************* HTTP - BIO CONNECTOR
        List<ConnectorAttribute> connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        CONNECTOR_ATTRIBUTES.put(HTTP_BIO, connectorAttributes);

        //******************* HTTPS - BIO CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        addSslConnectorAttributes(connectorAttributes);
        setAttribute(connectorAttributes, "port", 8443); // SSL port
        CONNECTOR_ATTRIBUTES.put(HTTPS_BIO, connectorAttributes);

        //******************* HTTP - NIO CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        addNioConnectorAttributes(connectorAttributes);
        CONNECTOR_ATTRIBUTES.put(HTTP_NIO, connectorAttributes);

        //******************* HTTPS - NIO CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        addSslConnectorAttributes(connectorAttributes);
        addNioConnectorAttributes(connectorAttributes);
        setAttribute(connectorAttributes, "port", 8443); // SSL port
        CONNECTOR_ATTRIBUTES.put(HTTPS_NIO, connectorAttributes);

        //******************* HTTP - APR CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        addAprConnectorAttributes(connectorAttributes);
        CONNECTOR_ATTRIBUTES.put(HTTP_APR, connectorAttributes);

        //******************* HTTPS - APR CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        addHttpConnectorAttributes(connectorAttributes);
        addAprConnectorAttributes(connectorAttributes);
        //APR SSL specific values, different from BIO and NIO SSL because it uses openssl
        connectorAttributes.add(new ConnectorAttribute<String>("sslProtocol", "all", Messages.getString("TomcatManagerImpl.11"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCipherSuite", "ALL", Messages.getString("TomcatManagerImpl.14"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCertificateFile", "", Messages.getString("TomcatManagerImpl.17"), String.class, true)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCertificateKeyFile", null, Messages.getString("TomcatManagerImpl.19"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslPassword", null, Messages.getString("TomcatManagerImpl.21"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslVerifyClient", "none", Messages.getString("TomcatManagerImpl.24"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Integer>("sslVerifyDepth", 10, Messages.getString("TomcatManagerImpl.26"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCACertificateFile", null, Messages.getString("TomcatManagerImpl.28"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCACertificatePath", null, Messages.getString("TomcatManagerImpl.30"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCertificateChainFile", null, Messages.getString("TomcatManagerImpl.32"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCARevocationFile", null, Messages.getString("TomcatManagerImpl.34"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("sslCARevocationPath", null, Messages.getString("TomcatManagerImpl.36"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        setAttribute(connectorAttributes, "port", 8443); // SSL port
        CONNECTOR_ATTRIBUTES.put(HTTPS_APR, connectorAttributes);

        //******************* AJP CONNECTOR
        connectorAttributes = new ArrayList<ConnectorAttribute>();
        addCommonConnectorAttributes(connectorAttributes);
        //AJP Attributes, see http://tomcat.apache.org/tomcat-6.0-doc/config/ajp.html
        connectorAttributes.add(new ConnectorAttribute<String>("host", "0.0.0.0", Messages.getString("TomcatManagerImpl.40"), String.class, true)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Integer>("port", 8009, Messages.getString("TomcatManagerImpl.42"), Integer.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("backlog", 10, Messages.getString("TomcatManagerImpl.44"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("bufferSize", -1, Messages.getString("TomcatManagerImpl.46"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("connectionTimeout", org.apache.coyote.ajp.Constants.DEFAULT_CONNECTION_TIMEOUT, Messages.getString("TomcatManagerImpl.48"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("executor", "DefaultThreadPool", Messages.getString("TomcatManagerImpl.122"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("keepAliveTimeout", org.apache.coyote.ajp.Constants.DEFAULT_CONNECTION_TIMEOUT, Messages.getString("TomcatManagerImpl.50"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxThreads", 40, Messages.getString("TomcatManagerImpl.52"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("minSpareThreads", 10, Messages.getString("TomcatManagerImpl.54"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxSpareThreads", 100, Messages.getString("TomcatManagerImpl.56"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("tcpNoDelay", true, Messages.getString("TomcatManagerImpl.58"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("tomcatAuthentication", true, Messages.getString("TomcatManagerImpl.60"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        CONNECTOR_ATTRIBUTES.put(AJP, connectorAttributes);
    }

    // GBeanInfo (built from annotations) for the GBean class backing each
    // connector type.
    private static Map<ConnectorType, GBeanInfo> CONNECTOR_GBEAN_INFOS = new HashMap<ConnectorType, GBeanInfo>();

    // Attribute names every connector exposes regardless of type.
    private static List<String> defaultAttributeNames = new ArrayList<String>();

    private static AnnotationGBeanInfoFactory annotationGbeanInfoFactory = new AnnotationGBeanInfoFactory();

    static {
        CONNECTOR_GBEAN_INFOS.put(HTTP_BIO, annotationGbeanInfoFactory.getGBeanInfo(Http11ConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(HTTPS_BIO, annotationGbeanInfoFactory.getGBeanInfo(Https11ConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(HTTP_NIO, annotationGbeanInfoFactory.getGBeanInfo(Http11NIOConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(HTTPS_NIO, annotationGbeanInfoFactory.getGBeanInfo(Https11NIOConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(HTTP_APR, annotationGbeanInfoFactory.getGBeanInfo(Http11APRConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(HTTPS_APR, annotationGbeanInfoFactory.getGBeanInfo(Https11APRConnectorGBean.class));
        CONNECTOR_GBEAN_INFOS.put(AJP, annotationGbeanInfoFactory.getGBeanInfo(AJP13ConnectorGBean.class));

        defaultAttributeNames.add("name");
        defaultAttributeNames.add("protocol");
        defaultAttributeNames.add("host");
        defaultAttributeNames.add("port");
        defaultAttributeNames.add("executor");
        defaultAttributeNames.add("redirectPort");
        defaultAttributeNames.add("connectionTimeout");
        defaultAttributeNames.add("scheme");
        defaultAttributeNames.add("secure");
        defaultAttributeNames.add("sslProtocol");
        defaultAttributeNames.add("sslEnabled");
    }

    public TomcatManagerImpl(
            @ParamSpecial(type = SpecialAttributeType.kernel) Kernel kernel,
            @ParamReference(name = "Server") TomcatServerGBean tomcatServerGBean) {
        this.kernel = kernel;
        this.tomcatServerConfigManager = tomcatServerGBean.getTomcatServerConfigManager();
    }

    public String getProductName() {
        return "Tomcat";
    }

    /**
     * Gets the network containers.
     */
    public Object[] getContainers() {
        ProxyManager proxyManager = kernel.getProxyManager();
        AbstractNameQuery query = new AbstractNameQuery(TomcatWebContainer.class.getName());
        Set names = kernel.listGBeans(query);
        TomcatWebContainer[] results = new TomcatWebContainer[names.size()];
        int i = 0;
        for (Iterator it = names.iterator(); it.hasNext(); i++) {
            AbstractName name = (AbstractName) it.next();
            results[i] = (TomcatWebContainer) proxyManager.createProxy(name, TomcatWebContainer.class.getClassLoader());
        }
        return results;
    }

    /**
     * Gets the protocols which this container can configure connectors for.
     */
    public String[] getSupportedProtocols() {
        return new String[]{PROTOCOL_HTTP, PROTOCOL_HTTPS, PROTOCOL_AJP};
    }

    /**
     * Removes a connector.  This shuts it down if necessary, and removes it from the server environment.  It must be a
     * connector that uses this network technology.
     * @param connectorAbstractName
     */
    public void removeConnector(AbstractName connectorAbstractName) {
        try {
            //kernel.invoke(connectorAbstractName, "doStop");
            String connectorName = (String) kernel.getGBeanData(connectorAbstractName).getAttribute("name");
            // Remove from server.xml first, then unload the live GBean.
            tomcatServerConfigManager.removeConnector(connectorName);
            kernel.unloadGBean(connectorAbstractName);
        } catch (Exception e) {
            log.error("error when removing connector:" + connectorAbstractName, e);
        }
    }

    /**
     * Gets the ObjectNames of any existing connectors for this network technology for the specified protocol.
* * @param protocol A protocol as returned by getSupportedProtocols */ public NetworkConnector[] getConnectors(String protocol) { if(protocol == null) { return getConnectors(); } List<TomcatWebConnector> result = new ArrayList<TomcatWebConnector>(); ProxyManager proxyManager = kernel.getProxyManager(); AbstractNameQuery query = new AbstractNameQuery(TomcatWebConnector.class.getName()); Set<AbstractName> names = kernel.listGBeans(query); for (AbstractName name : names) { try { if (kernel.getAttribute(name, "protocol").equals(protocol)) { result.add((TomcatWebConnector)proxyManager.createProxy(name, TomcatWebConnector.class.getClassLoader())); } } catch (Exception e) { log.error("Unable to check the protocol for a connector", e); } } return result.toArray(new TomcatWebConnector[names.size()]); } public WebAccessLog getAccessLog(WebContainer container) { AbstractNameQuery query = new AbstractNameQuery(TomcatLogManager.class.getName()); Set<AbstractName> names = kernel.listGBeans(query); if(names.size() == 0) { return null; } else if(names.size() > 1) { throw new IllegalStateException("Should not be more than one Tomcat access log manager"); } return (WebAccessLog) kernel.getProxyManager().createProxy(names.iterator().next(), TomcatLogManager.class.getClassLoader()); } public List<ConnectorType> getConnectorTypes() { if (isNativeAPRLibInstalled()) return CONNECTOR_TYPES; else return NON_APR_CONNECTOR_TYPES; } public List<ConnectorAttribute> getConnectorAttributes(ConnectorType connectorType) { return ConnectorAttribute.copy(CONNECTOR_ATTRIBUTES.get(connectorType)); } public AbstractName getConnectorConfiguration(ConnectorType connectorType, List<ConnectorAttribute> connectorAttributes, WebContainer container, String uniqueName) { GBeanInfo gbeanInfo = CONNECTOR_GBEAN_INFOS.get(connectorType); AbstractName containerName = kernel.getAbstractNameFor(container); AbstractName name = kernel.getNaming().createSiblingName(containerName, uniqueName, 
GBeanInfoBuilder.DEFAULT_J2EE_TYPE);
        GBeanData gbeanData = new GBeanData(name, gbeanInfo);
        gbeanData.setAttribute("name", uniqueName);
        gbeanData.setReferencePattern(ConnectorGBean.CONNECTOR_CONTAINER_REFERENCE, containerName);
        // Apply each attribute twice: as a typed GBean attribute and, stringified,
        // into the "initParams" map attribute.
        Map<String, Object> initParams = new HashMap<String, Object>();
        for (ConnectorAttribute connectorAttribute : connectorAttributes) {
            gbeanData.setAttribute(connectorAttribute.getAttributeName(), connectorAttribute.getValue());
            initParams.put(connectorAttribute.getAttributeName(), connectorAttribute.getStringValue());
        }
        gbeanData.setAttribute("initParams", initParams);
        // Wire the connector to the ServerInfo GBean; the first match is used.
        AbstractNameQuery query = new AbstractNameQuery(ServerInfo.class.getName());
        Set set = kernel.listGBeans(query);
        AbstractName serverInfo = (AbstractName) set.iterator().next();
        gbeanData.setReferencePattern("ServerInfo", serverInfo);
        try {
            kernel.loadGBean(gbeanData, container.getBundleContext());
            kernel.startGBean(name);
        } catch (Exception e) {
            // load/start failures are logged; the AbstractName is returned regardless
            log.error("Error when adding new tomcat connector" + uniqueName, e);
        }
        return name;
    }

    /**
     * Gets the ObjectNames of any existing connectors associated with this network technology.
     */
    public NetworkConnector[] getConnectors() {
        ProxyManager proxyManager = kernel.getProxyManager();
        AbstractNameQuery query = new AbstractNameQuery(TomcatWebConnector.class.getName());
        Set names = kernel.listGBeans(query);
        TomcatWebConnector[] results = new TomcatWebConnector[names.size()];
        int i=0;
        for (Iterator it = names.iterator(); it.hasNext(); i++) {
            AbstractName name = (AbstractName) it.next();
            results[i] = (TomcatWebConnector) proxyManager.createProxy(name, TomcatWebConnector.class.getClassLoader());
        }
        return results;
    }

    /**
     * Gets the ObjectNames of any existing connectors for the specified container for the specified protocol.
* * @param protocol A protocol as returned by getSupportedProtocols */ public NetworkConnector[] getConnectorsForContainer(Object container, String protocol) { if(protocol == null) { return getConnectorsForContainer(container); } AbstractName containerName = kernel.getAbstractNameFor(container); ProxyManager mgr = kernel.getProxyManager(); try { List results = new ArrayList(); AbstractNameQuery query = new AbstractNameQuery(TomcatWebConnector.class.getName()); Set set = kernel.listGBeans(query); // all Tomcat connectors for (Iterator it = set.iterator(); it.hasNext();) { AbstractName name = (AbstractName) it.next(); // a single Tomcat connector GBeanData data = kernel.getGBeanData(name); ReferencePatterns refs = data.getReferencePatterns(ConnectorGBean.CONNECTOR_CONTAINER_REFERENCE); if(containerName.equals(refs.getAbstractName())) { try { String testProtocol = (String) kernel.getAttribute(name, "protocol"); if(testProtocol != null && testProtocol.equals(protocol)) { results.add(mgr.createProxy(name, TomcatWebConnector.class.getClassLoader())); } } catch (Exception e) { log.error("Unable to look up protocol for connector '"+name+"'",e); } break; } } return (TomcatWebConnector[]) results.toArray(new TomcatWebConnector[results.size()]); } catch (Exception e) { throw (IllegalArgumentException)new IllegalArgumentException("Unable to look up connectors for Tomcat container '"+containerName +"': ").initCause(e); } } /** * Gets the ObjectNames of any existing connectors for the specified container. 
     */
    public NetworkConnector[] getConnectorsForContainer(Object container) {
        AbstractName containerName = kernel.getAbstractNameFor(container);
        ProxyManager mgr = kernel.getProxyManager();
        try {
            List results = new ArrayList();
            AbstractNameQuery query = new AbstractNameQuery(TomcatWebConnector.class.getName());
            Set set = kernel.listGBeans(query); // all Tomcat connectors
            for (Iterator it = set.iterator(); it.hasNext();) {
                AbstractName name = (AbstractName) it.next(); // a single Tomcat connector
                GBeanData data = kernel.getGBeanData(name);
                ReferencePatterns refs = data.getReferencePatterns(ConnectorGBean.CONNECTOR_CONTAINER_REFERENCE);
                if (containerName.equals(refs.getAbstractName())) {
                    results.add(mgr.createProxy(name, TomcatWebConnector.class.getClassLoader()));
                }
            }
            return (TomcatWebConnector[]) results.toArray(new TomcatWebConnector[results.size()]);
        } catch (Exception e) {
            throw (IllegalArgumentException) new IllegalArgumentException("Unable to look up connectors for Tomcat container '"+containerName).initCause(e);
        }
    }

    // Attributes shared by every connector type.
    // see http://tomcat.apache.org/tomcat-7.0-doc/config/http.html
    private static void addCommonConnectorAttributes(List<ConnectorAttribute> connectorAttributes) {
        connectorAttributes.add(new ConnectorAttribute<Boolean>("allowTrace", false, Messages.getString("TomcatManagerImpl.80"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Long>("asyncTimeout", 10000l, Messages.getString("TomcatManagerImpl.82"), Long.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("enableLookups", true, Messages.getString("TomcatManagerImpl.84"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxParameterCount", 10000, Messages.getString("TomcatManagerImpl.85"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxPostSize", 2097152, Messages.getString("TomcatManagerImpl.86"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxSavePostSize", 4096, Messages.getString("TomcatManagerImpl.88"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("port", 8080, Messages.getString("TomcatManagerImpl.141"), Integer.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("proxyName", null, Messages.getString("TomcatManagerImpl.90"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("proxyPort", 0, Messages.getString("TomcatManagerImpl.92"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("redirectPort", 8443, Messages.getString("TomcatManagerImpl.94"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("uriEncoding", "ISO-8859-1", Messages.getString("TomcatManagerImpl.97"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("useBodyEncodingForURI", false, Messages.getString("TomcatManagerImpl.99"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("useIPVHosts", false, Messages.getString("TomcatManagerImpl.101"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("xpoweredBy", false, Messages.getString("TomcatManagerImpl.103"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Attributes specific to the HTTP/HTTPS (BIO) connectors.
    // see http://tomcat.apache.org/tomcat-7.0-doc/config/http.html
    private static void addHttpConnectorAttributes(List<ConnectorAttribute> connectorAttributes) {
        connectorAttributes.add(new ConnectorAttribute<Integer>("acceptCount", 100, Messages.getString("TomcatManagerImpl.105"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("address", "0.0.0.0", Messages.getString("TomcatManagerImpl.108"), String.class, true)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Integer>("bufferSize", 2048, Messages.getString("TomcatManagerImpl.110"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("compressableMimeType", "text/html,text/xml,text/plain", Messages.getString("TomcatManagerImpl.113"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("compression", "off", Messages.getString("TomcatManagerImpl.116"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Integer>("connectionLinger", -1, Messages.getString("TomcatManagerImpl.118"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("connectionTimeout", 60000, Messages.getString("TomcatManagerImpl.120"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("executor", "DefaultThreadPool", Messages.getString("TomcatManagerImpl.122"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("keepAliveTimeout", 60000, Messages.getString("TomcatManagerImpl.124"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("disableUploadTimeout", true, Messages.getString("TomcatManagerImpl.126"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxHttpHeaderSize", 4096, Messages.getString("TomcatManagerImpl.128"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxKeepAliveRequests", 100, Messages.getString("TomcatManagerImpl.130"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxThreads", 40, Messages.getString("TomcatManagerImpl.132"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("minSpareThreads", 10, Messages.getString("TomcatManagerImpl.134"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("maxSpareThreads", 100, Messages.getString("TomcatManagerImpl.136"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("noCompressionUserAgents", "", Messages.getString("TomcatManagerImpl.139"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("restrictedUserAgents", "", Messages.getString("TomcatManagerImpl.144"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("server", null, Messages.getString("TomcatManagerImpl.147"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socketBuffer", 9000, Messages.getString("TomcatManagerImpl.149"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("tcpNoDelay", true, Messages.getString("TomcatManagerImpl.151"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("threadPriority", Thread.NORM_PRIORITY, Messages.getString("TomcatManagerImpl.153"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Attributes shared by the HTTPS connector variants.
    // see http://tomcat.apache.org/tomcat-7.0-doc/config/http.html
    private static void addSslConnectorAttributes(List<ConnectorAttribute> connectorAttributes) {
        connectorAttributes.add(new ConnectorAttribute<String>("algorithm", KeyManagerFactory.getDefaultAlgorithm(), Messages.getString("TomcatManagerImpl.155"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("clientAuth", "false", Messages.getString("TomcatManagerImpl.157"), String.class));
//$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("keystoreFile", "", Messages.getString("TomcatManagerImpl.160"), String.class, true)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("keystorePass", null, Messages.getString("TomcatManagerImpl.162"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("keystoreType", KeystoreUtil.defaultType, Messages.getString("TomcatManagerImpl.165"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("sslProtocol", "TLS", Messages.getString("TomcatManagerImpl.168"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("ciphers", "", Messages.getString("TomcatManagerImpl.171"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        connectorAttributes.add(new ConnectorAttribute<String>("keyAlias", null, Messages.getString("TomcatManagerImpl.173"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("truststoreFile", null, Messages.getString("TomcatManagerImpl.175"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("truststorePass", null, Messages.getString("TomcatManagerImpl.177"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("truststoreType", KeystoreUtil.defaultType, Messages.getString("TomcatManagerImpl.179"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<String>("scheme", "https", Messages.getString("TomcatManagerImpl.169"), String.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("secure", true, Messages.getString("TomcatManagerImpl.170"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("sslEnabled", true, Messages.getString("TomcatManagerImpl.167"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Attributes specific to the NIO connector variants.
    // see http://tomcat.apache.org/tomcat-7.0-doc/config/http.html
    private static void addNioConnectorAttributes(List<ConnectorAttribute> connectorAttributes) {
        connectorAttributes.add(new ConnectorAttribute<Boolean>("useSendfile", true, Messages.getString("TomcatManagerImpl.181"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("acceptorThreadCount", 1, Messages.getString("TomcatManagerImpl.185"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("pollerThreadCount", 1, Messages.getString("TomcatManagerImpl.187"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("pollerThreadPriority", Thread.NORM_PRIORITY, Messages.getString("TomcatManagerImpl.189"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("acceptorThreadPriority", Thread.NORM_PRIORITY, Messages.getString("TomcatManagerImpl.191"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("selectorTimeout", 1000, Messages.getString("TomcatManagerImpl.193"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("useComet", true, Messages.getString("TomcatManagerImpl.195"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("processCache", 200, Messages.getString("TomcatManagerImpl.197"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_directBuffer", false, Messages.getString("TomcatManagerImpl.199"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_rxBufSize", 25188, Messages.getString("TomcatManagerImpl.201"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_txBufSize", 43800, Messages.getString("TomcatManagerImpl.203"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_appReadBufSize", 8192, Messages.getString("TomcatManagerImpl.205"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_appWriteBufSize", 8192, Messages.getString("TomcatManagerImpl.207"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_bufferPool", 500, Messages.getString("TomcatManagerImpl.209"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_bufferPoolSize", 104857600, Messages.getString("TomcatManagerImpl.211"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_processorCache", 500, Messages.getString("TomcatManagerImpl.213"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_keyCache", 500, Messages.getString("TomcatManagerImpl.215"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_eventCache", 500, Messages.getString("TomcatManagerImpl.217"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_tcpNoDelay", false, Messages.getString("TomcatManagerImpl.219"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_soKeepAlive", false, Messages.getString("TomcatManagerImpl.221"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_ooBInline", true, Messages.getString("TomcatManagerImpl.223"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_soReuseAddress", true, Messages.getString("TomcatManagerImpl.225"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("socket_soLingerOn", true, Messages.getString("TomcatManagerImpl.227"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_soLingerTime", 25, Messages.getString("TomcatManagerImpl.229"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_soTimeout", 5000, Messages.getString("TomcatManagerImpl.231"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_soTrafficClass", (0x04 | 0x08 | 0x010), Messages.getString("TomcatManagerImpl.233"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_performanceConnectionTime", 1, Messages.getString("TomcatManagerImpl.235"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_performanceLatency", 0, Messages.getString("TomcatManagerImpl.237"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("socket_performanceBandwidth", 1, Messages.getString("TomcatManagerImpl.239"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("selectorPool_maxSelectors", 200, Messages.getString("TomcatManagerImpl.241"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("selectorPool_maxSpareSelectors", -1, Messages.getString("TomcatManagerImpl.243"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("command_line_options", true, Messages.getString("TomcatManagerImpl.245"), Boolean.class)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("oomParachute", 1048576, Messages.getString("TomcatManagerImpl.247"), Integer.class)); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Attributes specific to the APR (native) connector variants.
    // http://tomcat.apache.org/tomcat-7.0-doc/apr.html
    private static void addAprConnectorAttributes(List<ConnectorAttribute> connectorAttributes) {
        connectorAttributes.add(new ConnectorAttribute<Integer>("pollTime", 2000, Messages.getString("TomcatManagerImpl.249"), Integer.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("pollerSize", 8192, Messages.getString("TomcatManagerImpl.251"), Integer.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Boolean>("useSendfile", true, Messages.getString("TomcatManagerImpl.253"), Boolean.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
        connectorAttributes.add(new ConnectorAttribute<Integer>("sendfileSize", 1024, Messages.getString("TomcatManagerImpl.255"), Integer.class, true)); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Overwrites the value of the named attribute in the list, if present; no-op otherwise.
    private static <T> void setAttribute (List<ConnectorAttribute> connectorAttributes, String attributeName, T value) {
        for (ConnectorAttribute connectorAttribute : connectorAttributes) {
            if (connectorAttribute.getAttributeName().equals(attributeName)) {
                connectorAttribute.setValue(value);
                return;
            }
        }
    }

    // Resolves the ConnectorType of an existing connector GBean by matching its
    // GBeanInfo name against the CONNECTOR_GBEAN_INFOS registry; returns null when
    // the GBean is missing or no registry entry matches.
    public ConnectorType getConnectorType(AbstractName connectorName) {
        ConnectorType connectorType = null;
        try {
            GBeanInfo info = kernel.getGBeanInfo(connectorName);
            boolean found = false;
            Set intfs = info.getInterfaces();
            for (Iterator it = intfs.iterator(); it.hasNext() && !found;) {
                String intf = (String) it.next();
                if (intf.equals(TomcatWebConnector.class.getName())) {
                    found = true;
                }
            }
            if (!found) {
                // not a Tomcat web connector GBean at all
                throw new GBeanNotFoundException(connectorName);
            }
            String searchingFor = info.getName();
            for (Entry<ConnectorType, GBeanInfo> entry : CONNECTOR_GBEAN_INFOS.entrySet() ) {
                String candidate = entry.getValue().getName();
                if (candidate.equals(searchingFor)) {
                    return entry.getKey();
                }
            }
        } catch
(GBeanNotFoundException e) {
            log.warn("No such GBean '" + connectorName + "'");
        } catch (Exception e) {
            log.error("Failed to get connector type", e);
        }
        return connectorType;
    }

    /*
     * update server.xml based on changes to connectors
     * 1, if there's existing <Connector> for connectorName, update it.
     * 2, if there's no existing <Connector> for connectorName, create it.
     */
    @SuppressWarnings("unchecked")
    public void updateConnectorConfig(AbstractName connectorName) throws Exception {
        //1, getting service name
        String serviceName;
        GBeanData containerGBeanData = null;
        try {
            GBeanData connectorGBeanData = kernel.getGBeanData(connectorName);
            ReferencePatterns rp = connectorGBeanData.getReferencePatterns("TomcatContainer");
            containerGBeanData = kernel.getGBeanData(rp.getAbstractName());
            Object object = containerGBeanData.getAttribute("serviceName");
            serviceName = (object == null) ? null : object.toString();
        } catch (GBeanNotFoundException e) {
            throw new Exception("Can't find connector GBean when updating connector config",e);
        } catch (InternalKernelException e) {
            throw new Exception("error to update conector config",e);
        }
        //2, getting connector name
        Map<String,String> attributesToUpdate=new HashMap<String,String>();
        String connectorUniqueName = (String) kernel.getAttribute(connectorName, "name");
        // 3, populate tomcat protocol attribute.
        String tomcatProtocol=(String) kernel.getAttribute(connectorName, "tomcatProtocol");
        attributesToUpdate.put("protocol", tomcatProtocol);
        // 4, remove the unchanged attributes, we don't need to store them back to server.xml.
        ConnectorType connectorType = this.getConnectorType(connectorName);
        List<ConnectorAttribute> defaultAttributes = this.getConnectorAttributes(connectorType);
        for (ConnectorAttribute defaultAttribute : defaultAttributes) {
            String attributeName = defaultAttribute.getAttributeName();
            Object latestAttibuteValue=null;
            try {
                latestAttibuteValue=kernel.getAttribute(connectorName, attributeName);
            } catch (Exception e) {
                // attribute not readable on this connector GBean; skip it
                continue;
            }
            if (null == latestAttibuteValue) {
                if(defaultAttributeNames.contains(attributeName)){
                    //put default value to attributes listed in defaultAttributeNames.
                    attributesToUpdate.put(attributeName, defaultAttribute.getStringValue());
                } else {
                    continue;
                }
            } else if(defaultAttribute.getValue()!=null&&defaultAttribute.getValue().equals(latestAttibuteValue)){
                // value equals the default: only persist it when it is one of the
                // always-written attributes in defaultAttributeNames
                if (defaultAttributeNames.contains(attributeName)) {
                    attributesToUpdate.put(attributeName, defaultAttribute.getStringValue());
                } else {
                    //don't update the unchanged attributes.
                    continue;
                }
            } else {
                //adding changed attributes to attributesToUpdate map.
                ConnectorAttribute latestAttibute = new ConnectorAttribute(defaultAttribute);
                latestAttibute.setValue(latestAttibuteValue);
                attributesToUpdate.put(attributeName, latestAttibute.getStringValue());
            }
        }
        //5, call tomcatServerConfigManager to update connector info in server.xml
        tomcatServerConfigManager.updateConnector(attributesToUpdate, connectorUniqueName, serviceName);
    }

    // Returns true when a Connector created with "HTTP/1.1" resolves to the APR
    // protocol handler, i.e. the Tomcat native library is installed and active.
    private boolean isNativeAPRLibInstalled() {
        try {
            Connector connector = new Connector("HTTP/1.1");
            if (!connector.getProtocolHandlerClassName().equalsIgnoreCase("org.apache.coyote.http11.Http11AprProtocol")) {
                return false;
            }
        } catch (Exception e) {
            return false;
        }
        return true;
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.sandbox; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.io.ByteStreams; import com.google.devtools.build.lib.actions.Spawn; import com.google.devtools.build.lib.actions.Spawns; import com.google.devtools.build.lib.exec.TreeDeleter; import com.google.devtools.build.lib.exec.local.LocalEnvProvider; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.devtools.build.lib.runtime.ProcessWrapper; import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxInputs; import com.google.devtools.build.lib.sandbox.SandboxHelpers.SandboxOutputs; import com.google.devtools.build.lib.shell.Command; import com.google.devtools.build.lib.shell.CommandException; import com.google.devtools.build.lib.shell.CommandResult; import com.google.devtools.build.lib.util.OS; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.Path; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.time.Duration; import java.util.ArrayList; import java.util.HashSet; 
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;

/** Spawn runner that uses Darwin (macOS) sandboxing to execute a process. */
final class DarwinSandboxedSpawnRunner extends AbstractSandboxSpawnRunner {

  /** Path to the {@code getconf} system tool to use. */
  @VisibleForTesting static String getconfBinary = "/usr/bin/getconf";

  /** Path to the {@code sandbox-exec} system tool to use. */
  @VisibleForTesting static String sandboxExecBinary = "/usr/bin/sandbox-exec";

  // Since checking if sandbox is supported is expensive, we remember what we've checked.
  // NOTE(review): unsynchronized lazy init — a race may run the probe twice, but the
  // result is stable so this appears benign; confirm if stricter guarantees are needed.
  private static Boolean isSupported = null;

  /**
   * Returns whether the darwin sandbox is supported on the local machine by running a small command
   * in it.
   */
  public static boolean isSupported(CommandEnvironment cmdEnv) throws InterruptedException {
    if (OS.getCurrent() != OS.DARWIN) {
      return false;
    }
    if (ProcessWrapper.fromCommandEnvironment(cmdEnv) == null) {
      return false;
    }

    if (isSupported == null) {
      isSupported = computeIsSupported();
    }
    return isSupported;
  }

  /**
   * Runs {@code sandbox-exec} with a trivial allow-all profile on {@code /usr/bin/true};
   * support is inferred from whether that command executes without error.
   */
  private static boolean computeIsSupported() throws InterruptedException {
    List<String> args = new ArrayList<>();
    args.add(sandboxExecBinary);
    args.add("-p");
    args.add("(version 1) (allow default)");
    args.add("/usr/bin/true");

    ImmutableMap<String, String> env = ImmutableMap.of();
    File cwd = new File("/usr/bin");

    Command cmd = new Command(args.toArray(new String[0]), env, cwd);
    try {
      cmd.execute(ByteStreams.nullOutputStream(), ByteStreams.nullOutputStream());
    } catch (CommandException e) {
      return false;
    }

    return true;
  }

  private final SandboxHelpers helpers;
  private final Path execRoot;
  private final boolean allowNetwork;
  private final ProcessWrapper processWrapper;
  private final Path sandboxBase;
  @Nullable private final SandboxfsProcess sandboxfsProcess;
  private final boolean sandboxfsMapSymlinkTargets;
  private final TreeDeleter treeDeleter;

  /**
   * The set of directories that always should be writable, independent of the Spawn itself.
   *
   * <p>We cache this, because creating it involves executing {@code getconf}, which is expensive.
   */
  private final ImmutableSet<Path> alwaysWritableDirs;

  private final LocalEnvProvider localEnvProvider;

  /**
   * Creates a sandboxed spawn runner that uses the {@code process-wrapper} tool and the MacOS
   * {@code sandbox-exec} binary.
   *
   * @param helpers common tools and state across all spawns during sandboxed execution
   * @param cmdEnv the command environment to use
   * @param sandboxBase path to the sandbox base directory
   * @param sandboxfsProcess instance of the sandboxfs process to use; may be null for none, in
   *     which case the runner uses a symlinked sandbox
   * @param sandboxfsMapSymlinkTargets map the targets of symlinks within the sandbox if true
   */
  DarwinSandboxedSpawnRunner(
      SandboxHelpers helpers,
      CommandEnvironment cmdEnv,
      Path sandboxBase,
      @Nullable SandboxfsProcess sandboxfsProcess,
      boolean sandboxfsMapSymlinkTargets,
      TreeDeleter treeDeleter)
      throws IOException, InterruptedException {
    super(cmdEnv);
    this.helpers = helpers;
    this.execRoot = cmdEnv.getExecRoot();
    this.allowNetwork = helpers.shouldAllowNetwork(cmdEnv.getOptions());
    this.alwaysWritableDirs = getAlwaysWritableDirs(cmdEnv.getRuntime().getFileSystem());
    this.processWrapper = ProcessWrapper.fromCommandEnvironment(cmdEnv);
    this.localEnvProvider = LocalEnvProvider.forCurrentOs(cmdEnv.getClientEnv());
    this.sandboxBase = sandboxBase;
    this.sandboxfsProcess = sandboxfsProcess;
    this.sandboxfsMapSymlinkTargets = sandboxfsMapSymlinkTargets;
    this.treeDeleter = treeDeleter;
  }

  /** Resolves {@code path} on {@code fs} and adds it to {@code paths} if it exists. */
  private static void addPathToSetIfExists(FileSystem fs, Set<Path> paths, String path)
      throws IOException {
    if (path != null) {
      addPathToSetIfExists(paths, fs.getPath(path));
    }
  }

  /** Adds the symlink-resolved form of {@code path} to {@code paths} if the path exists. */
  private static void addPathToSetIfExists(Set<Path> paths, Path path) throws IOException {
    if (path.exists()) {
      paths.add(path.resolveSymbolicLinks());
    }
  }

  /**
   * Computes the set of host directories that must be writable for any spawn
   * (system temp dirs, per-user Darwin caches, and a few home-directory paths).
   */
  private static ImmutableSet<Path> getAlwaysWritableDirs(FileSystem fs)
      throws IOException, InterruptedException {
    HashSet<Path> writableDirs = new HashSet<>();

    addPathToSetIfExists(fs, writableDirs, "/dev");
    addPathToSetIfExists(fs, writableDirs, "/tmp");
    addPathToSetIfExists(fs, writableDirs, "/private/tmp");
    addPathToSetIfExists(fs, writableDirs, "/private/var/tmp");

    // On macOS, processes may write to not only $TMPDIR but also to two other temporary
    // directories. We have to get their location by calling "getconf".
    addPathToSetIfExists(fs, writableDirs, getConfStr("DARWIN_USER_TEMP_DIR"));
    addPathToSetIfExists(fs, writableDirs, getConfStr("DARWIN_USER_CACHE_DIR"));

    // We don't add any value for $TMPDIR here, instead we compute its value later in
    // {@link #actuallyExec} and add it as a writable directory in
    // {@link AbstractSandboxSpawnRunner#getWritableDirs}.

    // ~/Library/Cache and ~/Library/Logs need to be writable (cf. issue #2231).
    Path homeDir = fs.getPath(System.getProperty("user.home"));
    addPathToSetIfExists(writableDirs, homeDir.getRelative("Library/Cache"));
    addPathToSetIfExists(writableDirs, homeDir.getRelative("Library/Logs"));

    // Certain Xcode tools expect to be able to write to this path.
    addPathToSetIfExists(writableDirs, homeDir.getRelative("Library/Developer"));

    return ImmutableSet.copyOf(writableDirs);
  }

  /** Returns the value of a POSIX or X/Open system configuration variable. */
  private static String getConfStr(String confVar) throws IOException, InterruptedException {
    String[] commandArr = new String[2];
    commandArr[0] = getconfBinary;
    commandArr[1] = confVar;
    Command cmd = new Command(commandArr);
    CommandResult res;
    try {
      res = cmd.execute();
    } catch (CommandException e) {
      throw new IOException("getconf failed", e);
    }
    return new String(res.getStdout(), UTF_8).trim();
  }

  /**
   * Builds the per-spawn sandbox directory tree, the sandbox-exec profile, and the
   * wrapped command line, returning either a sandboxfs-backed or symlink-backed spawn.
   */
  @Override
  protected SandboxedSpawn prepareSpawn(Spawn spawn, SpawnExecutionContext context)
      throws IOException, InterruptedException {
    // Each invocation of "exec" gets its own sandbox base.
    // Note that the value returned by context.getId() is only unique inside one given SpawnRunner,
    // so we have to prefix our name to turn it into a globally unique value.
    Path sandboxPath =
        sandboxBase.getRelative(getName()).getRelative(Integer.toString(context.getId()));
    sandboxPath.getParentDirectory().createDirectory();
    sandboxPath.createDirectory();

    // b/64689608: The execroot of the sandboxed process must end with the workspace name, just like
    // the normal execroot does.
    String workspaceName = execRoot.getBaseName();
    Path sandboxExecRoot = sandboxPath.getRelative("execroot").getRelative(workspaceName);
    sandboxExecRoot.getParentDirectory().createDirectory();
    sandboxExecRoot.createDirectory();

    // NOTE(review): 'binTools' is not declared anywhere in this class as shown — confirm it is
    // an inherited field of AbstractSandboxSpawnRunner (otherwise this does not compile).
    ImmutableMap<String, String> environment =
        localEnvProvider.rewriteLocalEnv(spawn.getEnvironment(), binTools, "/tmp");

    final HashSet<Path> writableDirs = new HashSet<>(alwaysWritableDirs);
    ImmutableSet<Path> extraWritableDirs = getWritableDirs(sandboxExecRoot, environment);
    writableDirs.addAll(extraWritableDirs);

    SandboxInputs inputs =
        helpers.processInputFiles(
            context.getInputMapping(), spawn, context.getArtifactExpander(), execRoot);
    SandboxOutputs outputs = helpers.getOutputs(spawn);

    final Path sandboxConfigPath = sandboxPath.getRelative("sandbox.sb");
    Duration timeout = context.getTimeout();

    ProcessWrapper.CommandLineBuilder processWrapperCommandLineBuilder =
        processWrapper.commandLineBuilder(spawn.getArguments()).setTimeout(timeout);

    final Path statisticsPath;
    if (getSandboxOptions().collectLocalSandboxExecutionStatistics) {
      statisticsPath = sandboxPath.getRelative("stats.out");
      processWrapperCommandLineBuilder.setStatisticsPath(statisticsPath);
    } else {
      statisticsPath = null;
    }

    // Final command: sandbox-exec -f <profile> <process-wrapper command line>.
    ImmutableList<String> commandLine =
        ImmutableList.<String>builder()
            .add(sandboxExecBinary)
            .add("-f")
            .add(sandboxConfigPath.getPathString())
            .addAll(processWrapperCommandLineBuilder.build())
            .build();

    boolean allowNetworkForThisSpawn =
        allowNetwork
            || Spawns.requiresNetwork(spawn, getSandboxOptions().defaultSandboxAllowNetwork);

    if (sandboxfsProcess != null) {
      return new SandboxfsSandboxedSpawn(
          sandboxfsProcess,
          sandboxPath,
          workspaceName,
          commandLine,
          environment,
          inputs,
          outputs,
          ImmutableSet.of(),
          sandboxfsMapSymlinkTargets,
          treeDeleter,
          statisticsPath) {
        @Override
        public void createFileSystem() throws IOException {
          super.createFileSystem();
          // The set of writable dirs includes the path to the execroot in the sandbox tree, but not
          // the path to the sibling sandboxfs hierarchy. We must explicitly grant access to this to
          // let builds work when the output tree is not under the default path hanging from tmp.
          writableDirs.add(getSandboxExecRoot());
          writeConfig(
              sandboxConfigPath,
              writableDirs,
              getInaccessiblePaths(),
              allowNetworkForThisSpawn,
              statisticsPath);
        }
      };
    } else {
      return new SymlinkedSandboxedSpawn(
          sandboxPath,
          sandboxExecRoot,
          commandLine,
          environment,
          inputs,
          outputs,
          writableDirs,
          treeDeleter,
          statisticsPath) {
        @Override
        public void createFileSystem() throws IOException {
          super.createFileSystem();
          writeConfig(
              sandboxConfigPath,
              writableDirs,
              getInaccessiblePaths(),
              allowNetworkForThisSpawn,
              statisticsPath);
        }
      };
    }
  }

  /**
   * Writes the sandbox-exec (SBPL) profile: allow-all by default, then deny network when
   * disallowed, deny all file writes except {@code writableDirs} (and the statistics file),
   * and deny reads of {@code inaccessiblePaths}.
   */
  private void writeConfig(
      Path sandboxConfigPath,
      Set<Path> writableDirs,
      Set<Path> inaccessiblePaths,
      boolean allowNetwork,
      Path statisticsPath)
      throws IOException {
    try (PrintWriter out =
        new PrintWriter(
            new BufferedWriter(
                new OutputStreamWriter(sandboxConfigPath.getOutputStream(), UTF_8)))) {
      // Note: In Apple's sandbox configuration language, the *last* matching rule wins.
      out.println("(version 1)");
      out.println("(debug deny)");
      out.println("(allow default)");

      if (!allowNetwork) {
        out.println("(deny network*)");
        out.println("(allow network-inbound (local ip \"localhost:*\"))");
        out.println("(allow network* (remote ip \"localhost:*\"))");
        out.println("(allow network* (remote unix-socket))");
      }

      // By default, everything is read-only.
      out.println("(deny file-write*)");

      out.println("(allow file-write*");
      for (Path path : writableDirs) {
        out.println(" (subpath \"" + path.getPathString() + "\")");
      }
      if (statisticsPath != null) {
        out.println(" (literal \"" + statisticsPath.getPathString() + "\")");
      }
      out.println(")");

      if (!inaccessiblePaths.isEmpty()) {
        out.println("(deny file-read*");
        // The sandbox configuration file is not part of a cache key and sandbox-exec doesn't care
        // about ordering of paths in expressions, so it's fine if the iteration order is random.
        for (Path inaccessiblePath : inaccessiblePaths) {
          out.println(" (subpath \"" + inaccessiblePath + "\")");
        }
        out.println(")");
      }
    }
  }

  @Override
  public String getName() {
    return "darwin-sandbox";
  }
}
/* * Copyright 2006-2008 Web Cohesion * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webcohesion.enunciate.modules.jaxb.model; import com.webcohesion.enunciate.EnunciateException; import com.webcohesion.enunciate.facets.Facet; import com.webcohesion.enunciate.facets.HasFacets; import com.webcohesion.enunciate.javac.decorations.Annotations; import com.webcohesion.enunciate.javac.decorations.TypeMirrorDecorator; import com.webcohesion.enunciate.javac.decorations.element.DecoratedElement; import com.webcohesion.enunciate.javac.decorations.element.DecoratedExecutableElement; import com.webcohesion.enunciate.javac.decorations.element.DecoratedTypeElement; import com.webcohesion.enunciate.javac.decorations.element.PropertyElement; import com.webcohesion.enunciate.metadata.ClientName; import com.webcohesion.enunciate.metadata.qname.XmlQNameEnumRef; import com.webcohesion.enunciate.modules.jaxb.EnunciateJaxbContext; import com.webcohesion.enunciate.modules.jaxb.model.types.XmlClassType; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.MirroredTypesException; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.ElementFilter; import javax.xml.bind.annotation.*; import javax.xml.namespace.QName; import java.beans.Introspector; import java.util.*; import java.util.concurrent.Callable; /** * A xml 
type definition.
 *
 * @author Ryan Heaton
 */
public abstract class TypeDefinition extends DecoratedTypeElement implements HasFacets {

  private final javax.xml.bind.annotation.XmlType xmlType;
  private final Schema schema;
  private final SortedSet<Element> elements;
  private final Collection<Attribute> attributes;
  private final Value xmlValue;
  private final Accessor xmlID;
  private final boolean hasAnyAttribute;
  private final TypeMirror anyAttributeQNameEnumRef;
  private final AnyElement anyElement;
  // Human-readable locations this type is referenced from (populated externally).
  private final LinkedList<javax.lang.model.element.Element> referencedFrom = new LinkedList<javax.lang.model.element.Element>();
  private final Set<Facet> facets = new TreeSet<Facet>();
  protected final EnunciateJaxbContext context;

  /**
   * Classifies every potential accessor of {@code delegate} into attributes, the (at most one)
   * xml value, element refs, any-attribute/any-element wildcards, and plain elements, and records
   * the (at most one) XML id accessor. Throws on JAXB contract violations (multiple values,
   * multiple ids, unsupported mixed/wildcard elements).
   */
  protected TypeDefinition(TypeElement delegate, EnunciateJaxbContext context) {
    super(delegate, context.getContext().getProcessingEnvironment());

    this.xmlType = getAnnotation(javax.xml.bind.annotation.XmlType.class);
    this.schema = new Schema(context.getContext().getProcessingEnvironment().getElementUtils().getPackageOf(delegate), env);

    ElementComparator comparator = new ElementComparator(getPropertyOrder(), getAccessorOrder(), env);
    SortedSet<Element> elementAccessors = new TreeSet<Element>(comparator);
    AccessorFilter filter = new AccessorFilter(getAccessType());
    Collection<Attribute> attributeAccessors = new ArrayList<Attribute>();
    Value value = null;

    Accessor xmlID = null;
    AnyElement anyElement = null;
    boolean hasAnyAttribute = false;
    TypeMirror anyAttributeQNameEnumRef = null;

    for (javax.lang.model.element.Element accessor : loadPotentialAccessors(filter)) {
      Accessor added;
      if (isAttribute(accessor)) {
        Attribute attribute = new Attribute(accessor, this, context);
        attributeAccessors.add(attribute);
        added = attribute;
      }
      else if (isValue(accessor)) {
        if (value != null) {
          throw new EnunciateException("Accessor " + accessor.getSimpleName() + " of " + getQualifiedName() + ": a type definition cannot have more than one xml value.");
        }

        value = new Value(accessor, this, context);
        added = value;
      }
      else if (isElementRef(accessor)) {
        ElementRef elementRef = new ElementRef(accessor, this, context);
        if (!elementAccessors.add(elementRef)) {
          //see http://jira.codehaus.org/browse/ENUNCIATE-381; the case for this is when an annotated field has an associated public property
          //we'll just silently continue
          continue;
        }
        added = elementRef;
      }
      else if (isAnyAttribute(accessor)) {
        hasAnyAttribute = true;

        // If the wildcard attribute carries an XmlQNameEnumRef, capture the referenced enum type.
        final XmlQNameEnumRef enumRef = accessor.getAnnotation(XmlQNameEnumRef.class);
        if (enumRef != null) {
          anyAttributeQNameEnumRef = Annotations.mirrorOf(new Callable<Class<?>>() {
            @Override
            public Class<?> call() throws Exception {
              return enumRef.value();
            }
          }, this.env);
        }

        continue;
      }
      else if (isAnyElement(accessor)) {
        anyElement = new AnyElement(accessor, this, context);
        continue;
      }
      else if (isUnsupported(accessor)) {
        throw new EnunciateException("Accessor " + accessor.getSimpleName() + " of " + getQualifiedName() + ": sorry, we currently don't support mixed or wildard elements. Maybe someday...");
      }
      else {
        //it's an element accessor.

        if (accessor instanceof PropertyElement) {
          //if the accessor is a property and either the getter or setter overrides ANY method of ANY superclass, exclude it.
          if (overridesAnother(((PropertyElement) accessor).getGetter()) || overridesAnother(((PropertyElement) accessor).getSetter())) {
            continue;
          }
        }

        Element element = new Element(accessor, this, context);
        if (!elementAccessors.add(element)) {
          //see http://jira.codehaus.org/browse/ENUNCIATE-381; the case for this is when an annotated field has an associated public property
          //we'll just silently continue
          continue;
        }
        added = element;
      }

      if (added.getAnnotation(XmlID.class) != null) {
        if (xmlID != null) {
          throw new EnunciateException("Accessor " + added.getSimpleName() + " of " + getQualifiedName() + ": more than one XML id specified.");
        }

        xmlID = added;
      }
    }

    this.elements = Collections.unmodifiableSortedSet(elementAccessors);
    this.attributes = Collections.unmodifiableCollection(attributeAccessors);
    this.xmlValue = value;
    this.xmlID = xmlID;
    this.hasAnyAttribute = hasAnyAttribute;
    this.anyAttributeQNameEnumRef = anyAttributeQNameEnumRef;
    this.anyElement = anyElement;
    this.facets.addAll(Facet.gatherFacets(delegate));
    this.facets.addAll(this.schema.getFacets());
    this.context = context;
  }

  /** Copy constructor: shares the already-computed accessor model of {@code copy}. */
  protected TypeDefinition(TypeDefinition copy) {
    super(copy.delegate, copy.env);
    this.xmlType = copy.xmlType;
    this.schema = copy.schema;
    this.elements = copy.elements;
    this.attributes = copy.attributes;
    this.xmlValue = copy.xmlValue;
    this.xmlID = copy.xmlID;
    this.hasAnyAttribute = copy.hasAnyAttribute;
    this.anyAttributeQNameEnumRef = copy.anyAttributeQNameEnumRef;
    this.anyElement = copy.anyElement;
    this.facets.addAll(copy.facets);
    this.context = copy.context;
  }

  /**
   * Load the potential accessors for this type definition.
   *
   * @param filter The filter.
   * @return the potential accessors for this type definition (fields first, then properties).
   */
  protected List<javax.lang.model.element.Element> loadPotentialAccessors(AccessorFilter filter) {
    List<VariableElement> potentialFields = new ArrayList<VariableElement>();
    List<PropertyElement> potentialProperties = new ArrayList<PropertyElement>();
    aggregatePotentialAccessors(potentialFields, potentialProperties, this, filter, false);

    List<javax.lang.model.element.Element> accessors = new ArrayList<javax.lang.model.element.Element>();
    accessors.addAll(potentialFields);
    accessors.addAll(potentialProperties);
    return accessors;
  }

  /**
   * Aggregate the potential accessor into their separate buckets for the given class declaration,
   * recursively including transient superclasses.
   *
   * @param fields The fields.
   * @param properties The properties.
   * @param clazz The class.
   * @param filter The filter.
   */
  protected void aggregatePotentialAccessors(List<VariableElement> fields, List<PropertyElement> properties, DecoratedTypeElement clazz, AccessorFilter filter, boolean childIsXmlTransient) {
    DecoratedTypeElement superDeclaration = clazz.getSuperclass() != null ? (DecoratedTypeElement) this.env.getTypeUtils().asElement(clazz.getSuperclass()) : null;
    // Only recurse into the superclass when it (or a class below it) is @XmlTransient:
    // its accessors are then "pulled down" into this type definition.
    if (superDeclaration != null && (isXmlTransient(superDeclaration) || childIsXmlTransient)) {
      childIsXmlTransient = true;
      aggregatePotentialAccessors(fields, properties, superDeclaration, filter, childIsXmlTransient);
    }

    for (VariableElement fieldDeclaration : ElementFilter.fieldsIn(clazz.getEnclosedElements())) {
      if (!filter.accept((DecoratedElement) fieldDeclaration)) {
        // Rejected accessors also shadow same-named accessors inherited from above.
        remove(fieldDeclaration, fields);
      }
      else {
        addOrReplace(fieldDeclaration, fields);
      }
    }

    for (PropertyElement propertyDeclaration : clazz.getProperties()) {
      if (!filter.accept(propertyDeclaration)) {
        remove(propertyDeclaration, properties);
      }
      else {
        addOrReplace(propertyDeclaration, properties);
      }
    }
  }

  /**
   * Whether the given method declaration overrides any method.
   *
   * @param method The method declaration.
   * @return Whether the given method declaration overrides any method.
   */
  protected boolean overridesAnother(DecoratedExecutableElement method) {
    if (method == null) {
      return false;
    }

    final TypeElement declaringType = (TypeElement) method.getEnclosingElement();
    TypeElement superType = (TypeElement) this.env.getTypeUtils().asElement(declaringType.getSuperclass());
    // Walk the superclass chain up to (but excluding) java.lang.Object.
    while (superType != null && !Object.class.getName().equals(superType.getQualifiedName().toString())) {
      List<ExecutableElement> methods = ElementFilter.methodsIn(superType.getEnclosedElements());
      for (ExecutableElement candidate : methods) {
        if (this.env.getElementUtils().overrides(method, candidate, declaringType)) {
          return true;
        }
      }
      superType = (TypeElement) this.env.getTypeUtils().asElement(superType.getSuperclass());
    }

    return false;
  }

  /**
   * Add the specified member declaration, or if it is already in the list (by name), replace it.
   *
   * @param memberDeclaration The member to add/replace.
   * @param memberDeclarations The other members.
   */
  protected <M extends javax.lang.model.element.Element> void addOrReplace(M memberDeclaration, List<M> memberDeclarations) {
    remove(memberDeclaration, memberDeclarations);
    memberDeclarations.add(memberDeclaration);
  }

  /**
   * Remove specified member declaration from the specified list, if it exists (matched by
   * simple name).
   *
   * @param memberDeclaration The member to remove.
   * @param memberDeclarations The other members.
   */
  protected <M extends javax.lang.model.element.Element> void remove(M memberDeclaration, List<M> memberDeclarations) {
    Iterator<M> it = memberDeclarations.iterator();
    while (it.hasNext()) {
      javax.lang.model.element.Element candidate = it.next();
      if (candidate.getSimpleName().equals(memberDeclaration.getSimpleName())) {
        it.remove();
      }
    }
  }

  /**
   * Whether a declaration is an xml attribute.
   *
   * @param declaration The declaration to check.
   * @return Whether a declaration is an attribute.
   */
  protected boolean isAttribute(javax.lang.model.element.Element declaration) {
    //todo: the attribute wildcard?
    return (declaration.getAnnotation(XmlAttribute.class) != null);
  }

  /**
   * Whether a declaration is an xml value.
   *
   * @param declaration The declaration to check.
   * @return Whether a declaration is an value.
   */
  protected boolean isValue(javax.lang.model.element.Element declaration) {
    return (declaration.getAnnotation(XmlValue.class) != null);
  }

  /**
   * Whether a declaration is an xml element ref.
   *
   * @param declaration The declaration to check.
   * @return Whether a declaration is an xml element ref.
   */
  protected boolean isElementRef(javax.lang.model.element.Element declaration) {
    return ((declaration.getAnnotation(XmlElementRef.class) != null) || (declaration.getAnnotation(XmlElementRefs.class) != null));
  }

  /**
   * Whether the member declaration is XmlAnyAttribute.
   *
   * @param declaration The declaration.
   * @return Whether the member declaration is XmlAnyAttribute.
   */
  protected boolean isAnyAttribute(javax.lang.model.element.Element declaration) {
    return declaration.getAnnotation(XmlAnyAttribute.class) != null;
  }

  /**
   * Whether the member declaration is XmlAnyElement.
   *
   * @param declaration The declaration.
   * @return Whether the member declaration is XmlAnyElement.
   */
  protected boolean isAnyElement(javax.lang.model.element.Element declaration) {
    return declaration.getAnnotation(XmlAnyElement.class) != null;
  }

  /**
   * Whether a declaration is an xml-mixed property.
   *
   * @param declaration The declaration to check.
   * @return Whether a declaration is an mixed.
   */
  protected boolean isUnsupported(javax.lang.model.element.Element declaration) {
    //todo: support xml-mixed?
    return (declaration.getAnnotation(XmlMixed.class) != null);
  }

  /**
   * The name of the xml type element.
   *
   * @return The name of the xml type element (decapitalized simple name unless overridden by
   *     {@code @XmlType(name=...)}; {@code null} for an anonymous type).
   */
  public String getName() {
    String name = Introspector.decapitalize(getSimpleName().toString());

    if ((xmlType != null) && (!"##default".equals(xmlType.name()))) {
      name = xmlType.name();

      if ("".equals(name)) {
        name = null;
      }
    }

    return name;
  }

  /**
   * The namespace of the xml type element.
   *
   * @return The namespace of the xml type element.
   */
  public String getNamespace() {
    String namespace = getPackage().getNamespace();

    if ((xmlType != null) && (!"##default".equals(xmlType.namespace()))) {
      namespace = xmlType.namespace();
    }

    return namespace;
  }

  public EnunciateJaxbContext getContext() {
    return context;
  }

  /**
   * The simple name for client-side code generation.
   *
   * @return The simple name for client-side code generation.
   */
  public String getClientSimpleName() {
    String clientSimpleName = getSimpleName().toString();
    ClientName clientName = getAnnotation(ClientName.class);
    if (clientName != null) {
      clientSimpleName = clientName.value();
    }
    return clientSimpleName;
  }

  /**
   * The qname of this type definition.
   *
   * @return The qname of this type definition.
   */
  public QName getQname() {
    String localPart = getName();

    if (localPart == null) {
      localPart = "";
    }

    return new QName(getNamespace(), localPart);
  }

  /**
   * The default access type for the beans in this class.
   *
   * @return The default access type for the beans in this class.
   */
  public XmlAccessType getAccessType() {
    XmlAccessType accessType = getPackage().getAccessType();

    XmlAccessorType xmlAccessorType = getAnnotation(XmlAccessorType.class);
    if (xmlAccessorType != null) {
      accessType = xmlAccessorType.value();
    }
    else {
      XmlAccessType inheritedAccessType = getInheritedAccessType(this);
      if (inheritedAccessType != null) {
        accessType = inheritedAccessType;
      }
    }

    return accessType;
  }

  /**
   * Get the inherited accessor type of the given class, or null if none is found.
   *
   * @param declaration The inherited accessor type.
   * @return The inherited accessor type of the given class, or null if none is found.
   */
  protected XmlAccessType getInheritedAccessType(TypeElement declaration) {
    TypeMirror superclass = declaration.getSuperclass();
    if (superclass != null && superclass.getKind() != TypeKind.NONE) {
      TypeElement superDeclaration = (TypeElement) this.env.getTypeUtils().asElement(superclass);
      if ((superDeclaration != null) && (!Object.class.getName().equals(superDeclaration.getQualifiedName().toString()))) {
        XmlAccessorType xmlAccessorType = superDeclaration.getAnnotation(XmlAccessorType.class);
        if (xmlAccessorType != null) {
          return xmlAccessorType.value();
        }
        else {
          return getInheritedAccessType(superDeclaration);
        }
      }
    }

    return null;
  }

  /**
   * The property order of this xml type.
   *
   * @return The property order of this xml type, or {@code null} if unspecified.
   */
  public String[] getPropertyOrder() {
    String[] propertyOrder = null;

    if (xmlType != null) {
      String[] propOrder = xmlType.propOrder();
      // JAXB's "unspecified" marker is a single empty string; treat that as null.
      if ((propOrder != null) && (propOrder.length > 0) && ((propOrder.length > 1) || !("".equals(propOrder[0])))) {
        propertyOrder = propOrder;
      }
    }

    return propertyOrder;
  }

  /**
   * The default accessor order of the beans in this package.
   *
   * @return The default accessor order of the beans in this package.
   */
  public XmlAccessOrder getAccessorOrder() {
    XmlAccessOrder order = getPackage().getAccessorOrder();

    XmlAccessorOrder xmlAccessorOrder = getAnnotation(XmlAccessorOrder.class);
    if (xmlAccessorOrder != null) {
      order = xmlAccessorOrder.value();
    }

    return order;
  }

  /**
   * @return The list of class names that this type definition wants you to "see also".
   */
  public Collection<TypeMirror> getSeeAlsos() {
    Collection<TypeMirror> seeAlsos = null;
    XmlSeeAlso seeAlsoInfo = getAnnotation(XmlSeeAlso.class);
    if (seeAlsoInfo != null) {
      seeAlsos = new ArrayList<TypeMirror>();
      try {
        // Reading Class values off an annotation mirror usually throws
        // MirroredTypesException; both paths are handled.
        for (Class clazz : seeAlsoInfo.value()) {
          TypeElement typeDeclaration = this.env.getElementUtils().getTypeElement(clazz.getName());
          seeAlsos.add(typeDeclaration.asType());
        }
      }
      catch (MirroredTypesException e) {
        seeAlsos.addAll(TypeMirrorDecorator.decorate(e.getTypeMirrors(), this.env));
      }
    }
    return seeAlsos;
  }

  /**
   * Whether this type definition has an "anyAttribute" definition.
   *
   * @return Whether this type definition has an "anyAttribute" definition.
   */
  public boolean isHasAnyAttribute() {
    return hasAnyAttribute;
  }

  /**
   * The enum type containing the known qnames for attributes of the 'any' attribute definition. <code>null</code> if none.
   *
   * @return The enum type containing the known qnames for attributes of the 'any' attribute definition. <code>null</code> if none.
   */
  public TypeMirror getAnyAttributeQNameEnumRef() {
    return anyAttributeQNameEnumRef;
  }

  /**
   * The "anyElement" element.
   *
   * @return The "anyElement" element.
   */
  public AnyElement getAnyElement() {
    return anyElement;
  }

  /**
   * The elements of this type definition.
   *
   * @return The elements of this type definition.
   */
  public SortedSet<Element> getElements() {
    return elements;
  }

  /**
   * The attributes of this type definition.
   *
   * @return The attributes of this type definition.
   */
  public Collection<Attribute> getAttributes() {
    return attributes;
  }

  /**
   * The value of this type definition.
   *
   * @return The value of this type definition.
   */
  public Value getValue() {
    return xmlValue;
  }

  /** All accessors, including those inherited from base xml types: attributes, values, elements. */
  public List<Accessor> getAllAccessors() {
    ArrayList<Accessor> accessors = new ArrayList<Accessor>();
    accessors.addAll(getAllAttributes());
    accessors.addAll(getAllValues());
    accessors.addAll(getAllElements());
    return accessors;
  }

  /** Attributes of this type and its base types; overridden attributes appear only once. */
  public List<Attribute> getAllAttributes() {
    ArrayList<Attribute> attributes = new ArrayList<Attribute>();
    com.webcohesion.enunciate.modules.jaxb.model.types.XmlType baseType = getBaseType();
    if (baseType instanceof XmlClassType) {
      attributes.addAll(((XmlClassType) baseType).getTypeDefinition().getAllAttributes());
    }
    MY_ATTRIBUTES : for (Attribute attribute : getAttributes()) {
      for (Attribute other : attributes) {
        if (attribute.overrides(other)) {
          continue MY_ATTRIBUTES;
        }
      }
      attributes.add(attribute);
    }
    return attributes;
  }

  /** Values of this type and its base types; a local value is ignored if a base type has one. */
  public List<Value> getAllValues() {
    ArrayList<Value> values = new ArrayList<Value>();
    com.webcohesion.enunciate.modules.jaxb.model.types.XmlType baseType = getBaseType();
    if (baseType instanceof XmlClassType) {
      values.addAll(((XmlClassType) baseType).getTypeDefinition().getAllValues());
    }
    Value value = getValue();
    if (value != null && values.isEmpty()) {
      values.add(value);
    }
    return values;
  }

  /** Elements of this type and its base types; overridden elements appear only once. */
  public List<Element> getAllElements() {
    ArrayList<Element> elements = new ArrayList<Element>();
    com.webcohesion.enunciate.modules.jaxb.model.types.XmlType baseType = getBaseType();
    if (baseType instanceof XmlClassType) {
      elements.addAll(((XmlClassType) baseType).getTypeDefinition().getAllElements());
    }
    MY_ELEMENTS : for (Element element : getElements()) {
      for (Element other : elements) {
        if (element.overrides(other)) {
          continue MY_ELEMENTS;
        }
      }
      elements.add(element);
    }
    return elements;
  }

  /**
   * The accessor that is the xml id of this type definition, or null if none.
   *
   * @return The accessor that is the xml id of this type definition, or null if none.
   */
  public Accessor getXmlID() {
    return xmlID;
  }

  /**
   * Whether a declaration is xml transient.
   *
   * @param declaration The declaration on which to determine xml transience.
   * @return Whether a declaration is xml transient.
   */
  protected boolean isXmlTransient(javax.lang.model.element.Element declaration) {
    return (declaration.getAnnotation(XmlTransient.class) != null);
  }

  /**
   * Whether this xml type is anonymous.
   *
   * @return Whether this xml type is anonymous.
   */
  public boolean isAnonymous() {
    return getName() == null;
  }

  /**
   * The schema for this complex type.
   *
   * @return The schema for this complex type.
   */
  public Schema getSchema() {
    return schema;
  }

  // Inherited.
  @Override
  public Schema getPackage() {
    return getSchema();
  }

  /**
   * Whether this is a complex type.
   *
   * @return Whether this is a complex type.
   */
  public boolean isComplex() {
    return false;
  }

  /**
   * Whether this is a enum type.
   *
   * @return Whether this is a enum type.
   */
  public boolean isEnum() {
    return false;
  }

  /**
   * Whether this is a simple type.
   *
   * @return Whether this is a simple type.
   */
  public boolean isSimple() {
    return false;
  }

  /**
   * Whether this type definition is a base object (i.e. a root of the object hierarchy).
   *
   * @return Whether this type definition is a base object
   */
  public boolean isBaseObject() {
    return true;
  }

  /**
   * Set of (human-readable) locations that this type definition is referenced from.
   *
   * @return The referenced-from list.
   */
  public LinkedList<javax.lang.model.element.Element> getReferencedFrom() {
    return referencedFrom;
  }

  /**
   * The facets here applicable.
   *
   * @return The facets here applicable.
   */
  public Set<Facet> getFacets() {
    return facets;
  }

  /**
   * The base type of this type definition.
   *
   * @return The base type of this type definition.
   */
  public abstract com.webcohesion.enunciate.modules.jaxb.model.types.XmlType getBaseType();

}
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.testdriver; import java.io.IOException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.testdriver.compiler.CompilerConstants; import com.asakusafw.testdriver.compiler.JobflowMirror; import com.asakusafw.testdriver.compiler.PortMirror; import com.asakusafw.testdriver.core.DataModelSourceFactory; import com.asakusafw.testdriver.core.Difference; import com.asakusafw.testdriver.core.TestModerator; import com.asakusafw.testdriver.core.VerifyContext; import com.asakusafw.testdriver.executor.DefaultCommandTaskExecutor; import com.asakusafw.testdriver.executor.DefaultDeleteTaskExecutor; import com.asakusafw.testdriver.executor.DefaultHadoopTaskExecutor; import com.asakusafw.testdriver.executor.TaskExecutorContextAdapter; import com.asakusafw.testdriver.hadoop.ConfigurationFactory; import com.asakusafw.vocabulary.external.ExporterDescription; import com.asakusafw.vocabulary.external.ImporterDescription; import com.asakusafw.workflow.executor.TaskExecutionContext; import com.asakusafw.workflow.executor.TaskExecutor; import 
com.asakusafw.workflow.executor.TaskExecutors;
import com.asakusafw.workflow.executor.basic.BasicJobflowExecutor;
import com.asakusafw.workflow.model.CommandTaskInfo;
import com.asakusafw.workflow.model.HadoopTaskInfo;
import com.asakusafw.workflow.model.TaskInfo;

/**
 * Prepares and executes jobflows.
 * Application developers must not use this class directly.
 * @since 0.2.0
 * @version 0.10.0
 */
class JobflowExecutor {

    static final Logger LOG = LoggerFactory.getLogger(JobflowExecutor.class);

    private final TestDriverContext driverContext;

    private final TestModerator moderator;

    private final ConfigurationFactory configurations;

    private final List<TaskExecutor> taskExecutors;

    /**
     * Creates a new instance.
     * @param context submission context
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    JobflowExecutor(TestDriverContext context) {
        if (context == null) {
            throw new IllegalArgumentException("context must not be null"); //$NON-NLS-1$
        }
        this.driverContext = context;
        this.moderator = new TestModerator(context.getRepository(), context);
        this.taskExecutors = new ArrayList<>();
        // SPI-provided executors take precedence; built-in defaults are appended as fallbacks
        taskExecutors.addAll(TaskExecutors.loadDefaults(context.getClassLoader()));
        taskExecutors.add(new DefaultHadoopTaskExecutor());
        taskExecutors.add(new DefaultCommandTaskExecutor());
        taskExecutors.add(new DefaultDeleteTaskExecutor());
        this.configurations = ConfigurationFactory.getDefault();
    }

    /**
     * Cleans up the working directory on the DFS.
     * @throws IOException if failed to clean up
     */
    public void cleanWorkingDirectory() throws IOException {
        Configuration conf = configurations.newInstance();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(CompilerConstants.getRuntimeWorkingDirectory());
        Path fullPath = fs.makeQualified(path);
        LOG.debug("start initializing working directory on the testing runtime: {}", fullPath); //$NON-NLS-1$
        // NOTE: delete() also returns false when the path does not exist, so a "failed" log
        // here does not necessarily indicate an error
        boolean deleted = fs.delete(fullPath, true);
        if (deleted) {
            LOG.debug("finish initializing working directory on the testing runtime: {}", fullPath); //$NON-NLS-1$
        } else {
            LOG.debug("failed to initialize working directory on the testing runtime: {}", fullPath); //$NON-NLS-1$
        }
    }

    /**
     * Cleans up target jobflow's input/output.
     * @param flow target jobflow
     * @throws IOException if failed to clean up
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @since 0.8.0
     */
    public void cleanInputOutput(JobflowMirror flow) throws IOException {
        if (flow == null) {
            // FIX: message previously said "info", which is not the parameter name
            throw new IllegalArgumentException("flow must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipCleanInput() == false) {
            for (PortMirror<? extends ImporterDescription> port : flow.getInputs()) {
                LOG.debug("cleaning input: {}", port.getName()); //$NON-NLS-1$
                moderator.truncate(port.getDescription());
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipInitializeInput")); //$NON-NLS-1$
        }
        if (driverContext.isSkipCleanOutput() == false) {
            for (PortMirror<? extends ExporterDescription> port : flow.getOutputs()) {
                LOG.debug("cleaning output: {}", port.getName()); //$NON-NLS-1$
                moderator.truncate(port.getDescription());
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipInitializeOutput")); //$NON-NLS-1$
        }
    }

    /**
     * Cleans up extra resources.
     * @param resources the external resource map
     * @throws IOException if failed to create job processes
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @since 0.7.3
     */
    public void cleanExtraResources(
            Map<? extends ImporterDescription, ? extends DataModelSourceFactory> resources) throws IOException {
        if (resources == null) {
            throw new IllegalArgumentException("resources must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipCleanInput() == false) {
            for (ImporterDescription description : resources.keySet()) {
                LOG.debug("cleaning external resource: {}", description); //$NON-NLS-1$
                moderator.truncate(description);
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipInitializeExtraResources")); //$NON-NLS-1$
        }
    }

    /**
     * Prepares the target jobflow's inputs.
     * @param jobflow target jobflow
     * @param inputs target inputs
     * @throws IOException if failed to create job processes
     * @throws IllegalStateException if input is not defined in the jobflow
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public void prepareInput(
            JobflowMirror jobflow,
            Iterable<? extends DriverInputBase<?>> inputs) throws IOException {
        if (jobflow == null) {
            throw new IllegalArgumentException("jobflow must not be null"); //$NON-NLS-1$
        }
        if (inputs == null) {
            throw new IllegalArgumentException("inputs must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipPrepareInput() == false) {
            for (DriverInputBase<?> input : inputs) {
                DataModelSourceFactory source = input.getSource();
                // inputs without a source are intentionally left untouched
                if (source != null) {
                    String name = input.getName();
                    LOG.debug("preparing input: {} ({})", name, source); //$NON-NLS-1$
                    PortMirror<? extends ImporterDescription> port = jobflow.findInput(name);
                    if (port == null) {
                        throw new IllegalStateException(MessageFormat.format(
                                Messages.getString("JobflowExecutor.errorMissingInput"), //$NON-NLS-1$
                                name,
                                jobflow.getId()));
                    }
                    moderator.prepare(port.getDataType(), port.getDescription(), source);
                }
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipPrepareInput")); //$NON-NLS-1$
        }
    }

    /**
     * Prepares the target jobflow's output.
     * @param jobflow target jobflow
     * @param outputs target outputs
     * @throws IOException if failed to create job processes
     * @throws IllegalStateException if output is not defined in the jobflow
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public void prepareOutput(
            JobflowMirror jobflow,
            Iterable<? extends DriverOutputBase<?>> outputs) throws IOException {
        if (jobflow == null) {
            throw new IllegalArgumentException("jobflow must not be null"); //$NON-NLS-1$
        }
        if (outputs == null) {
            throw new IllegalArgumentException("outputs must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipPrepareOutput() == false) {
            for (DriverOutputBase<?> output : outputs) {
                DataModelSourceFactory source = output.getSource();
                if (source != null) {
                    String name = output.getName();
                    LOG.debug("preparing output: {} ({})", name, source); //$NON-NLS-1$
                    PortMirror<? extends ExporterDescription> port = jobflow.findOutput(name);
                    if (port == null) {
                        throw new IllegalStateException(MessageFormat.format(
                                Messages.getString("JobflowExecutor.errorMissingOutput"), //$NON-NLS-1$
                                name,
                                jobflow.getId()));
                    }
                    moderator.prepare(port.getDataType(), port.getDescription(), source);
                }
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipPrepareOutput")); //$NON-NLS-1$
        }
    }

    /**
     * Prepares external resources.
     * @param resources the external resource map
     * @throws IOException if failed to prepare external resources
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @since 0.7.3
     */
    public void prepareExternalResources(
            Map<? extends ImporterDescription, ? extends DataModelSourceFactory> resources) throws IOException {
        if (resources == null) {
            throw new IllegalArgumentException("resources must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipPrepareInput() == false) {
            for (Map.Entry<? extends ImporterDescription, ? extends DataModelSourceFactory> entry
                    : resources.entrySet()) {
                ImporterDescription description = entry.getKey();
                DataModelSourceFactory source = entry.getValue();
                LOG.debug("preparing external resource: {} ({})", description, source); //$NON-NLS-1$
                moderator.prepare(description.getModelType(), description, source);
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipPrepareExtraResource")); //$NON-NLS-1$
        }
    }

    /**
     * Checks if the given jobflow is valid.
     * @param jobflow the target jobflow
     * @throws IllegalStateException if the jobflow contains a task that no executor supports
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public void validateJobflow(JobflowMirror jobflow) {
        if (jobflow == null) {
            throw new IllegalArgumentException("jobflow must not be null"); //$NON-NLS-1$
        }
        TaskExecutionContext context = new TaskExecutorContextAdapter(driverContext, configurations);
        // collect tasks (across all phases) for which no registered executor claims support
        List<? extends TaskInfo> tasks = Arrays.stream(TaskInfo.Phase.values())
                .flatMap(it -> jobflow.getTasks(it).stream())
                .filter(task -> findExecutor(context, task).isPresent() == false)
                .collect(Collectors.toList());
        for (TaskInfo task : tasks) {
            // delegate to the default executors to raise a descriptive diagnostic
            if (task instanceof CommandTaskInfo) {
                DefaultCommandTaskExecutor.checkSupported(context, (CommandTaskInfo) task);
            } else if (task instanceof HadoopTaskInfo) {
                DefaultHadoopTaskExecutor.checkSupported(context, (HadoopTaskInfo) task);
            } else {
                throw new IllegalStateException(MessageFormat.format(
                        "unsupported task type: {0}",
                        task.getClass().getSimpleName()));
            }
        }
    }

    // Returns the first registered executor that supports the given task, if any.
    private Optional<TaskExecutor> findExecutor(TaskExecutionContext context, TaskInfo task) {
        return taskExecutors.stream().filter(it -> it.isSupported(context, task)).findFirst();
    }

    /**
     * Runs the target jobflow.
     * @param jobflow target jobflow
     * @throws IOException if failed to create job processes
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public void runJobflow(JobflowMirror jobflow) throws IOException {
        if (jobflow == null) {
            throw new IllegalArgumentException("jobflow must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipRunJobflow() == false) {
            TaskExecutionContext context = new TaskExecutorContextAdapter(driverContext, configurations);
            try {
                new BasicJobflowExecutor(taskExecutors).execute(context, jobflow);
            } catch (InterruptedException e) {
                throw new IOException("interrupted while running jobflow", e);
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipExecute")); //$NON-NLS-1$
        }
    }

    /**
     * Verifies the jobflow's results.
     * @param jobflow target jobflow
     * @param verifyContext verification context
     * @param outputs output information
     * @throws IOException if failed to verify
     * @throws IllegalStateException if output is not defined in the jobflow
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @throws AssertionError if actual output is different for the expected output
     */
    public void verify(
            JobflowMirror jobflow,
            VerifyContext verifyContext,
            Iterable<? extends DriverOutputBase<?>> outputs) throws IOException {
        if (jobflow == null) {
            throw new IllegalArgumentException("jobflow must not be null"); //$NON-NLS-1$
        }
        if (verifyContext == null) {
            throw new IllegalArgumentException("verifyContext must not be null"); //$NON-NLS-1$
        }
        if (outputs == null) {
            throw new IllegalArgumentException("outputs must not be null"); //$NON-NLS-1$
        }
        if (driverContext.isSkipVerify() == false) {
            // accumulate all differences across outputs, then fail once at the end
            StringBuilder sb = new StringBuilder();
            boolean sawError = false;
            for (DriverOutputBase<?> output : outputs) {
                String name = output.getName();
                PortMirror<? extends ExporterDescription> port = jobflow.findOutput(name);
                if (port == null) {
                    throw new IllegalStateException(MessageFormat.format(
                            Messages.getString("JobflowExecutor.errorMissingOutput"), //$NON-NLS-1$
                            name,
                            jobflow.getId()));
                }
                if (output.getResultSink() != null) {
                    LOG.debug("saving result output: {} ({})", output.getName(), output.getResultSink()); //$NON-NLS-1$
                    moderator.save(port.getDataType(), port.getDescription(), output.getResultSink());
                }
                if (output.getVerifier() != null) {
                    LOG.debug("verifying result output: {} ({})", name, output.getVerifier()); //$NON-NLS-1$
                    List<Difference> diffList = moderator.inspect(
                            port.getDataType(),
                            port.getDescription(),
                            verifyContext,
                            output.getVerifier());
                    if (diffList.isEmpty() == false) {
                        sawError = true;
                        String message = MessageFormat.format(
                                Messages.getString("JobflowExecutor.messageDifferenceSummary"), //$NON-NLS-1$
                                jobflow.getId(),
                                output.getName(),
                                diffList.size());
                        sb.append(String.format("%s:%n", message)); //$NON-NLS-1$
                        LOG.warn(message);
                        if (output.getDifferenceSink() != null) {
                            LOG.debug("saving output differences: {} ({})", //$NON-NLS-1$
                                    name, output.getDifferenceSink());
                            moderator.save(port.getDataType(), diffList, output.getDifferenceSink());
                        }
                        for (Difference difference : diffList) {
                            sb.append(String.format("%s: %s%n", //$NON-NLS-1$
                                    port.getDataType().getSimpleName(),
                                    difference));
                        }
                    }
                }
            }
            if (sawError) {
                throw new AssertionError(sb);
            }
        } else {
            LOG.info(Messages.getString("JobflowExecutor.infoSkipVerifyResult")); //$NON-NLS-1$
        }
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.test; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseService; import 
org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.LocalStateSecurity;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.rules.ExternalResource;

import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;

/**
 * Base class to run tests against a cluster with X-Pack installed and security enabled.
 * The default {@link org.elasticsearch.test.ESIntegTestCase.Scope} is {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE}
 *
 * @see SecuritySettingsSource
 */
public abstract class SecurityIntegTestCase extends ESIntegTestCase {

    // Shared (static) security configuration; created in initDefaultSettings() and
    // released in destroyDefaultSettings() to avoid leaking across suites.
    private static SecuritySettingsSource SECURITY_DEFAULT_SETTINGS;
    protected static SecureString BOOTSTRAP_PASSWORD = null;

    /**
     * Settings used when the {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     * so that some of the configuration parameters can be overridden through test instance methods, similarly
     * to how {@link #nodeSettings(int)} works.
     */
    private static CustomSecuritySettingsSource customSecuritySettingsSource = null;

    @BeforeClass
    public static void generateBootstrapPassword() {
        BOOTSTRAP_PASSWORD = TEST_PASSWORD_SECURE_STRING.clone();
    }

    //UnicastZen requires the number of nodes in a cluster to generate the unicast configuration.
    //The number of nodes is randomized though, but we can predict what the maximum number of nodes will be
    //and configure them all in unicast.hosts
    protected static int defaultMaxNumberOfNodes() {
        ClusterScope clusterScope = SecurityIntegTestCase.class.getAnnotation(ClusterScope.class);
        if (clusterScope == null) {
            // no annotation: assume the framework defaults for every node kind
            return InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES
                    + InternalTestCluster.DEFAULT_MAX_NUM_DATA_NODES
                    + InternalTestCluster.DEFAULT_MAX_NUM_CLIENT_NODES;
        } else {
            int clientNodes = clusterScope.numClientNodes();
            if (clientNodes < 0) {
                clientNodes = InternalTestCluster.DEFAULT_MAX_NUM_CLIENT_NODES;
            }
            int masterNodes = 0;
            if (clusterScope.supportsDedicatedMasters()) {
                masterNodes = InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES;
            }
            int dataNodes = 0;
            if (clusterScope.numDataNodes() < 0) {
                if (clusterScope.maxNumDataNodes() < 0) {
                    dataNodes = InternalTestCluster.DEFAULT_MAX_NUM_DATA_NODES;
                } else {
                    dataNodes = clusterScope.maxNumDataNodes();
                }
            } else {
                dataNodes = clusterScope.numDataNodes();
            }
            return masterNodes + dataNodes + clientNodes;
        }
    }

    // Walks up the class hierarchy looking for a @ClusterScope annotation; stops at
    // SecurityIntegTestCase/Object so only subclass annotations are honored.
    private static ClusterScope getAnnotation(Class<?> clazz) {
        if (clazz == Object.class || clazz == SecurityIntegTestCase.class) {
            return null;
        }
        ClusterScope annotation = clazz.getAnnotation(ClusterScope.class);
        if (annotation != null) {
            return annotation;
        }
        return getAnnotation(clazz.getSuperclass());
    }

    Scope getCurrentClusterScope() {
        return getCurrentClusterScope(this.getClass());
    }

    private static Scope getCurrentClusterScope(Class<?> clazz) {
        ClusterScope annotation = getAnnotation(clazz);
        return annotation == null ? Scope.SUITE : annotation.scope();
    }

    @BeforeClass
    public static void initDefaultSettings() {
        if (SECURITY_DEFAULT_SETTINGS == null) {
            SECURITY_DEFAULT_SETTINGS = new SecuritySettingsSource(randomBoolean(), createTempDir(), Scope.SUITE);
        }
    }

    /**
     * Set the static default settings to null to prevent a memory leak. The test framework also checks for memory leaks
     * and computes the size, this can cause issues when running with the security manager as it tries to do reflection
     * into protected sun packages.
     */
    @AfterClass
    public static void destroyDefaultSettings() {
        SECURITY_DEFAULT_SETTINGS = null;
        customSecuritySettingsSource = null;
    }

    @Rule
    //Rules are the only way to have something run before the before (final) method inherited from ESIntegTestCase
    public ExternalResource externalResource = new ExternalResource() {
        @Override
        protected void before() throws Throwable {
            Scope currentClusterScope = getCurrentClusterScope();
            switch (currentClusterScope) {
                case SUITE:
                    // SUITE scope: reuse one settings source across the whole suite
                    if (customSecuritySettingsSource == null) {
                        customSecuritySettingsSource =
                                new CustomSecuritySettingsSource(transportSSLEnabled(), createTempDir(), currentClusterScope);
                    }
                    break;
                case TEST:
                    // TEST scope: fresh settings source per test
                    customSecuritySettingsSource =
                            new CustomSecuritySettingsSource(transportSSLEnabled(), createTempDir(), currentClusterScope);
                    break;
            }
        }
    };

    /**
     * A JUnit class level rule that runs after the AfterClass method in {@link ESIntegTestCase},
     * which stops the cluster. After the cluster is stopped, there are a few netty threads that
     * can linger, so we wait for them to finish otherwise these lingering threads can intermittently
     * trigger the thread leak detector
     */
    @ClassRule
    public static final ExternalResource STOP_NETTY_RESOURCE = new ExternalResource() {
        @Override
        protected void after() {
            try {
                GlobalEventExecutor.INSTANCE.awaitInactivity(5, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } catch (IllegalStateException e) {
                if (e.getMessage().equals("thread was not started") == false) {
                    throw e;
                }
                // ignore since the thread was never started
            }
            try {
                ThreadDeathWatcher.awaitInactivity(5, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    };

    @Before
    //before methods from the superclass are run before this, which means that the current cluster is ready to go
    public void assertXPackIsInstalled() {
        doAssertXPackIsInstalled();
    }

    // Verifies every node in the cluster reports the LocalStateSecurity plugin.
    protected void doAssertXPackIsInstalled() {
        NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().clear().setPlugins(true).get();
        for (NodeInfo nodeInfo : nodeInfos.getNodes()) {
            // TODO: disable this assertion for now, due to random runs with mock plugins. perhaps run without mock plugins?
            // assertThat(nodeInfo.getPlugins().getInfos(), hasSize(2));
            Collection<String> pluginNames = nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos()
                    .stream().map(p -> p.getClassname()).collect(Collectors.toList());
            assertThat("plugin [" + LocalStateSecurity.class.getName() + "] not found in [" + pluginNames + "]",
                    pluginNames, hasItem(LocalStateSecurity.class.getName()));
        }
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal));
        // Disable native ML autodetect_process as the c++ controller won't be available
        // builder.put(MachineLearningField.AUTODETECT_PROCESS.getKey(), false);
        Settings customSettings = customSecuritySettingsSource.nodeSettings(nodeOrdinal);
        builder.put(customSettings, false); // handle secure settings separately
        builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
        Settings.Builder customBuilder = Settings.builder().put(customSettings);
        if (customBuilder.getSecureSettings() != null) {
            // merge the custom secure settings into the node's secure settings
            SecuritySettingsSource.addSecureSettings(builder, secureSettings ->
                    secureSettings.merge((MockSecureSettings) customBuilder.getSecureSettings()));
        }
        if (builder.getSecureSettings() == null) {
            builder.setSecureSettings(new MockSecureSettings());
        }
        ((MockSecureSettings) builder.getSecureSettings()).setString("bootstrap.password", BOOTSTRAP_PASSWORD.toString());
        return builder.build();
    }

    @Override
    protected Path nodeConfigPath(int nodeOrdinal) {
        return customSecuritySettingsSource.nodeConfigPath(nodeOrdinal);
    }

    @Override
    protected boolean addMockTransportService() {
        return false; // security has its own transport service
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return customSecuritySettingsSource.nodePlugins();
    }

    /**
     * Allows to override the users config file when the {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     */
    protected String configUsers() {
        return SECURITY_DEFAULT_SETTINGS.configUsers();
    }

    /**
     * Allows to override the users_roles config file when the {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     */
    protected String configUsersRoles() {
        return SECURITY_DEFAULT_SETTINGS.configUsersRoles();
    }

    /**
     * Allows to override the roles config file when the {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     */
    protected String configRoles() {
        return SECURITY_DEFAULT_SETTINGS.configRoles();
    }

    /**
     * Allows to override the node client username (used while sending requests to the test cluster) when the
     * {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     */
    protected String nodeClientUsername() {
        return SECURITY_DEFAULT_SETTINGS.nodeClientUsername();
    }

    /**
     * Allows to override the node client password (used while sending requests to the test cluster) when the
     * {@link org.elasticsearch.test.ESIntegTestCase.ClusterScope} is set to
     * {@link org.elasticsearch.test.ESIntegTestCase.Scope#SUITE} or {@link org.elasticsearch.test.ESIntegTestCase.Scope#TEST}
     */
    protected SecureString nodeClientPassword() {
        return SECURITY_DEFAULT_SETTINGS.nodeClientPassword();
    }

    /**
     * Allows to control whether ssl key information is auto generated or not on the transport layer
     */
    protected boolean transportSSLEnabled() {
        return randomBoolean();
    }

    protected int maxNumberOfNodes() {
        return defaultMaxNumberOfNodes();
    }

    // Settings source that delegates the overridable config back to the enclosing test
    // instance, so subclasses can customize users/roles/credentials per suite.
    private class CustomSecuritySettingsSource extends SecuritySettingsSource {

        private CustomSecuritySettingsSource(boolean sslEnabled, Path configDir, Scope scope) {
            super(sslEnabled, configDir, scope);
        }

        @Override
        protected String configUsers() {
            return SecurityIntegTestCase.this.configUsers();
        }

        @Override
        protected String configUsersRoles() {
            return SecurityIntegTestCase.this.configUsersRoles();
        }

        @Override
        protected String configRoles() {
            return SecurityIntegTestCase.this.configRoles();
        }

        @Override
        protected String nodeClientUsername() {
            return SecurityIntegTestCase.this.nodeClientUsername();
        }

        @Override
        protected SecureString nodeClientPassword() {
            return SecurityIntegTestCase.this.nodeClientPassword();
        }
    }

    protected static void assertGreenClusterState(Client client) {
        ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().get();
        assertNoTimeout(clusterHealthResponse);
        assertThat(clusterHealthResponse.getStatus(), is(ClusterHealthStatus.GREEN));
    }

    /**
     * Creates the indices provided as argument, randomly associating them with aliases, indexes one dummy document per index
     * and refreshes the new indices
     */
    protected void createIndicesWithRandomAliases(String... indices) {
        createIndex(indices);
        if (frequently()) {
            boolean aliasAdded = false;
            IndicesAliasesRequestBuilder builder = client().admin().indices().prepareAliases();
            for (String index : indices) {
                if (frequently()) {
                    //one alias per index with prefix "alias-"
                    builder.addAlias(index, "alias-" + index);
                    aliasAdded = true;
                }
            }
            // If we get to this point and we haven't added an alias to the request we need to add one
            // or the request will fail so use noAliasAdded to force adding the alias in this case
            if (aliasAdded == false || randomBoolean()) {
                //one alias pointing to all indices
                for (String index : indices) {
                    builder.addAlias(index, "alias");
                }
            }
            assertAcked(builder);
        }
        for (String index : indices) {
            client().prepareIndex(index).setSource("field", "value").get();
        }
        refresh(indices);
    }

    @Override
    protected Function<Client, Client> getClientWrapper() {
        Map<String, String> headers = Collections.singletonMap("Authorization",
                basicAuthHeaderValue(nodeClientUsername(), nodeClientPassword()));
        // we need to wrap node clients because we do not specify a user for nodes and all requests will use the system
        // user. This is ok for internal n2n stuff but the test framework does other things like wiping indices, repositories, etc
        // that the system user cannot do. so we wrap the node client with a user that can do these things since the client() calls
        // return a node client
        return client -> (client instanceof NodeClient) ? client.filterWithHeader(headers) : client;
    }

    public void assertSecurityIndexActive() throws Exception {
        assertSecurityIndexActive(cluster());
    }

    // Waits (up to 30s) for the security index to be recovered and all of its primary shards active.
    public void assertSecurityIndexActive(TestCluster testCluster) throws Exception {
        for (Client client : testCluster.getClients()) {
            assertBusy(() -> {
                ClusterState clusterState = client.admin().cluster().prepareState().setLocal(true).get().getState();
                assertFalse(clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK));
                XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint().startObject();
                assertTrue("security index mapping not sufficient to read:\n" +
                                Strings.toString(clusterState.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject()),
                        SecurityIndexManager.checkIndexMappingVersionMatches(SECURITY_MAIN_ALIAS, clusterState, logger,
                                Version.CURRENT.minimumIndexCompatibilityVersion()::onOrBefore));
                Index securityIndex = resolveSecurityIndex(clusterState.metadata());
                if (securityIndex != null) {
                    IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(securityIndex);
                    if (indexRoutingTable != null) {
                        assertTrue(indexRoutingTable.allPrimaryShardsActive());
                    }
                }
            }, 30L, TimeUnit.SECONDS);
        }
    }

    protected void deleteSecurityIndex() {
        // use a superuser client since only a superuser may delete the security index
        final Client client = client().filterWithHeader(Collections.singletonMap("Authorization",
                UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
                        SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
        GetIndexRequest getIndexRequest = new GetIndexRequest();
        getIndexRequest.indices(SECURITY_MAIN_ALIAS);
        getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
        GetIndexResponse getIndexResponse = client.admin().indices().getIndex(getIndexRequest).actionGet();
        if (getIndexResponse.getIndices().length > 0) {
            // this is a hack to clean up the .security index since only a superuser can delete it
            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices());
            client.admin().indices().delete(deleteIndexRequest).actionGet();
        }
    }

    // Resolves the concrete index behind the security alias, or null when absent.
    private static Index resolveSecurityIndex(Metadata metadata) {
        final IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(SECURITY_MAIN_ALIAS);
        if (indexAbstraction != null) {
            return indexAbstraction.getIndices().get(0).getIndex();
        }
        return null;
    }

    protected boolean isTransportSSLEnabled() {
        return customSecuritySettingsSource.isSslEnabled();
    }

    // Fast password hashers only -- keeps test runtime down versus high-cost rounds.
    protected static Hasher getFastStoredHashAlgoForTests() {
        return Hasher.resolve(randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9"));
    }

    protected class TestRestHighLevelClient extends RestHighLevelClient {
        public TestRestHighLevelClient() {
            super(getRestClient(), client -> {}, List.of());
        }
    }
}
/* * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.hostobjects.sso.internal.builder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.joda.time.DateTime; import org.opensaml.core.xml.config.XMLObjectProviderRegistrySupport; import org.opensaml.saml.saml2.core.Issuer; import org.opensaml.saml.saml2.core.LogoutRequest; import org.opensaml.saml.saml2.core.SessionIndex; import org.opensaml.saml.saml2.core.impl.IssuerBuilder; import org.opensaml.saml.saml2.core.impl.SessionIndexBuilder; import org.opensaml.core.xml.io.Marshaller; import org.opensaml.core.xml.io.MarshallerFactory; import org.opensaml.core.xml.io.MarshallingException; import org.opensaml.security.x509.X509Credential; import org.opensaml.xmlsec.signature.KeyInfo; import org.opensaml.xmlsec.signature.Signature; import org.opensaml.xmlsec.signature.X509Certificate; import net.shibboleth.utilities.java.support.codec.Base64Support; import org.opensaml.xmlsec.signature.X509Data; import org.opensaml.xmlsec.signature.support.SignatureConstants; import org.opensaml.xmlsec.signature.support.SignatureException; import org.opensaml.xmlsec.signature.support.Signer; import org.wso2.carbon.hostobjects.sso.exception.SSOHostObjectException; import org.wso2.carbon.hostobjects.sso.internal.util.*; import java.security.cert.CertificateEncodingException; 
import java.util.ArrayList; import java.util.List; /** * This class is used to generate the Logout Requests. */ public class LogoutRequestBuilder { private static Log log = LogFactory.getLog(LogoutRequestBuilder.class); /** * Build the logout request * @param subject name of the user * @param reason reason for generating logout request. * @return LogoutRequest object */ public LogoutRequest buildLogoutRequest(String subject,String sessionIndexId, String reason, String issuerId, String nameIdFormat) { Util.doBootstrap(); LogoutRequest logoutReq = new org.opensaml.saml.saml2.core.impl.LogoutRequestBuilder().buildObject(); logoutReq.setID(Util.createID()); DateTime issueInstant = new DateTime(); logoutReq.setIssueInstant(issueInstant); logoutReq.setNotOnOrAfter(new DateTime(issueInstant.getMillis() + 5 * 60 * 1000)); IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = issuerBuilder.buildObject(); issuer.setValue(issuerId); logoutReq.setIssuer(issuer); logoutReq.setNameID(Util.buildNameID(nameIdFormat, subject)); SessionIndex sessionIndex = new SessionIndexBuilder().buildObject(); sessionIndex.setSessionIndex(sessionIndexId); logoutReq.getSessionIndexes().add(sessionIndex); logoutReq.setReason(reason); return logoutReq; } /** * Build the logout request * @param subject name of the user * @param reason reason for generating logout request. 
* @return LogoutRequest object */ public LogoutRequest buildSignedLogoutRequest(String subject,String sessionIndexId, String reason, String issuerId, int tenantId, String tenantDomain, String destination, String nameIdFormat) throws SSOHostObjectException { Util.doBootstrap(); LogoutRequest logoutReq = new org.opensaml.saml.saml2.core.impl.LogoutRequestBuilder().buildObject(); logoutReq.setID(Util.createID()); DateTime issueInstant = new DateTime(); logoutReq.setIssueInstant(issueInstant); logoutReq.setNotOnOrAfter(new DateTime(issueInstant.getMillis() + 5 * 60 * 1000)); IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = issuerBuilder.buildObject(); issuer.setValue(issuerId); logoutReq.setIssuer(issuer); logoutReq.setNameID(Util.buildNameID(nameIdFormat, subject)); SessionIndex sessionIndex = new SessionIndexBuilder().buildObject(); sessionIndex.setSessionIndex(sessionIndexId); logoutReq.getSessionIndexes().add(sessionIndex); logoutReq.setReason(reason); logoutReq.setDestination(destination); SSOAgentCarbonX509Credential ssoAgentCarbonX509Credential = new SSOAgentCarbonX509Credential(tenantId, tenantDomain); setSignature(logoutReq, SignatureConstants.ALGO_ID_SIGNATURE_RSA, new X509CredentialImpl(ssoAgentCarbonX509Credential)); return logoutReq; } /** * Overload Logout request for sessionIndexId is not exist case * * @param subject Subject * @param reason Reason for logout * @param issuerId id of issuer * @return SAML logout request */ public LogoutRequest buildLogoutRequest(String subject, String reason, String issuerId, String nameIdFormat) { Util.doBootstrap(); LogoutRequest logoutReq = new org.opensaml.saml.saml2.core.impl.LogoutRequestBuilder().buildObject(); logoutReq.setID(Util.createID()); DateTime issueInstant = new DateTime(); logoutReq.setIssueInstant(issueInstant); logoutReq.setNotOnOrAfter(new DateTime(issueInstant.getMillis() + 5 * 60 * 1000)); IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = 
issuerBuilder.buildObject(); issuer.setValue(issuerId); logoutReq.setIssuer(issuer); logoutReq.setNameID(Util.buildNameID(nameIdFormat, subject)); logoutReq.setReason(reason); return logoutReq; } /** * Overload Logout request for sessionIndexId is not exist case * * @param subject Subject * @param reason Reason for logout * @param issuerId id of issuer * @return Signed SAML logout request */ public LogoutRequest buildSignedLogoutRequest(String subject, String reason, String issuerId, int tenantId, String tenantDomain, String destination, String nameIdFormat) throws SSOHostObjectException { Util.doBootstrap(); LogoutRequest logoutReq = new org.opensaml.saml.saml2.core.impl.LogoutRequestBuilder().buildObject(); logoutReq.setID(Util.createID()); DateTime issueInstant = new DateTime(); logoutReq.setIssueInstant(issueInstant); logoutReq.setNotOnOrAfter(new DateTime(issueInstant.getMillis() + 5 * 60 * 1000)); IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = issuerBuilder.buildObject(); issuer.setValue(issuerId); logoutReq.setIssuer(issuer); logoutReq.setNameID(Util.buildNameID(nameIdFormat, subject)); logoutReq.setReason(reason); logoutReq.setDestination(destination); SSOAgentCarbonX509Credential ssoAgentCarbonX509Credential = new SSOAgentCarbonX509Credential(tenantId, tenantDomain); setSignature(logoutReq, SignatureConstants.ALGO_ID_SIGNATURE_RSA, new X509CredentialImpl(ssoAgentCarbonX509Credential)); return logoutReq; } /** * Sign the SAML LogoutRequest message * * @param logoutRequest SAML logout request * @param signatureAlgorithm Signature algorithm * @param cred X.509 credential object * @return SAML logout request including the signature */ public static LogoutRequest setSignature(LogoutRequest logoutRequest, String signatureAlgorithm, X509Credential cred) throws SSOHostObjectException { try { Signature signature = (Signature) Util.buildXMLObject(Signature.DEFAULT_ELEMENT_NAME); signature.setSigningCredential(cred); 
signature.setSignatureAlgorithm(signatureAlgorithm); signature.setCanonicalizationAlgorithm(SignatureConstants.ALGO_ID_C14N_EXCL_OMIT_COMMENTS); KeyInfo keyInfo = (KeyInfo) Util.buildXMLObject(KeyInfo.DEFAULT_ELEMENT_NAME); X509Data data = (X509Data) Util.buildXMLObject(X509Data.DEFAULT_ELEMENT_NAME); X509Certificate cert = (X509Certificate) Util.buildXMLObject(X509Certificate.DEFAULT_ELEMENT_NAME); String value = Base64Support.encode(cred.getEntityCertificate().getEncoded(), Base64Support.UNCHUNKED); cert.setValue(value); data.getX509Certificates().add(cert); keyInfo.getX509Datas().add(data); signature.setKeyInfo(keyInfo); logoutRequest.setSignature(signature); List<Signature> signatureList = new ArrayList<Signature>(); signatureList.add(signature); // Marshall and Sign MarshallerFactory marshallerFactory = XMLObjectProviderRegistrySupport.getMarshallerFactory(); Marshaller marshaller = marshallerFactory.getMarshaller(logoutRequest); marshaller.marshall(logoutRequest); Signer.signObjects(signatureList); return logoutRequest; } catch (CertificateEncodingException e) { handleException("Error getting certificate", e); } catch (MarshallingException e) { handleException("Error while marshalling logout request", e); } catch (SignatureException e) { handleException("Error while signing the SAML logout request", e); } catch (SSOHostObjectException e) { handleException("Error while signing the SAML logout request", e); } return null; } private static void handleException(String errorMessage, Throwable e) throws SSOHostObjectException { log.error(errorMessage); throw new SSOHostObjectException(errorMessage, e); } }
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.gemstone.gemfire.internal.cache; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import com.gemstone.gemfire.CancelCriterion; import com.gemstone.gemfire.InternalGemFireError; import com.gemstone.gemfire.cache.DiskAccessException; import com.gemstone.gemfire.i18n.LogWriterI18n; import com.gemstone.gemfire.internal.Assert; import com.gemstone.gemfire.internal.InternalDataSerializer; import com.gemstone.gemfire.internal.cache.Oplog.DiskRegionInfo; import com.gemstone.gemfire.internal.cache.Oplog.KRFEntry; import com.gemstone.gemfire.internal.cache.Oplog.OplogEntryIdMap; import com.gemstone.gemfire.internal.cache.control.MemoryThresholdListener; import com.gemstone.gemfire.internal.cache.persistence.DiskRegionView; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.internal.offheap.SimpleMemoryAllocatorImpl.Chunk; import com.gemstone.gemfire.internal.offheap.annotations.Released; import 
com.gemstone.gemfire.internal.shared.unsafe.ChannelBufferUnsafeDataInputStream;
import com.gemstone.gemfire.internal.shared.unsafe.ChannelBufferUnsafeDataOutputStream;
import com.gemstone.gnu.trove.THashMap;
import com.gemstone.gnu.trove.THashSet;
import org.eclipse.collections.api.block.function.Function;
import org.eclipse.collections.api.block.procedure.Procedure2;
import org.eclipse.collections.impl.list.mutable.primitive.LongArrayList;
import org.eclipse.collections.impl.map.mutable.UnifiedMap;

/**
 * Encapsulates methods to read and write from index files in an Oplog.
 *
 * @author kneeraj, swale
 * @since gfxd 1.0
 */
public final class OplogIndex {

  // extension used for index ("irf") files belonging to an oplog
  static final String IDX_FILE_EXT = "idxkrf";

  // for index file records
  public static final byte INDEXID_RECORD = 0x01;
  public static final byte INDEX_RECORD = 0x02;
  public static final byte INDEX_END_OF_FILE = 0x03;

  // randomly generated bytes to mark the valid end of an index file.
  // The first byte is the INDEX_END_OF_FILE opcode so a sequential reader
  // recognizes the terminator record.
  private static final byte[] INDEX_END_OF_FILE_MAGIC = new byte[] {
      INDEX_END_OF_FILE, -0x37, -0x11, -0x26, -0x46, 0x25, 0x71, 0x3b,
      0x1f, 0x4b, -0x77, 0x2b, -0x6f, -0x1f, 0x6b, -0x02 };

  /**
   * The max entries batched in memory before flushing to index file. Batching is
   * primarily done to determine any multiple entries against the same index key
   * to optimize its serialization by avoiding writing the same index key
   * multiple times.
   */
  public static final int BATCH_FLUSH_SIZE_AT_ROLLOVER = 20000;

  // owning oplog and its disk store
  private final Oplog oplog;
  private final DiskStoreImpl dsi;

  // the current index file for this oplog (may be replaced on version roll)
  private File irf;
  // output stream for writing index records; non-null only while writing
  private ChannelBufferUnsafeDataOutputStream dos;

  OplogIndex(Oplog oplog) {
    this.oplog = oplog;
    this.dsi = oplog.getParent();
  }

  /**
   * Registers an index file discovered during recovery. When more than one
   * candidate is found, the file with the higher encoded version wins.
   */
  public void addRecoveredFile(String fname) {
    if (this.irf == null) {
      this.irf = new File(oplog.getDirectoryHolder().getDir(), fname);
    } else {
      // If, for some strange reason, we end up with two idx files
      // pick the one with the higher version.
      long currentVersion = getIndexFileVersion(this.irf.getName());
      long incomingVersion = getIndexFileVersion(fname);
      if (incomingVersion > currentVersion) {
        this.irf = new File(oplog.getDirectoryHolder().getDir(), fname);
      }
    }
  }

  /**
   * Extract the version of the current index file from the name.
   */
  private long getIndexFileVersion(String indxFile) {
    Matcher matcher = Oplog.IDX_PATTERN.matcher(indxFile);
    if (!matcher.matches()) {
      throw new InternalGemFireError(
          "Could not match index file pattern against " + indxFile);
    }
    String versionStr = matcher.group(1);
    return Long.parseLong(versionStr);
  }

  /** Builds the index file name as "&lt;diskFile&gt;.&lt;version&gt;.idxkrf". */
  private File getFileName(long version) {
    return new File(oplog.getDirectoryHolder().getDir(),
        oplog.getDiskFileName() + "." + version + "." + IDX_FILE_EXT);
  }

  /** Returns the current index file (may be null if none recovered/created). */
  public synchronized File getIndexFile() {
    return this.irf;
  }

  /**
   * Returns the current index file only if it validates (properly terminated
   * with the EOF magic), else null. Invalid files are deleted as a side effect
   * of {@link #checkValidIndexFile}.
   */
  public synchronized File getIndexFileIfValid(boolean recreateIndexFile) {
    File f = getIndexFile();
    return checkValidIndexFile(f, recreateIndexFile) ? f : null;
  }

  /**
   * Opens the index file for writing, rolling its version number so that
   * incremental backups can see the contents changed. If the existing file is
   * valid, writing appends after stripping the EOF magic; otherwise the file
   * is started fresh.
   */
  synchronized void initializeForWriting(boolean truncate) throws IOException {
    // this method is only used by offline compaction. validating will not
    // create krf
    assert !this.dsi.isValidating();
    // Create the idx file with a version of 1, or roll the version by
    // renaming a new file. This allows the incremental backup to detect
    // that the file contents have changed.
    final File irf = this.irf;
    if (irf == null) {
      this.irf = getFileName(1);
    } else {
      long version = getIndexFileVersion(irf.getName());
      version++;
      File newFile = getFileName(version);
      try {
        Path irfPath = irf.toPath();
        if (Files.exists(irfPath)) {
          Files.move(irfPath, newFile.toPath());
        }
      } catch (IOException ioe) {
        throw new DiskAccessException("Failed to rename index file " + irf
            + " to " + newFile, ioe, this.dsi);
      }
      this.irf = newFile;
    }

    final boolean append = checkValidIndexFile(this.irf, false);
    FileChannel channel = FileChannel.open(this.irf.toPath(),
        StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    // position before EOF indicator if append is true
    if (truncate) {
      channel.truncate(0);
      channel.position(0);
    } else if (append) {
      // drop the EOF magic so new records continue the file; a fresh magic
      // is appended on close()
      channel.truncate(channel.size() - INDEX_END_OF_FILE_MAGIC.length);
      channel.position(channel.size());
    }
    this.dos = new ChannelBufferUnsafeDataOutputStream(channel,
        Oplog.DEFAULT_BUFFER_SIZE);
    final LogWriterI18n logger = this.oplog.logger;
    if (logger.infoEnabled()) {
      logger.info(LocalizedStrings.Oplog_CREATE_0_1_2, new Object[]{
          this.oplog.toString(), IDX_FILE_EXT, this.dsi.getName()});
    }
  }

  /**
   * Returns true only when the file exists, is recorded in the disk init
   * file, and ends with the EOF magic bytes (i.e. was closed cleanly).
   * Otherwise deletes the stale file and clears the written-index set.
   */
  boolean checkValidIndexFile(File f, boolean recreateIndexFile) {
    boolean hasIrf = this.dsi.getDiskInitFile()
        .hasIrf(this.oplog.getOplogId());
    if (f != null && f.exists() && !recreateIndexFile) {
      if (hasIrf) {
        // check if the file is closed properly
        try (FileChannel channel = FileChannel.open(f.toPath(),
            StandardOpenOption.READ)) {
          long size = channel.size();
          int eofLen = INDEX_END_OF_FILE_MAGIC.length;
          int readLen;
          // The file should end with the end of file magic.
          if (size >= eofLen) {
            channel.position(size - eofLen);
            ByteBuffer data = ByteBuffer.allocate(eofLen);
            while (eofLen > 0 && (readLen = channel.read(data)) > 0) {
              eofLen -= readLen;
            }
            if (eofLen <= 0
                && Arrays.equals(data.array(), INDEX_END_OF_FILE_MAGIC)) {
              return true;
            }
          }
        } catch (IOException ioe) {
          // ignore and continue to deleting the file
        }
      }
      // delete the existing, unreadable file
      deleteIRF(hasIrf ? "unreadable file" : "metadata missing");
    } else if (recreateIndexFile) {
      // delete the existing file as requires recreation
      deleteIRF(hasIrf ? "unreadable file" : "metadata missing");
      this.irf = null;
    }
    this.oplog.indexesWritten.clear();
    return false;
  }

  /**
   * Deletes the index file and records the deletion in the disk init file.
   * The reason, when non-null, is included in the info log.
   */
  final synchronized void deleteIRF(String reason) {
    DiskInitFile initFile = this.dsi.getDiskInitFile();
    if (initFile.hasIrf(this.oplog.getOplogId())) {
      // add deleted IRF record
      initFile.irfDelete(this.oplog.getOplogId());
    }
    if (this.irf != null && this.irf.exists()) {
      if (reason != null) {
        final LogWriterI18n logger = this.oplog.logger;
        if (logger.infoEnabled()) {
          logger.info(LocalizedStrings.Oplog_DELETE_0_1_2, new Object[] {
              this.oplog.toString(),
              this.irf.getAbsolutePath() + " (" + reason + ')',
              this.dsi.getName() });
        }
      }
      if (!this.irf.delete()) {
        final LogWriterI18n logger = this.oplog.logger;
        logger.warning(LocalizedStrings.Oplog_DELETE_FAIL_0_1_2, new Object[] {
            this.oplog.toString(), this.irf.getAbsolutePath(),
            this.dsi.getName() });
      }
    }
  }

  /**
   * Writes the EOF magic and closes the output stream and channel. If the
   * close fails mid-way the partially written file is deleted, since without
   * the terminating magic it would fail validation anyway.
   */
  public void close() {
    boolean allClosed = false;
    try {
      if (this.dos == null) {
        return;
      }
      this.dos.write(INDEX_END_OF_FILE_MAGIC);
      this.dos.flush();
      this.dos.close();
      this.dos.getUnderlyingChannel().close();
      this.dos = null;
      allClosed = true;
    } catch (IOException e) {
      throw new DiskAccessException("Failed to close index file " + this.irf,
          e, this.dsi);
    } finally {
      if (!allClosed) {
        // IOException happened during close, delete this idxkrf
        deleteIRF("failed to close");
      }
    }
  }

  /**
   * Pairs an index container with its recovery job, the in-memory batch of
   * (index key -&gt; entry key ids) awaiting flush, and the action flags.
   * Equality is delegated to the wrapped index so instances can live in sets
   * keyed by index.
   */
  public static final class IndexData {
    public final SortedIndexContainer index;
    public final SortedIndexRecoveryJob indexJob;
    // batch of key -> disk-entry key ids pending flush; null for ONLY_LOAD
    public final UnifiedMap<SortedIndexKey, LongArrayList> indexEntryMap;
    public final int action;

    // flags for "action" to indicate whether an index has to be only dumped to
    // file, or only loaded or both dumped to file as well as loaded
    public static final int ONLY_DUMP = 0x1;
    public static final int ONLY_LOAD = 0x2;
    public static final int BOTH_DUMP_AND_LOAD = 0x3;

    IndexData(final SortedIndexContainer index,
        final SortedIndexRecoveryJob indexJob, int action, int entryCacheSize) {
      this.index = index;
      this.indexJob = indexJob;
      this.indexEntryMap = (action != ONLY_LOAD
          ? new UnifiedMap<>(entryCacheSize) : null);
      this.action = action;
    }

    @Override
    public int hashCode() {
      return this.index.hashCode();
    }

    @Override
    public boolean equals(Object other) {
      return other instanceof IndexData
          && this.index.equals(((IndexData)other).index);
    }

    public SortedIndexContainer getIndex(){
      return this.index;
    }
  }

  /**
   * Walks the given KRF entries and, per affected index, either submits index
   * inserts (load), batches index records for the irf (dump), or both.
   * Batches are flushed every {@link #BATCH_FLUSH_SIZE_AT_ROLLOVER} processed
   * entries (sooner under eviction pressure) and once more at the end.
   */
  public void writeIndexRecords(List<KRFEntry> entries,
      Set<KRFEntry> notWrittenKRFs, Set<SortedIndexContainer> dumpIndexes,
      Map<SortedIndexContainer, SortedIndexRecoveryJob> loadIndexes) {
    if ((dumpIndexes == null || dumpIndexes.isEmpty())
        && loadIndexes == null) {
      return;
    }
    final LogWriterI18n logger = this.oplog.logger;
    if (DiskStoreImpl.INDEX_LOAD_DEBUG) {
      logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIndexRecords: "
          + "local indexes to be dumped are: " + dumpIndexes
          + ", to be loaded are: " + loadIndexes);
    }
    final MemoryThresholdListener thresholdListener = GemFireCacheImpl
        .getInternalProductCallbacks().getMemoryThresholdListener();
    final int entryCacheSize = Math.min(BATCH_FLUSH_SIZE_AT_ROLLOVER,
        entries.size());
    final HashMap<Long, IndexData[]> drvIdToIndexes = new HashMap<>();
    // normalize empty exclusion set to null so the loop check is cheap
    if (notWrittenKRFs != null && notWrittenKRFs.isEmpty()) {
      notWrittenKRFs = null;
    }
    // Populate the drvIdToIndexes from oplog map first.
    // We ensure that the map referred to in indexToIndexData for each index
    // is also the one which is in drdIdToIndexes so insert can happen into
    // drdIdToIndexes while flushing can be done from the global
    // indexToIndexData map.
    final boolean hasOffHeap = getDiskIdToIndexDataMap(dumpIndexes,
        loadIndexes, entryCacheSize, drvIdToIndexes, null);
    final Function<SortedIndexKey, LongArrayList> entryListCreator = key -> {
      if (hasOffHeap) {
        // Snapshot the key bytes, as the offheap value bytes used as index
        // key would be
        // released , before the data is dumped in the irf.
        // Since a newTLongArrayList is created, implying this index key will
        // be used in the dumping code
        // Check if snap shot is needed in case of only load
        key.snapshotKeyFromValue();
      }
      return new LongArrayList(2);
    };
    if (DiskStoreImpl.INDEX_LOAD_DEBUG) {
      logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIndexRecords: "
          + "affected indexes to be dumped are: " + dumpIndexes);
      logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIndexRecords: "
          + "affected indexes to be loaded are: " + loadIndexes);
    }
    int processedCnt = 0;
    for (KRFEntry krf : entries) {
      if (notWrittenKRFs != null && notWrittenKRFs.contains(krf)) {
        continue;
      }
      final DiskRegionView drv = krf.getDiskRegionView();
      final IndexData[] indexes = drvIdToIndexes.get(drv.getId());
      if (indexes == null) {
        continue;
      }
      final LocalRegion baseRegion = indexes[0].index.getBaseRegion();
      final DiskEntry entry = krf.getDiskEntry();
      @Released final Object val = DiskEntry.Helper
          .getValueOffHeapOrDiskWithoutFaultIn(entry, drv, baseRegion);
      if (val == null || Token.isInvalidOrRemoved(val)) {
        if (DiskStoreImpl.INDEX_LOAD_DEBUG) {
          logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIndexRecords: "
              + "row null for entry: " + entry + "; continuing to next.");
        }
        continue;
      }
      if (!hasOffHeap || !(val instanceof Chunk)) {
        for (IndexData indexData : indexes) {
          dumpOrLoadIndex(indexData, val, entry, entryListCreator);
        }
      } else {
        // off-heap value: must release the Chunk once all indexes have
        // consumed it
        try {
          for (IndexData indexData : indexes) {
            dumpOrLoadIndex(indexData, val, entry, entryListCreator);
          }
        } finally {
          ((Chunk)val).release();
        }
      }
      processedCnt += indexes.length;
      if (processedCnt >= BATCH_FLUSH_SIZE_AT_ROLLOVER
          || (thresholdListener.isEviction()
              && processedCnt >= (BATCH_FLUSH_SIZE_AT_ROLLOVER/4))) {
        flushEntries(drvIdToIndexes.values());
        processedCnt = 0;
      }
    }
    if (processedCnt > 0) {
      flushEntries(drvIdToIndexes.values());
    }
  }

  /**
   * Routes one entry to its index according to the IndexData action: load
   * submits to the recovery job, dump batches into the entry map, and
   * BOTH does both (note the deliberate switch fall-through).
   */
  private void dumpOrLoadIndex(final IndexData indexData, final Object val,
      DiskEntry entry, Function<SortedIndexKey, LongArrayList> entryListCreator) {
    final SortedIndexContainer index = indexData.index;
    SortedIndexKey ikey = index.getIndexKey(val, entry);
    switch (indexData.action) {
      case IndexData.ONLY_LOAD:
        // submit insert into the index immediately
        indexData.indexJob.addJob(ikey, entry);
        break;
      case IndexData.BOTH_DUMP_AND_LOAD:
        // submit insert into the index immediately
        indexData.indexJob.addJob(ikey, entry);
        // fall-through deliberate
      case IndexData.ONLY_DUMP:
        LongArrayList entryList = indexData.indexEntryMap.getIfAbsentPutWith(
            ikey, entryListCreator, ikey);
        // keyId sign encodes other state elsewhere; only the magnitude is the id
        entryList.add(Math.abs(entry.getDiskId().getKeyId()));
        break;
      default:
        Assert.fail("OplogIndex#writeIndexRecords: unexpected action="
            + indexData.action);
    }
  }

  /** Writes every non-empty batched entry map to the irf and clears it. */
  private void flushEntries(Collection<IndexData[]> allIndexes) {
    for (IndexData[] indexes : allIndexes) {
      for (IndexData indexData : indexes) {
        SortedIndexContainer index = indexData.index;
        UnifiedMap<SortedIndexKey, LongArrayList> entryIdsPerIndexKey =
            indexData.indexEntryMap;
        if (entryIdsPerIndexKey != null && entryIdsPerIndexKey.size() > 0) {
          writeIRFRecords(index, entryIdsPerIndexKey, dos);
          entryIdsPerIndexKey.clear();
        }
      }
    }
  }

  /**
   * Groups the dump/load indexes by their base region id, merging an index
   * that appears in both sets into a single BOTH_DUMP_AND_LOAD IndexData
   * (IndexData equality is by index, so the second add is a no-op).
   */
  @SuppressWarnings("unchecked")
  private static Map<String, THashSet> getRegionIdToIndexes(
      Set<SortedIndexContainer> dumpIndexes,
      Map<SortedIndexContainer, SortedIndexRecoveryJob> loadIndexes,
      int entryCacheSize) {
    final Map<String, THashSet> regionIdToIndexData = new THashMap();
    if (dumpIndexes != null) {
      for (SortedIndexContainer index : dumpIndexes) {
        String regionId = index.getBaseRegion().getRegionID();
        THashSet indexData = regionIdToIndexData.get(regionId);
        if (indexData == null) {
          indexData = new THashSet(4);
          regionIdToIndexData.put(regionId, indexData);
        }
        IndexData data;
        SortedIndexRecoveryJob indexJob = loadIndexes != null
            ? loadIndexes.get(index) : null;
        if (indexJob != null) {
          data = new IndexData(index, indexJob, IndexData.BOTH_DUMP_AND_LOAD,
              entryCacheSize);
        } else {
          data = new IndexData(index, null, IndexData.ONLY_DUMP,
              entryCacheSize);
        }
        indexData.add(data);
      }
    }
    if (loadIndexes != null) {
      for (Map.Entry<SortedIndexContainer, SortedIndexRecoveryJob> entry :
          loadIndexes.entrySet()) {
        SortedIndexContainer index = entry.getKey();
        String regionId = index.getBaseRegion().getRegionID();
        THashSet indexData = regionIdToIndexData.get(regionId);
        if (indexData == null) {
          indexData = new THashSet(4);
          regionIdToIndexData.put(regionId, indexData);
        }
        // we can safely set ONLY_LOAD here since if there is already an entry
        // then it will already have BOTH_DUMP_AND_LOAD as per the loop above
        IndexData data = new IndexData(index, entry.getValue(),
            IndexData.ONLY_LOAD, entryCacheSize);
        indexData.add(data);
      }
    }
    return regionIdToIndexData;
  }

  /**
   * Fills drvIdToIndexes (disk-region id -&gt; affected IndexData array) and
   * optionally targetRegions for the regions recovered by this oplog.
   * Returns true when any affected base region uses off-heap memory.
   */
  public boolean getDiskIdToIndexDataMap(Set<SortedIndexContainer> dumpIndexes,
      Map<SortedIndexContainer, SortedIndexRecoveryJob> loadIndexes,
      int entryCacheSize, final Map<Long, IndexData[]> drvIdToIndexes,
      final List<DiskRegionInfo> targetRegions) {
    boolean hasOffHeap = false;
    Collection<DiskRegionInfo> recoveredRegions = this.oplog
        .getRegionRecoveryMap();
    Map<String, THashSet> regionIdToIndexes = getRegionIdToIndexes(
        dumpIndexes, loadIndexes, entryCacheSize);
    for (DiskRegionInfo regionInfo : recoveredRegions) {
      DiskRegionView drv = regionInfo.getDiskRegion();
      Long drvId = drv.getId();
      String baseRegionID = Oplog.getParentRegionID(drv);
      THashSet indexSet = regionIdToIndexes.get(baseRegionID);
      if (indexSet == null) {
        continue;
      }
      if (drvIdToIndexes != null) {
        IndexData[] indexes = new IndexData[indexSet.size()];
        indexSet.toArray(indexes);
        drvIdToIndexes.put(drvId, indexes);
      }
      if (targetRegions != null) {
        targetRegions.add(regionInfo);
      }
      // all indexes in the set share the same base region, so checking the
      // first suffices for the off-heap flag
      IndexData firstKey;
      if (!hasOffHeap
          && (firstKey = (IndexData)indexSet.firstKey()) != null
          && firstKey.index.getBaseRegion().getEnableOffHeapMemory()) {
        hasOffHeap = true;
      }
    }
    return hasOffHeap;
  }

  /**
   * Serializes one index's batched records: an INDEXID_RECORD naming the
   * index, then one INDEX_RECORD per key carrying the key bytes, the count of
   * entry key ids, and the ids themselves delta-encoded (sorted ascending) as
   * unsigned varlongs to keep them small on disk.
   */
  public void writeIRFRecords(final SortedIndexContainer indexContainer,
      UnifiedMap<SortedIndexKey, LongArrayList> entryIdsPerIndexKey,
      final ChannelBufferUnsafeDataOutputStream dos) {
    try {
      final LogWriterI18n logger = this.oplog.logger;
      if (DiskStoreImpl.INDEX_LOAD_DEBUG) {
        logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIRFRecords: "
            + "write called for " + indexContainer);
      }
      String indexId = indexContainer.getUUID();
      dos.writeByte(INDEXID_RECORD);
      InternalDataSerializer.writeString(indexId, dos);
      if (DiskStoreImpl.INDEX_LOAD_DEBUG) {
        logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIRFRecords: "
            + "written indexId record for index: " + indexId);
      }
      entryIdsPerIndexKey.forEachKeyValue(
          new Procedure2<SortedIndexKey, LongArrayList>() {
        @Override
        public void value(SortedIndexKey ikey, LongArrayList entryKeyIds) {
          try {
            dos.writeByte(INDEX_RECORD);
            ikey.writeKeyBytes(dos);
            int numKeyIds = entryKeyIds.size();
            assert numKeyIds > 0;
            InternalDataSerializer.writeUnsignedVL(numKeyIds, dos);
            if (DiskStoreImpl.INDEX_LOAD_DEBUG_FINER) {
              logger.info(LocalizedStrings.DEBUG, "OplogIndex#writeIRFRecords: "
                  + "writing actual index record with index key: " + ikey
                  + " list of oplogEntryIds: " + entryKeyIds.toString());
            }
            if (numKeyIds == 1) {
              InternalDataSerializer.writeUnsignedVL(entryKeyIds.getFirst(),
                  dos);
            } else {
              // sort the key ids to keep the deltas small and thus minimize the
              // size of unsigned long that will be written to disk
              entryKeyIds.sortThis();
              long previousValue = 0;
              for (int index = 0; index < numKeyIds; index++) {
                long currValue = entryKeyIds.get(index);
                // first id is written absolute, the rest as deltas from the
                // previous id (ids are positive, so previousValue==0 only
                // before the first write)
                if (previousValue == 0) {
                  previousValue = currValue;
                  InternalDataSerializer.writeUnsignedVL(previousValue, dos);
                } else {
                  long delta = currValue - previousValue;
                  InternalDataSerializer.writeUnsignedVL(delta, dos);
                  previousValue = currValue;
                }
              }
            }
          } catch (IOException ioe) {
            throw new DiskAccessException(ioe);
          }
        }
      });
    } catch (IOException ioe) {
      throw new DiskAccessException(ioe);
    }
  }

  /**
   * Reads the irf back, dispatching on the record opcode, and submits an
   * insert job for every live entry (present in the recovery map and still
   * owned by this oplog) of every requested index. End of input (read()
   * returning -1) terminates the loop via the default branch.
   */
  public void recoverIndexes(
      Map<SortedIndexContainer, SortedIndexRecoveryJob> indexes) {
    try {
      final LogWriterI18n logger = this.oplog.logger;
      final boolean logEnabled = DiskStoreImpl.INDEX_LOAD_DEBUG
          || logger.fineEnabled();
      final boolean logFinerEnabled = DiskStoreImpl.INDEX_LOAD_DEBUG_FINER
          || logger.finerEnabled();
      final CancelCriterion cc = this.dsi.getCancelCriterion();
      // check early for stop
      cc.checkCancelInProgress(null);
      if (logEnabled || DiskStoreImpl.INDEX_LOAD_PERF_DEBUG) {
        logger.info(LocalizedStrings.DEBUG, "OplogIndex#recoverIndexes: for "
            + this.oplog + " processing file: " + this.irf + " of size: "
            + this.irf.length());
      }
      if (logger.infoEnabled()) {
        logger.info(LocalizedStrings.DiskRegion_RECOVERING_OPLOG_0_1_2,
            new Object[] { this.oplog.toString(),
                this.irf.getAbsolutePath(), dsi.getName() });
      }
      final RandomAccessFile raf = new RandomAccessFile(this.irf, "r");
      final FileChannel channel = raf.getChannel();
      final ChannelBufferUnsafeDataInputStream in =
          new ChannelBufferUnsafeDataInputStream(channel,
              Oplog.LARGE_BUFFER_SIZE);
      final OplogEntryIdMap recoveryMap = this.oplog.getInitRecoveryMap();
      final HashMap<String, IndexData> indexMap = new HashMap<>();
      boolean endOfFile = false;
      String currentIndexID;
      SortedIndexContainer currentIndex = null;
      SortedIndexRecoveryJob currentIndexJob = null;
      for (Map.Entry<SortedIndexContainer, SortedIndexRecoveryJob> entry :
          indexes.entrySet()) {
        SortedIndexContainer index = entry.getKey();
        indexMap.put(index.getUUID(),
            new IndexData(index, entry.getValue(), IndexData.ONLY_LOAD, 0));
      }
      while (!endOfFile) {
        final int opCode = in.read();
        switch (opCode) {
          case INDEX_END_OF_FILE:
            if (logEnabled) {
              // NOTE(review): "end 0xf file" below looks like a garbled
              // "end of file" in the log text — confirm against upstream
              logger.info(LocalizedStrings.DEBUG, "OplogIndex#recoverIndexes: "
                  + "read end 0xf file record for " + this.irf);
            }
            byte[] data = new byte[INDEX_END_OF_FILE_MAGIC.length];
            data[0] = INDEX_END_OF_FILE;
            in.readFully(data, 1, INDEX_END_OF_FILE_MAGIC.length - 1);
            if (!Arrays.equals(data, INDEX_END_OF_FILE_MAGIC)) {
              throw new DiskAccessException(
                  "Did not find end of file magic at the end of index "
                      + this.irf, oplog.getParent());
            }
            break;
          case INDEXID_RECORD:
            // subsequent INDEX_RECORDs belong to this index; null means the
            // caller did not request it and its records are skipped
            currentIndexID = InternalDataSerializer.readString(in);
            IndexData currentIndexData = indexMap.get(currentIndexID);
            if (currentIndexData != null) {
              currentIndex = currentIndexData.index;
              currentIndexJob = currentIndexData.indexJob;
            } else {
              currentIndex = null;
              currentIndexJob = null;
            }
            if (logFinerEnabled) {
              if (currentIndex != null) {
                logger.info(LocalizedStrings.DEBUG, String.format("OplogIndex#"
                    + "recoverIndexes: indexContainer=%s, indexUUID=%s",
                    currentIndex, currentIndexID));
              } else {
                logger.info(LocalizedStrings.DEBUG, "OplogIndex#"
                    + "recoverIndexes: index is null for indexUUID="
                    + currentIndexID);
              }
            }
            break;
          case INDEX_RECORD:
            byte[] indexKeyBytes = InternalDataSerializer.readByteArray(in);
            int numRegionKeys = (int)InternalDataSerializer
                .readUnsignedVL(in);
            long regionEntryKeyId = 0;
            for (int i = 0; i < numRegionKeys; i++) {
              // ids are delta-encoded after the first (see writeIRFRecords)
              if (i == 0) {
                regionEntryKeyId = InternalDataSerializer.readUnsignedVL(in);
              } else {
                regionEntryKeyId = regionEntryKeyId
                    + InternalDataSerializer.readUnsignedVL(in);
              }
              if (currentIndex == null) {
                continue;
              }
              // check if this region key is in current live list
              Object entry = recoveryMap.get(regionEntryKeyId);
              if (entry == null) {
                // This is possible if it has been deleted. So just
                // continue.
                if (logEnabled) {
                  logger.info(LocalizedStrings.DEBUG, String.format(
                      "OplogIndex#recoverIndexes: ignoring oplogentryid=%s "
                          + "as not found in kvMap for index=%s ",
                      regionEntryKeyId, currentIndex));
                }
                continue;
              }
              final DiskEntry diskEntry = (DiskEntry)entry;
              final DiskId diskId = diskEntry.getDiskId();
              if (diskId != null && diskId.getOplogId() == this.oplog.oplogId) {
                if (logEnabled) {
                  logger.info(LocalizedStrings.DEBUG, String.format(
                      "OplogIndex#recoverIndexes: adding index diskEntry=%s "
                          + "indexContainer=%s indexKeyBytes=%s", diskEntry,
                      currentIndex, Arrays.toString(indexKeyBytes)));
                }
                currentIndexJob.addJob(currentIndex.getIndexKey(indexKeyBytes,
                    diskEntry), diskEntry);
              } else {
                // the entry has moved to a newer oplog; its index record will
                // be recovered from that oplog's irf instead
                if (logEnabled) {
                  logger.info(LocalizedStrings.DEBUG, String.format(
                      "OplogIndex#recoverIndexes: skipping adding index for "
                          + "diskEntry=%s in indexContainer=%s as oplogid=%s "
                          + "doesn't match entry's oplogid=%s", diskEntry,
                      currentIndex, this.oplog.oplogId,
                      diskId != null ? diskId.getOplogId() : -1L));
                }
              }
            }
            break;
          default:
            if (opCode < 0) {
              // read() returned -1: physical end of stream
              endOfFile = true;
              break;
            } else {
              throw new IOException("unexpected opCode=" + opCode
                  + " encountered while reading file: " + this.irf);
            }
        }
      }
      in.close();
      raf.close();
      // check for stop
      cc.checkCancelInProgress(null);
      if (logEnabled || DiskStoreImpl.INDEX_LOAD_PERF_DEBUG) {
        logger.info(LocalizedStrings.DEBUG, "OplogIndex#recoverIndexes: "
            + "Processed file: " + this.irf);
      }
    } catch (IOException ioe) {
      throw new DiskAccessException(ioe);
    }
  }
}
/**
 * FILE: RangeQuery.java
 * PATH: org.datasyslab.geospark.spatialOperator.RangeQuery.java
 * Copyright (c) 2017 Arizona State University Data Systems Lab
 * All rights reserved.
 */
package org.datasyslab.geospark.spatialOperator;

import java.io.Serializable;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.datasyslab.geospark.rangeJudgement.GeometryRangeFilter;
import org.datasyslab.geospark.rangeJudgement.RangeFilterUsingIndex;
import org.datasyslab.geospark.rangeJudgement.RectangleRangeFilter;
import org.datasyslab.geospark.spatialRDD.LineStringRDD;
import org.datasyslab.geospark.spatialRDD.PointRDD;
import org.datasyslab.geospark.spatialRDD.PolygonRDD;
import org.datasyslab.geospark.spatialRDD.RectangleRDD;

import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;

/**
 * Executes spatial range queries against GeoSpark spatial RDDs.
 *
 * <p>Every overload follows the same pattern: when {@code useIndex} is true the
 * pre-built per-partition index is probed via {@link RangeFilterUsingIndex};
 * otherwise the raw RDD is scanned with a range filter. The untyped result is
 * then cast back to the concrete geometry type of the input RDD.
 */
public class RangeQuery implements Serializable{

    /** Error message used when an index query is requested but no index was built. */
    private static final String MISSING_INDEX_MESSAGE =
            "[RangeQuery][SpatialRangeQuery] Index doesn't exist. Please build index on rawSpatialRDD.";

    /**
     * Casts every element of an untyped query result RDD to the requested type.
     *
     * @param result the untyped query result
     * @param type the element type to cast each result element to
     * @return the typed RDD
     */
    private static <T> JavaRDD<T> castResult(JavaRDD<Object> result, final Class<T> type) {
        return result.map(new Function<Object, T>() {
            @Override
            public T call(Object spatialObject) throws Exception {
                return type.cast(spatialObject);
            }
        });
    }

    /**
     * Spatial range query: points intersecting a rectangular query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching points
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<Point> SpatialRangeQuery(PointRDD spatialRDD, Envelope queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), Point.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), Point.class);
    }

    /**
     * Spatial range query: points intersecting a polygonal query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching points
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<Point> SpatialRangeQuery(PointRDD spatialRDD, Polygon queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), Point.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), Point.class);
    }

    /**
     * Spatial range query: polygons intersecting a rectangular query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching polygons
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<Polygon> SpatialRangeQuery(PolygonRDD spatialRDD, Envelope queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), Polygon.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), Polygon.class);
    }

    /**
     * Spatial range query: polygons intersecting a polygonal query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching polygons
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<Polygon> SpatialRangeQuery(PolygonRDD spatialRDD, Polygon queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), Polygon.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), Polygon.class);
    }

    /**
     * Spatial range query: rectangles (as envelopes) intersecting a rectangular query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching envelopes
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<Envelope> SpatialRangeQuery(RectangleRDD spatialRDD, Envelope queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            // The index path yields JTS geometries; convert each match back to its
            // envelope and carry over any attached user data.
            return spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow))
                    .map(new Function<Object, Envelope>() {
                        @Override
                        public Envelope call(Object spatialObject) throws Exception {
                            Geometry geometry = (Geometry) spatialObject;
                            Envelope returnSpatialObject = geometry.getEnvelopeInternal();
                            if(geometry.getUserData() != null) {
                                returnSpatialObject.setUserData(geometry.getUserData());
                            }
                            return returnSpatialObject;
                        }
                    });
        }
        // NOTE(review): unlike the indexed path, the scan path returns the raw
        // Envelope objects directly (no user-data copy) — preserved as-is.
        return castResult(spatialRDD.getRawSpatialRDD().filter(new RectangleRangeFilter(queryWindow, condition)), Envelope.class);
    }

    /**
     * Spatial range query: line strings intersecting a rectangular query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching line strings
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<LineString> SpatialRangeQuery(LineStringRDD spatialRDD, Envelope queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), LineString.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), LineString.class);
    }

    /**
     * Spatial range query: line strings intersecting a polygonal query window.
     *
     * @param spatialRDD the spatial RDD
     * @param queryWindow the query window
     * @param condition the condition flag forwarded to the range filter
     * @param useIndex the use index
     * @return the java RDD of matching line strings
     * @throws Exception if {@code useIndex} is true but no index has been built
     */
    public static JavaRDD<LineString> SpatialRangeQuery(LineStringRDD spatialRDD, Polygon queryWindow, Integer condition, boolean useIndex) throws Exception {
        if(useIndex) {
            if(spatialRDD.indexedRawRDD == null) {
                throw new Exception(MISSING_INDEX_MESSAGE);
            }
            return castResult(spatialRDD.indexedRawRDD.mapPartitions(new RangeFilterUsingIndex(queryWindow)), LineString.class);
        }
        return castResult(spatialRDD.getRawSpatialRDD().filter(new GeometryRangeFilter(queryWindow, condition)), LineString.class);
    }
}
/*
 * Copyright (c) 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.api.client.util.escape;

/**
 * An {@link Escaper} that converts literal text into a format safe for inclusion in a particular
 * context (such as an XML document). Typically (but not always), the inverse process of
 * "unescaping" the text is performed automatically by the relevant parser.
 *
 * <p>
 * For example, an XML escaper would convert the literal string {@code "Foo<Bar>"} into {@code
 * "Foo&lt;Bar&gt;"} to prevent {@code "<Bar>"} from being confused with an XML tag. When the
 * resulting XML document is parsed, the parser API will return this text as the original literal
 * string {@code "Foo<Bar>"}.
 *
 * <p>
 * As there are important reasons, including potential security issues, to handle Unicode correctly
 * if you are considering implementing a new escaper you should favor using UnicodeEscaper wherever
 * possible.
 *
 * <p>
 * A {@code UnicodeEscaper} instance is required to be stateless, and safe when used concurrently by
 * multiple threads.
 *
 * <p>
 * Several popular escapers are defined as constants in the class {@link CharEscapers}. To create
 * your own escapers extend this class and implement the {@link #escape(int)} method.
 *
 * @since 1.0
 */
public abstract class UnicodeEscaper extends Escaper {

  /** The amount of padding (chars) to use when growing the escape buffer. */
  private static final int DEST_PAD = 32;

  /**
   * Returns the escaped form of the given Unicode code point, or {@code null} if this code point
   * does not need to be escaped. When called as part of an escaping operation, the given code point
   * is guaranteed to be in the range {@code 0 <= cp <= Character#MAX_CODE_POINT}.
   *
   * <p>
   * If an empty array is returned, this effectively strips the input character from the resulting
   * text.
   *
   * <p>
   * If the character does not need to be escaped, this method should return {@code null}, rather
   * than an array containing the character representation of the code point. This enables the
   * escaping algorithm to perform more efficiently.
   *
   * <p>
   * If the implementation of this method cannot correctly handle a particular code point then it
   * should either throw an appropriate runtime exception or return a suitable replacement
   * character. It must never silently discard invalid input as this may constitute a security risk.
   *
   * @param cp the Unicode code point to escape if necessary
   * @return the replacement characters, or {@code null} if no escaping was needed
   */
  protected abstract char[] escape(int cp);

  /**
   * Scans a sub-sequence of characters from a given {@link CharSequence}, returning the index of
   * the next character that requires escaping.
   *
   * <p>
   * <b>Note:</b> When implementing an escaper, it is a good idea to override this method for
   * efficiency. The base class implementation determines successive Unicode code points and invokes
   * {@link #escape(int)} for each of them. If the semantics of your escaper are such that code
   * points in the supplementary range are either all escaped or all unescaped, this method can be
   * implemented more efficiently using {@link CharSequence#charAt(int)}.
   *
   * <p>
   * Note however that if your escaper does not escape characters in the supplementary range, you
   * should either continue to validate the correctness of any surrogate characters encountered or
   * provide a clear warning to users that your escaper does not validate its input.
   *
   * <p>
   * See {@link PercentEscaper} for an example.
   *
   * @param csq a sequence of characters
   * @param start the index of the first character to be scanned
   * @param end the index immediately after the last character to be scanned
   * @return the index of the next character requiring escaping, or {@code end} if none remains in
   *         the scanned range
   * @throws IllegalArgumentException if the scanned sub-sequence of {@code csq} contains invalid
   *         surrogate pairs
   */
  protected abstract int nextEscapeIndex(CharSequence csq, int start, int end);

  /**
   * Returns the escaped form of a given literal string.
   *
   * <p>
   * If you are escaping input in arbitrary successive chunks, then it is not generally safe to use
   * this method. If an input string ends with an unmatched high surrogate character, then this
   * method will throw {@link IllegalArgumentException}. You should ensure your input is valid <a
   * href="http://en.wikipedia.org/wiki/UTF-16">UTF-16</a> before calling this method.
   *
   * @param string the literal string to be escaped
   * @return the escaped form of {@code string}
   * @throws NullPointerException if {@code string} is null
   * @throws IllegalArgumentException if invalid surrogate characters are encountered
   */
  @Override
  public abstract String escape(String string);

  /**
   * Returns the escaped form of a given literal string, starting at the given index. This method is
   * called by the {@link #escape(String)} method when it discovers that escaping is required. It is
   * protected to allow subclasses to override the fastpath escaping function to inline their
   * escaping test.
   *
   * <p>
   * This method is not reentrant and may only be invoked by the top level {@link #escape(String)}
   * method.
   *
   * @param s the literal string to be escaped
   * @param index the index to start escaping from
   * @return the escaped form of {@code string}
   * @throws NullPointerException if {@code string} is null
   * @throws IllegalArgumentException if invalid surrogate characters are encountered
   */
  protected final String escapeSlow(String s, int index) {
    int end = s.length();

    // Get a destination buffer and setup some loop variables.
    char[] dest = Platform.charBufferFromThreadLocal();
    int destIndex = 0;
    int unescapedChunkStart = 0;

    while (index < end) {
      int cp = codePointAt(s, index, end);
      if (cp < 0) {
        // A negative value from codePointAt() means the string ended on an
        // unmatched high surrogate; see codePointAt()'s contract.
        throw new IllegalArgumentException("Trailing high surrogate at end of input");
      }
      // It is possible for this to return null because nextEscapeIndex() may
      // (for performance reasons) yield some false positives but it must never
      // give false negatives.
      char[] escaped = escape(cp);
      int nextIndex = index + (Character.isSupplementaryCodePoint(cp) ? 2 : 1);
      if (escaped != null) {
        int charsSkipped = index - unescapedChunkStart;

        // This is the size needed to add the replacement, not the full
        // size needed by the string. We only regrow when we absolutely must.
        int sizeNeeded = destIndex + charsSkipped + escaped.length;
        if (dest.length < sizeNeeded) {
          // Pad the new buffer with the worst-case remaining length to reduce
          // the number of future regrows.
          int destLength = sizeNeeded + end - index + DEST_PAD;
          dest = growBuffer(dest, destIndex, destLength);
        }
        // If we have skipped any characters, we need to copy them now.
        if (charsSkipped > 0) {
          s.getChars(unescapedChunkStart, index, dest, destIndex);
          destIndex += charsSkipped;
        }
        if (escaped.length > 0) {
          System.arraycopy(escaped, 0, dest, destIndex, escaped.length);
          destIndex += escaped.length;
        }
        // If we dealt with an escaped character, reset the unescaped range.
        unescapedChunkStart = nextIndex;
      }
      index = nextEscapeIndex(s, nextIndex, end);
    }

    // Process trailing unescaped characters - no need to account for escaped
    // length or padding the allocation.
    int charsSkipped = end - unescapedChunkStart;
    if (charsSkipped > 0) {
      int endIndex = destIndex + charsSkipped;
      if (dest.length < endIndex) {
        dest = growBuffer(dest, destIndex, endIndex);
      }
      s.getChars(unescapedChunkStart, end, dest, destIndex);
      destIndex = endIndex;
    }
    return new String(dest, 0, destIndex);
  }

  /**
   * Returns the Unicode code point of the character at the given index.
   *
   * <p>
   * Unlike {@link Character#codePointAt(CharSequence, int)} or {@link String#codePointAt(int)} this
   * method will never fail silently when encountering an invalid surrogate pair.
   *
   * <p>
   * The behaviour of this method is as follows:
   * <ol>
   * <li>If {@code index >= end}, {@link IndexOutOfBoundsException} is thrown.
   * <li><b>If the character at the specified index is not a surrogate, it is returned.</b>
   * <li>If the first character was a high surrogate value, then an attempt is made to read the next
   * character.
   * <ol>
   * <li><b>If the end of the sequence was reached, the negated value of the trailing high surrogate
   * is returned.</b>
   * <li><b>If the next character was a valid low surrogate, the code point value of the high/low
   * surrogate pair is returned.</b>
   * <li>If the next character was not a low surrogate value, then {@link IllegalArgumentException}
   * is thrown.
   * </ol>
   * <li>If the first character was a low surrogate value, {@link IllegalArgumentException} is
   * thrown.
   * </ol>
   *
   * @param seq the sequence of characters from which to decode the code point
   * @param index the index of the first character to decode
   * @param end the index beyond the last valid character to decode
   * @return the Unicode code point for the given index or the negated value of the trailing high
   *         surrogate character at the end of the sequence
   */
  protected static int codePointAt(CharSequence seq, int index, int end) {
    if (index < end) {
      char c1 = seq.charAt(index++);
      if (c1 < Character.MIN_HIGH_SURROGATE || c1 > Character.MAX_LOW_SURROGATE) {
        // Fast path (first test is probably all we need to do)
        return c1;
      } else if (c1 <= Character.MAX_HIGH_SURROGATE) {
        // If the high surrogate was the last character, return its inverse
        if (index == end) {
          return -c1;
        }
        // Otherwise look for the low surrogate following it
        char c2 = seq.charAt(index);
        if (Character.isLowSurrogate(c2)) {
          return Character.toCodePoint(c1, c2);
        }
        throw new IllegalArgumentException(
            "Expected low surrogate but got char '" + c2 + "' with value " + (int) c2 + " at index "
                + index);
      } else {
        // c1 is in the low-surrogate range: a low surrogate with no preceding
        // high surrogate is always invalid UTF-16.
        throw new IllegalArgumentException(
            "Unexpected low surrogate character '" + c1 + "' with value " + (int) c1 + " at index "
                + (index - 1));
      }
    }
    throw new IndexOutOfBoundsException("Index exceeds specified range");
  }

  /**
   * Helper method to grow the character buffer as needed, this only happens once in a while so it's
   * ok if it's in a method call. If the index passed in is 0 then no copying will be done.
   */
  private static char[] growBuffer(char[] dest, int index, int size) {
    char[] copy = new char[size];
    if (index > 0) {
      System.arraycopy(dest, 0, copy, 0, index);
    }
    return copy;
  }
}
/* * Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com) * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mycontroller.standalone.provider; import org.mycontroller.standalone.McThreadPoolFactory; import org.mycontroller.standalone.eventbus.McEventBus; import org.mycontroller.standalone.eventbus.MessageStatus; import org.mycontroller.standalone.eventbus.MessageStatusHandler; import org.mycontroller.standalone.exceptions.NotSupportedException; import org.mycontroller.standalone.gateway.IGateway; import org.mycontroller.standalone.gateway.config.GatewayConfig; import org.mycontroller.standalone.message.IMessage; import org.mycontroller.standalone.message.McMessageUtils; import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_STATUS; import org.mycontroller.standalone.offheap.MessageQueueImpl; import org.mycontroller.standalone.offheap.MessageQueueSleepImpl; import io.vertx.core.eventbus.MessageConsumer; import lombok.extern.slf4j.Slf4j; /** * @author Jeeva Kandasamy (jkandasa) * @since 1.2.0 */ @Slf4j public abstract class EngineAbstract implements IEngine { private volatile boolean exit = false; private volatile boolean stopped = true; protected MessageQueueImpl _queue; protected MessageQueueSleepImpl _queueSleep; protected IGateway _gateway; protected IExecutor _executor; private long auditStartTime = 0; private long gatewayAuditTime = 0; private EngineStatistics _statistics = new 
EngineStatistics(); private static final String STREAM_MESSAGE = McMessageUtils.MESSAGE_TYPE.C_STREAM.getText(); public EngineAbstract(GatewayConfig _config) { if (_queue == null) { _queue = new MessageQueueImpl(String.valueOf(_config.getId())); _queueSleep = new MessageQueueSleepImpl(String.valueOf(_config.getId())); } } public EngineStatistics processingRate() { return _statistics.clone(); } public void routineTasks() { // override this and do not block! } @Override public void start() { // Add it in to thread pool McThreadPoolFactory.execute(this); _logger.debug("{}", _gateway.config()); } @Override public GatewayConfig config() { return _gateway.config(); } @Override public void send(IMessage message) { if (_gateway.isUp()) { _queue.add(message); } else { McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder() .status(MESSAGE_STATUS.GATEWAY_NOT_AVAILABLE) .message("Gateway down! - " + _gateway.config().getName()) .build()); } } @Override public void sendSleepNode(IMessage message) { _queueSleep.put(message); McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder() .status(MESSAGE_STATUS.ADDED_TO_SLEEP_QUEUE) .message("Will be sent when receive a request from node.") .build()); } public void clearSleepQueue(String nodeEui) { _queueSleep.remove(nodeEui); } @Override public boolean isRunning() { return !stopped; } @Override public void distory() { _gateway.disconnect(); stop(); _queue.delete(); _queueSleep.delete(); } @Override public void run() { // clear statistics table _statistics.clear(); stopped = false; // start the gateway _gateway.connect(); _logger.debug("Gateway started successfully. {}", _gateway.config()); while (!exit) { try { auditGateway(); auditQueue(); routineTasks(); _statistics.updateLastMinuteStatus(); } catch (Exception ex) { _logger.error("Exception,", ex); } } _gateway.disconnect(); _logger.debug("Terminatted... 
"); stopped = true; } @Override public void stop() { exit = true; } // checks gateway status, if it is not running make it UP public void auditGateway() { if (_gateway.isUp()) { return; } long statusSince = System.currentTimeMillis() - gatewayAuditTime; if (statusSince >= _gateway.config().getReconnectDelay() * 1000L) { gatewayAuditTime = System.currentTimeMillis(); _logger.debug("Gateway is in down state. Trying to reconnect..."); _gateway.reconnect(); } } private void sleep(long sleepDuration) { sleep(sleepDuration, null); } private void sleep(long sleepDuration, MessageStatusHandler handler) { try { while (sleepDuration > 0) { sleepDuration -= 10L; Thread.sleep(10L); if (exit) { return; } if (handler != null && handler.getStatusMessage() != null) { return; } } } catch (InterruptedException ex) { _logger.warn("Sleep interrupted", ex); } } public void auditQueue() { // Update queue size _statistics.setSizeQueue(_queue.size()); if (_statistics.getSizeQueue() > 0) { IMessage message = _queue.take(); // if null message return it. 
if (message == null) { return; } if (!_gateway.isUp()) { // TODO: notify it is failed, "Gateway not ready" McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder() .status(MESSAGE_STATUS.GATEWAY_NOT_AVAILABLE).message("Gateway not available") .build()); return; } auditStartTime = System.currentTimeMillis(); _logger.debug("Processing:[{}]", message); try { if (message.isTxMessage()) { boolean ackEnabled = false; if (_gateway.config().getAckEnabled()) { // check ack enabled and if it is node broadcast message we will not get ack if (message.getNodeEui().equalsIgnoreCase(IMessage.NODE_BROADCAST_ID)) { ackEnabled = false; } else if (STREAM_MESSAGE.equalsIgnoreCase(message.getType())) { ackEnabled = _gateway.config().getStreamAckEnabled(); } else { ackEnabled = true; } } if (ackEnabled) { // set acknowledgement request message.setAck(IMessage.ACK_REQUEST); MessageConsumer<MessageStatus> _consumer = null; try { MessageStatusHandler _handler = new MessageStatusHandler(); _consumer = McEventBus.getInstance().registerConsumer(message.getEventTopic(), _handler); for (int retry = 1; retry <= _gateway.config().getFailedRetryCount(); retry++) { _logger.debug("Retry count {} of {}, {}", retry, _gateway.config().getFailedRetryCount(), message); _gateway.write(message); // send to _gateway sleep(_gateway.config().getAckWaitTime()); // wait for ack delay if (exit) { return; } // if we received ack send it to next process if (_handler.getStatusMessage() != null && _handler.getStatusMessage().getStatus() == MESSAGE_STATUS.ACK_RECEIVED) { _executor.execute(message); McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder() .status(MESSAGE_STATUS.SUCCESS).message("Retry count: " + retry) .build()); break; } if (retry == _gateway.config().getFailedRetryCount()) { _logger.info("Seems like failed to send this message. " + "There is no ACK received! Retried {} time(s). 
{}", _gateway.config().getFailedRetryCount(), message); _statistics.incrementFailureCount(); // notify it is failed, "ack not received" McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder().status(MESSAGE_STATUS.NO_ACK_RECEIVED) .message("Failed retry count:" + retry).build() ); } } } finally { if (_consumer != null) { _consumer.unregister(); } } } else { _gateway.write(message); // send to _gateway _executor.execute(message); McEventBus.getInstance().publish( message.getEventTopic(), MessageStatus.builder() .status(MESSAGE_STATUS.SUCCESS).message("Retry count: 0").build()); } // A delay to avoid collisions on any networks with continues messages. Only for Tx message sleep(_gateway.config().getTxDelay()); } else { _executor.execute(message); } } catch (NotSupportedException ex) { _logger.error("NotSupported: {}. Dropping {}", ex.getMessage(), message); } catch (Exception ex) { _logger.error("Throws exception while processing!, [{}]", message, ex); } finally { // update last message time to processing rate table _statistics.update(System.currentTimeMillis() - auditStartTime, message.isTxMessage()); _logger.debug("{}", _statistics); } } // sleep here to reduce CPU load, in nanoseconds try { Thread.sleep(0, 333333); } catch (InterruptedException ex) { _logger.error("Exception, ", ex); } } }
package org.marketcetera.marketdata.core.provider;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.Validate;
import org.marketcetera.event.*;
import org.marketcetera.event.impl.QuoteEventBuilder;
import org.marketcetera.event.util.MarketstatEventCache;
import org.marketcetera.marketdata.Content;
import org.marketcetera.marketdata.OrderBook;
import org.marketcetera.marketdata.core.Messages;
import org.marketcetera.trade.Instrument;
import org.marketcetera.util.misc.ClassVersion;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/* $License$ */

/**
 * Caches market data for a given instrument.
 *
 * @author <a href="mailto:colin@marketcetera.com">Colin DuPlantis</a>
 * @version $Id$
 * @since $Release$
 */
@ClassVersion("$Id$")
public class MarketdataCacheElement
{
    /**
     * Create a new MarketdataCacheElement instance.
     *
     * @param inInstrument an <code>Instrument</code> value
     * @throws IllegalArgumentException if the given instrument is <code>null</code>
     */
    public MarketdataCacheElement(Instrument inInstrument)
    {
        Validate.notNull(inInstrument);
        instrument = inInstrument;
        marketstatCache = new MarketstatEventCache(inInstrument);
        trade = null;
        imbalance = null;
    }
    /**
     * Gets the latest snapshot for the given content.
     *
     * @param inContent a <code>Content</code> value
     * @return an <code>Event</code> value or <code>null</code> if no cached data exists for this content type
     */
    public Event getSnapshot(Content inContent)
    {
        switch(inContent) {
            case MARKET_STAT:
                return marketstatCache.get();
            case LATEST_TICK:
                return trade;
            case IMBALANCE:
                return imbalance;
            case TOP_OF_BOOK:
            case NBBO:
                return getOrderBookFor(inContent).getTopOfBook();
            case AGGREGATED_DEPTH:
            case BBO10:
            case LEVEL_2:
            case OPEN_BOOK:
            case TOTAL_VIEW:
            case UNAGGREGATED_DEPTH:
                return getOrderBookFor(inContent).getDepthOfBook();
            case DIVIDEND:
                // TODO we actually need to return multiple events here
            default:
                throw new UnsupportedOperationException();
        }
    }
    /**
     * Updates the cache for the given content with the given events.
     *
     * @param inContent a <code>Content</code> value
     * @param inEvents an <code>Event[]</code> value
     * @return a <code>List&lt;Event&gt;</code> value containing the net change represented by the given update
     * @throws UnsupportedOperationException if the given content type is not supported
     */
    public List<Event> update(Content inContent,
                              Event...inEvents)
    {
        List<Event> results = new ArrayList<Event>();
        switch(inContent) {
            case NBBO:
            case UNAGGREGATED_DEPTH:
            case AGGREGATED_DEPTH:
            case BBO10:
            case TOP_OF_BOOK:
            case LEVEL_2:
            case OPEN_BOOK:
            case TOTAL_VIEW:
                doBookUpdate(inContent,
                             results,
                             inEvents);
                break;
            case DIVIDEND:
                for(Event event : inEvents) {
                    if(event instanceof DividendEvent) {
                        // dividend list is created lazily on first dividend update
                        if(dividends == null) {
                            dividends = Lists.newArrayList();
                        }
                        dividends.add((DividendEvent)event);
                        results.add(event);
                    } else {
                        // TODO warn - skipping event
                    }
                }
                break;
            case LATEST_TICK:
                for(Event event : inEvents) {
                    if(event instanceof TradeEvent) {
                        trade = (TradeEvent)event;
                        results.add(event);
                    } else {
                        // TODO warn - skipping event
                    }
                }
                break;
            case IMBALANCE:
                for(Event event : inEvents) {
                    if(event instanceof ImbalanceEvent) {
                        imbalance = (ImbalanceEvent)event;
                        results.add(event);
                    } else {
                        // TODO warn - skipping event
                    }
                }
                // FIX: 'break' was missing here, so IMBALANCE updates fell through
                // into the MARKET_STAT case and appended spurious marketstat
                // snapshots to the results for every imbalance event.
                break;
            case MARKET_STAT:
                for(Event event : inEvents) {
                    if(event instanceof MarketstatEvent) {
                        if(marketstatCache == null) {
                            marketstatCache = new MarketstatEventCache(instrument);
                        }
                        marketstatCache.cache((MarketstatEvent)event);
                    } else {
                        // TODO warn - skipping event
                    }
                    // note that this intentionally combines a potential multitude of incoming marketstat events into a single result
                    // NOTE(review): this add is inside the loop, so N input events yield
                    // N copies of the combined snapshot, which seems to contradict the
                    // comment above — confirm intended behavior before changing.
                    results.add(marketstatCache.get());
                }
                break;
            default:
                throw new UnsupportedOperationException();
        }
        return results;
    }
    /**
     * Updates the order book for the given content with the given events.
     *
     * @param inContent a <code>Content</code> value
     * @param inoutResults a <code>Collection&lt;Event&gt;</code> value containing the net change of the update
     * @param inEvents an <code>Event[]</code> value containing the update
     * @throws IllegalArgumentException if a given event is not a <code>QuoteEvent</code>
     */
    private void doBookUpdate(Content inContent,
                              Collection<Event> inoutResults,
                              Event...inEvents)
    {
        OrderBook orderbook = getOrderBookFor(inContent);
        for(Event event : inEvents) {
            if(event instanceof QuoteEvent) {
                QuoteEvent quoteEvent = (QuoteEvent)event;
                if(inContent == Content.TOP_OF_BOOK) {
                    // generate DEL event for existing top, if necessary
                    if(quoteEvent.getAction() == QuoteAction.ADD) {
                        if(latestTop != null) {
                            QuoteEvent deleteEvent = null;
                            if(quoteEvent instanceof BidEvent) {
                                BidEvent latestBid = latestTop.getBid();
                                if(latestBid != null) {
                                    deleteEvent = QuoteEventBuilder.delete(latestBid);
                                }
                            } else if(quoteEvent instanceof AskEvent) {
                                AskEvent latestAsk = latestTop.getAsk();
                                if(latestAsk != null) {
                                    deleteEvent = QuoteEventBuilder.delete(latestAsk);
                                }
                            } else {
                                throw new UnsupportedOperationException();
                            }
                            if(deleteEvent != null) {
                                orderbook.process(deleteEvent);
                                inoutResults.add(deleteEvent);
                            }
                        }
                    }
                }
                orderbook.process(quoteEvent);
                latestTop = orderbook.getTopOfBook();
                inoutResults.add(quoteEvent);
            } else {
                throw new IllegalArgumentException(Messages.CONTENT_REQUIRES_QUOTE_EVENTS.getText(inContent,event.getClass().getName()));
            }
        }
    }
    /**
     * Gets the order book for the given content, creating it lazily if necessary.
     *
     * @param inContent a <code>Content</code> value
     * @return an <code>OrderBook</code> value
     */
    private OrderBook getOrderBookFor(Content inContent)
    {
        OrderBook book = orderbooks.get(inContent);
        if(book == null) {
            book = new OrderBook(instrument);
            orderbooks.put(inContent,
                           book);
        }
        return book;
    }
    /**
     * instrument for which market data is cached
     */
    private final Instrument instrument;
    /**
     * order book structures, by content
     */
    private final Map<Content,OrderBook> orderbooks = Maps.newHashMap();
    /**
     * cached dividend data
     */
    private Collection<DividendEvent> dividends;
    /**
     * cached marketstat data
     */
    private MarketstatEventCache marketstatCache;
    /**
     * most recent trade
     */
    private TradeEvent trade;
    /**
     * most recent imbalance
     */
    private ImbalanceEvent imbalance;
    /**
     * most recent top-of-book
     */
    private TopOfBookEvent latestTop;
}
package org.x3f.autobot;

import org.x3f.lib.RestClient;

import com.loopj.android.http.*;

import org.json.*;

import org.apache.http.Header;

import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.res.Resources.NotFoundException;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.Toast;

/**
 * Launcher screen: collects the robot's HTTP connection settings (IP, ports,
 * video resolution/fps), tests the connection via a REST call and, on success,
 * persists the settings and opens {@code ControlActivity}.
 */
public class MainActivity extends FragmentActivity implements OnClickListener {

	// timestamp of the last back-press, used for the double-back-to-exit gesture
	private long exitTime;
	private EditText editIP;
	private EditText editPort;
	private EditText editVideoPort;
	private EditText editFps;
	private Spinner spinRslv;
	private ArrayAdapter<?> spinRslvAdp;
	private SharedPreferences sharedPref;
	private Editor prefEditor;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);

		// record that the HTTP protocol was chosen (vs. bluetooth)
		sharedPref = getSharedPreferences(AutobotApplication.PREF_FILE_KEY,
				MODE_PRIVATE);
		prefEditor = sharedPref.edit();
		prefEditor.putInt("last_protocol", AutobotApplication.PROTOCOL_HTTP);
		prefEditor.commit();

		View btnConn = this.findViewById(R.id.btnConnect);
		btnConn.setOnClickListener(this);

		// pre-fill the form with the last-used values held by the application
		AutobotApplication app = (AutobotApplication) getApplication();
		editIP = (EditText) this.findViewById(R.id.editIP);
		editIP.setText(app.getIp());
		editPort = (EditText) this.findViewById(R.id.editPort);
		editPort.setText(app.getPort());
		editVideoPort = (EditText) this.findViewById(R.id.editVideoPort);
		editVideoPort.setText(app.getVideoPort());

		spinRslv = (Spinner) this.findViewById(R.id.spinRslv);
		spinRslvAdp = ArrayAdapter.createFromResource(this,
				R.array.resolutions, android.R.layout.simple_spinner_item);
		spinRslvAdp
				.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
		spinRslv.setAdapter(spinRslvAdp);
		// preselect the resolution used in the previous session, if any
		String[] resolutions = getResources().getStringArray(
				R.array.resolutions);
		for (int i = 0; i < resolutions.length; i++) {
			if (resolutions[i].equals(sharedPref.getString(
					"last_video_resolution", ""))) {
				spinRslv.setSelection(i);
				break;
			}
		}
		editFps = (EditText) this.findViewById(R.id.editFps);
		editFps.setText(app.getVideoFps());
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.activity_main, menu);
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		switch (item.getItemId()) {
		case R.id.about:
			Intent settings_intent = new Intent(this, AboutActivity.class);
			startActivity(settings_intent);
			return true;
		case R.id.bluetooth:
			// switch to the bluetooth flow and drop this activity
			Intent bluetooth_intent = new Intent(this, BluetoothActivity.class);
			bluetooth_intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
			startActivity(bluetooth_intent);
			finish();
			return true;
		}
		return false;
	}

	@Override
	public boolean onKeyDown(int keyCode, KeyEvent event) {
		// double-press back within 2 seconds to exit the app
		if (keyCode == KeyEvent.KEYCODE_BACK
				&& event.getAction() == KeyEvent.ACTION_DOWN) {
			if (System.currentTimeMillis() - exitTime > 2000) {
				Toast.makeText(this.getApplicationContext(),
						this.getString(R.string.msg_quit), Toast.LENGTH_SHORT)
						.show();
				exitTime = System.currentTimeMillis();
			} else {
				finish();
				System.exit(0);
			}
			return true;
		}
		return super.onKeyDown(keyCode, event);
	}

	@Override
	public void onClick(View v) {
		switch (v.getId()) {
		case R.id.btnConnect:
			// Fetch ip
			if (editIP.getText().length() <= 0) {
				Toast.makeText(getApplicationContext(),
						this.getString(R.string.msg_emptyip),
						Toast.LENGTH_SHORT).show();
				break;
			}
			AutobotApplication app = (AutobotApplication) getApplication();
			app.setIp(editIP.getText().toString());
			// Fetch port
			if (editPort.getText().length() <= 0) {
				Toast.makeText(getApplicationContext(),
						this.getString(R.string.msg_emptyport),
						Toast.LENGTH_SHORT).show();
				break;
			}
			app.setPort(editPort.getText().toString());
			// Fetch video port
			if (editVideoPort.getText().length() <= 0) {
				Toast.makeText(getApplicationContext(),
						this.getString(R.string.msg_emptyvideoport),
						Toast.LENGTH_SHORT).show();
				break;
			}
			app.setVideoPort(editVideoPort.getText().toString());
			// Fetch resolution
			int pos = spinRslv.getSelectedItemPosition();
			String[] resolutions = getResources().getStringArray(
					R.array.resolutions);
			app.setVideoResolution(resolutions[pos]);
			// Fetch video fps
			if (editFps.getText().length() <= 0) {
				Toast.makeText(getApplicationContext(),
						this.getString(R.string.msg_emptyvideofps),
						Toast.LENGTH_SHORT).show();
				break;
			}
			app.setVideoFps(editFps.getText().toString());
			// Test connection. A {"code":0,...} reply means the robot is
			// reachable; only then are the settings persisted and the control
			// screen opened.
			RestClient.get(
					"http://" + editIP.getText() + ":" + editPort.getText()
							+ "/connect", null, new JsonHttpResponseHandler() {
						@Override
						public void onSuccess(int statusCode, Header[] headers,
								JSONObject data) {
							try {
								if (data.getInt("code") == 0) {
									AutobotApplication app = (AutobotApplication) getApplication();
									app.setProtocol(AutobotApplication.PROTOCOL_HTTP);
									Intent itCtrl = new Intent(
											getApplicationContext(),
											ControlActivity.class);
									startActivity(itCtrl);
									// save preferences for future use
									prefEditor.putString("last_bot_ip",
											app.getIp());
									prefEditor.putString("last_bot_port",
											app.getPort());
									prefEditor.putString("last_video_port",
											app.getVideoPort());
									prefEditor.putString(
											"last_video_resolution",
											app.getVideoResolution());
									prefEditor.putString("last_video_fps",
											app.getVideoFps());
									prefEditor.commit();
								} else {
									// robot answered but refused the connection
									Toast.makeText(getApplicationContext(),
											data.getString("msg"),
											Toast.LENGTH_SHORT).show();
								}
							} catch (NotFoundException e) {
								Toast.makeText(getApplicationContext(),
										e.getMessage(), Toast.LENGTH_SHORT)
										.show();
							} catch (JSONException e) {
								Toast.makeText(getApplicationContext(),
										e.getMessage(), Toast.LENGTH_SHORT)
										.show();
							}
						}

						@Override
						public void onFailure(int statusCode,
								Header[] headers, Throwable e, JSONObject error) {
							Toast.makeText(getApplicationContext(),
									e.getMessage(), Toast.LENGTH_SHORT).show();
						}

						@Override
						public void onFailure(int statusCode,
								Header[] headers, Throwable e, JSONArray error) {
							Toast.makeText(getApplicationContext(),
									e.getMessage(), Toast.LENGTH_SHORT).show();
						}

						@Override
						public void onFailure(int statusCode,
								Header[] headers, String error, Throwable e) {
							Toast.makeText(getApplicationContext(), error,
									Toast.LENGTH_SHORT).show();
						}
					});
			break;
		}
	}
}
package sonata.kernel.placement.net; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.module.SimpleModule; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.log4j.Logger; import sonata.kernel.VimAdaptor.commons.vnfd.Unit; import sonata.kernel.VimAdaptor.commons.vnfd.UnitDeserializer; import sonata.kernel.placement.config.PopResource; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.regex.Pattern; /** * Utility functions to add Loadbalancing rules to a son-emu emulator using the loadbalancing REST API. 
*/ public class TranslatorLoadbalancer { final static Logger logger = Logger.getLogger(TranslatorLoadbalancer.class); /** * Holds objects for json object */ static HashMap<String,Object> lbObject; /** * Holds a list of LinkPort objects for a json object */ static List<LinkPort> lbPortList; /** * Pattern to dissect the emulator response */ static Pattern pattern_floatingNode = Pattern.compile("^Loadbalancer set up at ([^:]*):(.*)$"); // initiate static objects static { lbObject = new HashMap<String,Object>(); lbPortList = new ArrayList<LinkPort>(); lbObject.put("dst_vnf_interfaces",lbPortList); } /** * Add a loadbalance rule to a son-emu emulator * @param balance describes the loadbalancer rule */ public static void loadbalance(LinkLoadbalance balance){ String balancePath = balance.srcPort.pop.getChainingEndpoint(); if(!balancePath.endsWith("/")) balancePath += "/"; String requestUri; String srcDcName = balance.srcPort.pop.getPopName(); requestUri = balancePath+"v1/lb/"+srcDcName+"/"+balance.srcPort.stack+"/"+balance.srcPort.server+"/"+balance.srcPort.port; lbPortList.clear(); lbPortList.addAll(balance.dstPorts); String json = null; try { json = jsonMapper.writeValueAsString(lbObject); } catch (JsonProcessingException e) { logger.error("Error when converting port list to json structure.",e); return; } CloseableHttpClient client = HttpClients.createDefault(); HttpPost postRequest = new HttpPost(requestUri); postRequest.setEntity(new StringEntity(json, ContentType.APPLICATION_JSON)); CloseableHttpResponse response = null; logger.info("Loadbalance "+postRequest.getRequestLine().getUri()+" "+json); try { response = client.execute(postRequest); if (response.getStatusLine().getStatusCode() == 500) { String errorMsg = null; if(response.getEntity()!=null && response.getEntity().getContent()!=null) { IOUtils.toString(response.getEntity().getContent(), "utf-8"); } logger.error("Loadbalance failed "+requestUri+" errorMsg: "+errorMsg); } else { logger.info("Loadbalance 
successful "+requestUri); } } catch (IOException e) { e.printStackTrace(); logger.error("Loadbalance request aborted "+requestUri); } } /** * Adds a floating node to a son-emu emulator * @param balance describes the loadbalance rule * @return Cookie from the emulator to remove the floating node later on */ public static FloatingNode floatingNode(LinkLoadbalance balance){ String balancePath = balance.srcPort.pop.getChainingEndpoint(); if(!balancePath.endsWith("/")) balancePath += "/"; String requestUri; String srcDcName = balance.srcPort.pop.getPopName(); requestUri = balancePath+"v1/lb/"+srcDcName+"/floating/"+balance.srcPort.server+"/"+balance.srcPort.port; lbPortList.clear(); lbPortList.addAll(balance.dstPorts); String json = null; try { json = jsonMapper.writeValueAsString(lbObject); } catch (JsonProcessingException e) { logger.error("Error when converting port list to json structure.",e); return null; } CloseableHttpClient client = HttpClients.createDefault(); HttpPost postRequest = new HttpPost(requestUri); postRequest.setEntity(new StringEntity(json, ContentType.APPLICATION_JSON)); CloseableHttpResponse response = null; logger.info("Add floating node "+postRequest.getRequestLine().getUri()+" "+json); try { response = client.execute(postRequest); if (response.getStatusLine().getStatusCode() == 500) { String errorMsg = null; if(response.getEntity()!=null && response.getEntity().getContent()!=null) { IOUtils.toString(response.getEntity().getContent(), "utf-8"); } logger.error("Adding floating node failed "+requestUri+" errorMsg: "+errorMsg); } else { String text = IOUtils.toString(response.getEntity().getContent(), "utf-8"); HashMap cookieMap = null; String cookieNr = null; String floatingIp = null; try { cookieMap = jsonMapper.readValue(text, HashMap.class); cookieNr = (String)cookieMap.get("cookie"); floatingIp = (String)cookieMap.get("floating_ip"); logger.info("Adding floating node successful "+requestUri+", "+text); return new 
TranslatorLoadbalancer.FloatingNode(balance.srcPort.pop, balance.srcPort.stack, cookieNr, floatingIp, balance); } catch(Exception e) { logger.error("Adding floating node failed "+requestUri+", call succeeded but response invalid: "+text); } } } catch (IOException e) { e.printStackTrace(); logger.error("Adding floating node request aborted "+requestUri); } return null; } /** * Removes a floating node from a son-emu emulator * @param floatingNode Contains the cookie for the emulator to identify the floating node */ public static void unFloatingNode(FloatingNode floatingNode){ String balancePath = floatingNode.pop.getChainingEndpoint(); if(!balancePath.endsWith("/")) balancePath += "/"; String requestUri; String srcDcName = floatingNode.pop.getPopName(); requestUri = balancePath+"v1/lb/"+srcDcName+"/"+floatingNode.stackName+"/"+floatingNode.cookie+"/"+floatingNode.floatingIp; CloseableHttpClient client = HttpClients.createDefault(); HttpDelete deleteRequest = new HttpDelete(requestUri); CloseableHttpResponse response = null; logger.info("Remove floating node "+deleteRequest.getRequestLine().getUri()); try { response = client.execute(deleteRequest); if (response.getStatusLine().getStatusCode() == 500) { String errorMsg = null; if(response.getEntity()!=null && response.getEntity().getContent()!=null) { IOUtils.toString(response.getEntity().getContent(), "utf-8"); } logger.error("Remove floating node failed "+requestUri+" error: "+errorMsg); } else { logger.info("Remove floating node successful "+requestUri); } } catch (IOException e) { e.printStackTrace(); logger.error("Remove floating node request aborted "+requestUri); } } /** * Removes a loadbalance rule from a son-emu emulator * @param srcPort describes the source port of the loadbalancer rule */ public static void unloadbalance(LinkPort srcPort){ String balancePath = srcPort.pop.getChainingEndpoint(); if(!balancePath.endsWith("/")) balancePath += "/"; String requestUri; String srcDcName = srcPort.pop.getPopName(); 
requestUri = balancePath+"v1/lb/"+srcDcName+"/"+srcPort.stack+"/"+srcPort.server+"/"+srcPort.port; CloseableHttpClient client = HttpClients.createDefault(); HttpDelete deleteRequest = new HttpDelete(requestUri); CloseableHttpResponse response = null; logger.info("Unloadbalance "+deleteRequest.getRequestLine().getUri()); try { response = client.execute(deleteRequest); if (response.getStatusLine().getStatusCode() == 500) { String errorMsg = null; if(response.getEntity()!=null && response.getEntity().getContent()!=null) { IOUtils.toString(response.getEntity().getContent(), "utf-8"); } logger.error("Unloadbalance failed "+requestUri+" errorMsg: "+errorMsg); } else { logger.info("Unloadbalance successful "+requestUri); } } catch (IOException e) { e.printStackTrace(); logger.error("Unloadbalance request aborted "+requestUri); } } /** * Describes the floating node used to connect a service to the outer world */ public static class FloatingNode{ /** * Loadbalancing rule to loadbalance incoming traffic between floating node and the service's input nodes */ public LinkLoadbalance lbRule; /** * Datacenter the floating node is assigned to */ public final PopResource pop; /** * Floating node's stack name */ public final String stackName; /** * Cookie provided by the emulator */ public final String cookie; /** * Emulator host IP assigned to the floating node */ public final String floatingIp; /** * Creates a floating node descriptor * @param pop datacenter * @param stackName floating node's stack * @param cookie cookie identifying the floating node * @param floatingIp IP of the node * @param lbRule loadbalancing rule */ public FloatingNode(PopResource pop, String stackName, String cookie, String floatingIp, LinkLoadbalance lbRule){ this.pop = pop; this.stackName = stackName; this.cookie = cookie; this.floatingIp = floatingIp; this.lbRule = lbRule; } } /** * Used to map objects to json Strings */ static ObjectMapper jsonMapper; static { jsonMapper = new ObjectMapper(new 
JsonFactory()); jsonMapper.disable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS); jsonMapper.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); jsonMapper.disable(SerializationFeature.WRITE_NULL_MAP_VALUES); jsonMapper.enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); jsonMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); SimpleModule module = new SimpleModule(); module.addDeserializer(Unit.class, new UnitDeserializer()); jsonMapper.registerModule(module); jsonMapper.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING); } }
/* $Id$
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.etch.util.core.nio;

import java.io.EOFException;
import java.io.IOException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;

/**
 * Abstract implementation of Handler. Wraps a selectable channel and routes
 * the selector's readiness notifications to the do* hook methods, which
 * subclasses override as needed.
 * @param <T>
 */
public class AbstractHandler<T extends SelectableChannel>
	implements Handler<T>
{
	/** The wrapped channel; fixed for the lifetime of this handler. */
	private final T channel;

	/** The selector this handler is registered with, null when unregistered. */
	private Selector selector;

	/** The registration key, null when unregistered. */
	private SelectionKey key;

	/** The interest ops most recently pushed to the selector. */
	private int lastInterestOps;

	/**
	 * @param channel
	 */
	public AbstractHandler( T channel )
	{
		this.channel = channel;
	}

	public final T channel()
	{
		return channel;
	}

	public final void registered( Selector selector, SelectionKey key )
	{
		// record the registration first so that the registered() hook can
		// already see selector() and key()
		this.selector = selector;
		this.key = key;
		registered();
	}

	/**
	 * @return the Selector of the handler.
	 */
	public final Selector selector()
	{
		return selector;
	}

	public final SelectionKey key()
	{
		return key;
	}

	/**
	 * Notifies the handler that it has been registered with a selector.
	 */
	protected void registered()
	{
		// nothing to do.
	}

	public final int getLastInterestOps()
	{
		return lastInterestOps;
	}

	public final void setLastInterestOps( int ops )
	{
		lastInterestOps = ops;
	}

	public final void selected() throws Exception
	{
		// snapshot the key: cancel() may null the field concurrently
		final SelectionKey currentKey = key;
		if (currentKey == null)
			return;

		final int ready = currentKey.readyOps();

		// accept and connect are exclusive events; read and write may both
		// fire in the same selection round
		if ((ready & SelectionKey.OP_ACCEPT) != 0)
		{
			doAccept();
		}
		else if ((ready & SelectionKey.OP_CONNECT) != 0)
		{
			doConnect();
		}
		else
		{
			if ((ready & SelectionKey.OP_READ) != 0)
				doRead();
			if ((ready & SelectionKey.OP_WRITE) != 0)
				doWrite();
		}
	}

	protected void doAccept() throws Exception
	{
		throw new IOException( "doAccept not implemented" );
	}

	protected void doConnect() throws Exception
	{
		throw new IOException( "doConnect not implemented" );
	}

	protected void doRead() throws Exception
	{
		throw new IOException( "doRead not implemented" );
	}

	protected void doWrite() throws Exception
	{
		throw new IOException( "doWrite not implemented" );
	}

	public final int getInterestOps() throws Exception
	{
		// accept and connect dominate; otherwise read/write combine
		if (canAccept())
			return SelectionKey.OP_ACCEPT;

		if (canConnect())
			return SelectionKey.OP_CONNECT;

		int interest = 0;
		if (canRead())
			interest |= SelectionKey.OP_READ;
		if (canWrite())
			interest |= SelectionKey.OP_WRITE;
		return interest;
	}

	/**
	 * @return true if this handler can accept. Called from
	 * {@link #getInterestOps()}.
	 * @throws Exception
	 */
	protected boolean canAccept() throws Exception
	{
		return false;
	}

	/**
	 * @return true if this handler can connect. Called from
	 * {@link #getInterestOps()}.
	 * @throws Exception
	 */
	protected boolean canConnect() throws Exception
	{
		return false;
	}

	/**
	 * @return true if this handler can read. Called from
	 * {@link #getInterestOps()}.
	 * @throws Exception
	 */
	protected boolean canRead() throws Exception
	{
		return false;
	}

	/**
	 * @return true if this handler can write. Called from
	 * {@link #getInterestOps()}.
	 * @throws Exception
	 */
	protected boolean canWrite() throws Exception
	{
		return false;
	}

	/**
	 * Updates the interest ops of the channel.
	 */
	public final void updateInterestOps()
	{
		final Selector sel = selector;
		if (sel == null)
			throw new IllegalStateException( "selector == null" );
		sel.updateInterestOps( this );
	}

	/**
	 * Cancels the registration of this handler.
	 */
	public final void cancel()
	{
		final Selector sel = selector;
		if (sel != null)
			sel.cancel( this );
		else
			canceled( null );
	}

	public void canceled( Exception e )
	{
		try
		{
			if (e != null)
				reportException( e );

			if (key != null)
				key.cancel();

			channel.close();
		}
		catch ( Exception closeFailure )
		{
			reportException( closeFailure );
		}
		finally
		{
			// drop the registration regardless of how teardown went
			selector = null;
			key = null;
		}
	}

	public void reportException( Exception e )
	{
		// swallow the routine "peer went away" cases
		if (e instanceof IOException)
		{
			if (e instanceof EOFException)
				return;

			if ("An existing connection was forcibly closed by the remote host".equals( e.getMessage() ))
				return;
		}

		System.err.printf( "%s: caught exception: %s\n", this, e );
		e.printStackTrace();
	}
}
/************************************************************************** * copyright file="Appointment.java" company="Microsoft" * Copyright (c) Microsoft Corporation. All rights reserved. * * Defines the Appointment.java. **************************************************************************/ package microsoft.exchange.webservices.data; import java.util.ArrayList; import java.util.Date; /** * Represents an appointment or a meeting. Properties available on appointments * are defined in the AppointmentSchema class. * */ @Attachable @ServiceObjectDefinition(xmlElementName = XmlElementNames.CalendarItem) public class Appointment extends Item implements ICalendarActionProvider { /** * Initializes an unsaved local instance of Appointment". To bind to an * existing appointment, use Appointment.Bind() instead. * * @param service * The ExchangeService instance to which this appointmtnt is * bound. * @throws Exception * the exception */ public Appointment(ExchangeService service) throws Exception { super(service); } /** * Initializes a new instance of Appointment. * * @param parentAttachment * the parent attachment * @param isNew * If true, attachment is new. * @throws Exception * the exception */ protected Appointment(ItemAttachment parentAttachment, boolean isNew) throws Exception { // If we're running against Exchange 2007, we need to explicitly preset // the StartTimeZone property since Exchange 2007 will otherwise scope // start and end to UTC. super(parentAttachment); } /** * Binds to an existing appointment and loads the specified set of * properties. Calling this method results in a call to EWS. * * @param service * the service * @param id * the id * @param propertySet * the property set * @return An Appointment instance representing the appointment * corresponding to the specified Id. 
* @throws Exception * the exception */ public static Appointment bind(ExchangeService service, ItemId id, PropertySet propertySet) throws Exception { return service.bindToItem(Appointment.class, id, propertySet); } /** * Binds to an existing appointment and loads its first class properties. * Calling this method results in a call to EWS. * * @param service * the service * @param id * the id * @return An Appointment instance representing the appointment * corresponding to the specified Id. * @throws Exception * the exception */ public static Appointment bind(ExchangeService service, ItemId id) throws Exception { return Appointment.bind(service, id, PropertySet.FirstClassProperties); } /** * Binds to an existing appointment and loads its first class properties. * Calling this method results in a call to EWS. * * @param service * the service * @param recurringMasterId * the recurring master id * @param occurenceIndex * the occurence index * @return An Appointment instance representing the appointment * corresponding to the specified Id. * @throws Exception * the exception */ public static Appointment bindToOccurrence(ExchangeService service, ItemId recurringMasterId, int occurenceIndex) throws Exception { return Appointment.bindToOccurrence(service, recurringMasterId, occurenceIndex, PropertySet.FirstClassProperties); } /** * Binds to an existing appointment and loads its first class properties. * Calling this method results in a call to EWS. * * @param service * the service * @param recurringMasterId * the recurring master id * @param occurenceIndex * the occurence index * @param propertySet * the property set * @return An Appointment instance representing the appointment * corresponding to the specified Id. 
* @throws Exception * the exception */ public static Appointment bindToOccurrence(ExchangeService service, ItemId recurringMasterId, int occurenceIndex, PropertySet propertySet) throws Exception { AppointmentOccurrenceId occurenceId = new AppointmentOccurrenceId( recurringMasterId.getUniqueId(), occurenceIndex); return Appointment.bind(service, occurenceId, propertySet); } /** * Binds to the master appointment of a recurring series and loads its first * class properties. Calling this method results in a call to EWS. * * @param service * the service * @param occurrenceId * the occurrence id * @return An Appointment instance representing the appointment * corresponding to the specified Id. * @throws Exception * the exception */ public static Appointment bindToRecurringMaster(ExchangeService service, ItemId occurrenceId) throws Exception { return Appointment.bindToRecurringMaster(service, occurrenceId, PropertySet.FirstClassProperties); } /** * Binds to the master appointment of a recurring series and loads its first * class properties. Calling this method results in a call to EWS. * * @param service * the service * @param occurrenceId * the occurrence id * @param propertySet * the property set * @return An Appointment instance representing the appointment * corresponding to the specified Id. * @throws Exception * the exception */ public static Appointment bindToRecurringMaster(ExchangeService service, ItemId occurrenceId, PropertySet propertySet) throws Exception { RecurringAppointmentMasterId recurringMasterId = new RecurringAppointmentMasterId( occurrenceId.getUniqueId()); return Appointment.bind(service, recurringMasterId, propertySet); } /** * Internal method to return the schema associated with this type of object. * * @return The schema associated with this type of object */ @Override protected ServiceObjectSchema getSchema() { return AppointmentSchema.Instance; } /** * Gets the minimum required server version. 
* * @return Earliest Exchange version in which this service object type is * supported. */ @Override protected ExchangeVersion getMinimumRequiredServerVersion() { return ExchangeVersion.Exchange2007_SP1; } /** * Determines whether properties defined with * ScopedDateTimePropertyDefinition require custom time zone scoping. * * @return if this item type requires custom scoping for scoped date/time * properties; otherwise, . */ @Override protected boolean getIsCustomDateTimeScopingRequired() { return true; } /** * Validates this instance. * @throws Exception */ @Override protected void validate() throws Exception { super.validate(); // PS # 250452: Make sure that if we're //on the Exchange2007_SP1 schema version, // if any of the following // properties are set or updated: // o Start // o End // o IsAllDayEvent // o Recurrence // ... then, we must send the MeetingTimeZone element // (which is generated from StartTimeZone for // Exchange2007_SP1 requests (see //StartTimeZonePropertyDefinition.cs). // If the StartTimeZone isn't // in the property bag, then throw, because clients must // supply the proper time zone - either by // loading it from a currently-existing appointment, //or by setting it directly. // Otherwise, to dirty // the StartTimeZone property, we just set it to its current value. if ((this.getService().getRequestedServerVersion() == ExchangeVersion.Exchange2007_SP1) && !(this.getService().getExchange2007CompatibilityMode())) { if (this.getPropertyBag().isPropertyUpdated(AppointmentSchema.Start) || this.getPropertyBag().isPropertyUpdated(AppointmentSchema.End) || this.getPropertyBag().isPropertyUpdated(AppointmentSchema.IsAllDayEvent) || this.getPropertyBag().isPropertyUpdated(AppointmentSchema.Recurrence)) { // If the property isn't in the property bag, throw.... if (!this.getPropertyBag().contains(AppointmentSchema.StartTimeZone)) { throw new ServiceLocalException(Strings. 
StartTimeZoneRequired); //getStartTimeZoneRequired()); } // Otherwise, set the time zone to its current value to // force it to be sent with the request. this.setStartTimeZone(this.getStartTimeZone()); } } } /** * Creates a reply response to the organizer and/or attendees of the * meeting. * * @param replyAll * the reply all * @return A ResponseMessage representing the reply response that can * subsequently be modified and sent. * @throws Exception * the exception */ public ResponseMessage createReply(boolean replyAll) throws Exception { this.throwIfThisIsNew(); return new ResponseMessage(this, replyAll ? ResponseMessageType.ReplyAll : ResponseMessageType.Reply); } /** * Replies to the organizer and/or the attendees of the meeting. Calling * this method results in a call to EWS. * * @param bodyPrefix * the body prefix * @param replyAll * the reply all * @throws Exception * the exception */ public void reply(MessageBody bodyPrefix, boolean replyAll) throws Exception { ResponseMessage responseMessage = this.createReply(replyAll); responseMessage.setBodyPrefix(bodyPrefix); responseMessage.sendAndSaveCopy(); } /** * Creates a forward message from this appointment. * * @return A ResponseMessage representing the forward response that can * subsequently be modified and sent. * @throws Exception * the exception */ public ResponseMessage createForward() throws Exception { this.throwIfThisIsNew(); return new ResponseMessage(this, ResponseMessageType.Forward); } /** * Forwards the appointment. Calling this method results in a call to EWS. * * @param bodyPrefix * the body prefix * @param toRecipients * the to recipients * @throws Exception * the exception */ public void forward(MessageBody bodyPrefix, EmailAddress... toRecipients) throws Exception { if (null != toRecipients) { ArrayList<EmailAddress> list = new ArrayList<EmailAddress>(); for (EmailAddress email : toRecipients) { list.add(email); } this.forward(bodyPrefix, list); } } /** * Forwards the appointment. 
Calling this method results in a call to EWS. * * @param bodyPrefix * the body prefix * @param toRecipients * the to recipients * @throws Exception * the exception */ public void forward(MessageBody bodyPrefix, Iterable<EmailAddress> toRecipients) throws Exception { ResponseMessage responseMessage = this.createForward(); responseMessage.setBodyPrefix(bodyPrefix); responseMessage.getToRecipients() .addEmailRange(toRecipients.iterator()); responseMessage.sendAndSaveCopy(); } /** * Saves this appointment in the specified folder. Calling this method * results in at least one call to EWS. Mutliple calls to EWS might be made * if attachments have been added. * * @param destinationFolderName * the destination folder name * @param sendInvitationsMode * the send invitations mode * @throws Exception * the exception */ public void save(WellKnownFolderName destinationFolderName, SendInvitationsMode sendInvitationsMode) throws Exception { this.internalCreate(new FolderId(destinationFolderName), null, sendInvitationsMode); } /** * Saves this appointment in the specified folder. Calling this method * results in at least one call to EWS. Mutliple calls to EWS might be made * if attachments have been added. * * @param destinationFolderId * the destination folder id * @param sendInvitationsMode * the send invitations mode * @throws Exception * the exception */ public void save(FolderId destinationFolderId, SendInvitationsMode sendInvitationsMode) throws Exception { EwsUtilities.validateParam(destinationFolderId, "destinationFolderId"); this.internalCreate(destinationFolderId, null, sendInvitationsMode); } /** * Saves this appointment in the Calendar folder. Calling this method * results in at least one call to EWS. Mutliple calls to EWS might be made * if attachments have been added. 
* * @param sendInvitationsMode * the send invitations mode * @throws Exception * the exception */ public void save(SendInvitationsMode sendInvitationsMode) throws Exception { this.internalCreate(null, null, sendInvitationsMode); } /** * Applies the local changes that have been made to this appointment. * Calling this method results in at least one call to EWS. Mutliple calls * to EWS might be made if attachments have been added or removed. * * @param conflictResolutionMode * the conflict resolution mode * @param sendInvitationsOrCancellationsMode * the send invitations or cancellations mode * @throws Exception * the exception */ public void update( ConflictResolutionMode conflictResolutionMode, SendInvitationsOrCancellationsMode sendInvitationsOrCancellationsMode) throws Exception { this.internalUpdate(null, conflictResolutionMode, null, sendInvitationsOrCancellationsMode); } /** * Deletes this appointment. Calling this method results in a call to EWS. * * @param deleteMode * the delete mode * @param sendCancellationsMode * the send cancellations mode * @throws Exception * the exception */ public void delete(DeleteMode deleteMode, SendCancellationsMode sendCancellationsMode) throws Exception { this.internalDelete(deleteMode, sendCancellationsMode, null); } /** * Creates a local meeting acceptance message that can be customized and * sent. * * @param tentative * the tentative * @return An AcceptMeetingInvitationMessage representing the meeting * acceptance message. * @throws Exception * the exception */ public AcceptMeetingInvitationMessage createAcceptMessage(boolean tentative) throws Exception { return new AcceptMeetingInvitationMessage(this, tentative); } /** * Creates a local meeting acceptance message that can be customized and * sent. * * @return A CancelMeetingMessage representing the meeting cancellation * message. 
 * @throws Exception
 *             the exception
 */
public CancelMeetingMessage createCancelMeetingMessage() throws Exception {
    return new CancelMeetingMessage(this);
}

/**
 * Creates a local meeting declination message that can be customized and
 * sent.
 *
 * @return A DeclineMeetingInvitation representing the meeting declination
 *         message.
 * @throws Exception
 *             the exception
 */
public DeclineMeetingInvitationMessage createDeclineMessage()
        throws Exception {
    return new DeclineMeetingInvitationMessage(this);
}

/**
 * Accepts the meeting. Calling this method results in a call to EWS.
 *
 * @param sendResponse
 *            the send response
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
public CalendarActionResults accept(boolean sendResponse) throws Exception {
    return this.internalAccept(false, sendResponse);
}

/**
 * Tentatively accepts the meeting. Calling this method results in a call to
 * EWS.
 *
 * @param sendResponse
 *            the send response
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
public CalendarActionResults acceptTentatively(boolean sendResponse)
        throws Exception {
    return this.internalAccept(true, sendResponse);
}

/**
 * Accepts the meeting.
 *
 * @param tentative
 *            the tentative
 * @param sendResponse
 *            the send response
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
protected CalendarActionResults internalAccept(boolean tentative,
        boolean sendResponse) throws Exception {
    AcceptMeetingInvitationMessage accept = this
            .createAcceptMessage(tentative);
    // Either send the response to the organizer (and save a copy),
    // or just save the acceptance locally without sending.
    if (sendResponse) {
        return accept.calendarSendAndSaveCopy();
    } else {
        return accept.calendarSave();
    }
}

/**
 * Cancels the meeting and sends cancellation messages to all attendees.
 * Calling this method results in a call to EWS.
 *
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
public CalendarActionResults cancelMeeting() throws Exception {
    return this.createCancelMeetingMessage().calendarSendAndSaveCopy();
}

/**
 * Cancels the meeting and sends cancellation messages to all attendees.
 * Calling this method results in a call to EWS.
 *
 * @param cancellationMessageText
 *            the cancellation message text
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
public CalendarActionResults cancelMeeting(String cancellationMessageText)
        throws Exception {
    CancelMeetingMessage cancelMsg = this.createCancelMeetingMessage();
    cancelMsg.setBody(new MessageBody(cancellationMessageText));
    return cancelMsg.calendarSendAndSaveCopy();
}

/**
 * Declines the meeting invitation. Calling this method results in a call to
 * EWS.
 *
 * @param sendResponse
 *            the send response
 * @return A CalendarActionResults object containing the various items that
 *         were created or modified as a result of this operation.
 * @throws Exception
 *             the exception
 */
public CalendarActionResults decline(boolean sendResponse) throws Exception {
    DeclineMeetingInvitationMessage decline = this.createDeclineMessage();
    // Either send the declination to the organizer (and save a copy),
    // or just save it locally without sending.
    if (sendResponse) {
        return decline.calendarSendAndSaveCopy();
    } else {
        return decline.calendarSave();
    }
}

/**
 * Gets the default setting for sending cancellations on Delete.
 *
 * @return If Delete() is called on Appointment, we want to send
 *         cancellations and save a copy.
 */
@Override
protected SendCancellationsMode getDefaultSendCancellationsMode() {
    return SendCancellationsMode.SendToAllAndSaveCopy;
}

/**
 * Gets the default settings for sending invitations on Save.
 *
 * @return the default send invitations mode
 */
@Override
protected SendInvitationsMode getDefaultSendInvitationsMode() {
    return SendInvitationsMode.SendToAllAndSaveCopy;
}

/**
 * Gets the default settings for sending invitations or cancellations on
 * Update.
 *
 * @return the default send invitations or cancellations mode
 */
@Override
protected SendInvitationsOrCancellationsMode getDefaultSendInvitationsOrCancellationsMode() {
    return SendInvitationsOrCancellationsMode.SendToAllAndSaveCopy;
}

// Properties

/**
 * Gets the start time of the appointment.
 *
 * @return the start
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getStart() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.Start);
}

/**
 * Sets the start.
 *
 * @param value
 *            the new start
 * @throws Exception
 *             the exception
 */
public void setStart(Date value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.Start, value);
}

/**
 * Gets the end time of the appointment.
 *
 * @return the end
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getEnd() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.End);
}

/**
 * Sets the end.
 *
 * @param value
 *            the new end
 * @throws Exception
 *             the exception
 */
public void setEnd(Date value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.End, value);
}

/**
 * Gets the original start time of this appointment.
 *
 * @return the original start
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getOriginalStart() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.OriginalStart);
}

/**
 * Gets a value indicating whether this appointment is an all day
 * event.
 *
 * @return the checks if is all day event
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsAllDayEvent() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsAllDayEvent);
}

/**
 * Sets the checks if is all day event.
 *
 * @param value
 *            the new checks if is all day event
 * @throws Exception
 *             the exception
 */
public void setIsAllDayEvent(Boolean value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.IsAllDayEvent, value);
}

/**
 * Gets a value indicating the free/busy status of the owner of this
 * appointment.
 *
 * @return the legacy free busy status
 * @throws ServiceLocalException
 *             the service local exception
 */
public LegacyFreeBusyStatus getLegacyFreeBusyStatus()
        throws ServiceLocalException {
    return (LegacyFreeBusyStatus) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.LegacyFreeBusyStatus);
}

/**
 * Sets the legacy free busy status.
 *
 * @param value
 *            the new legacy free busy status
 * @throws Exception
 *             the exception
 */
public void setLegacyFreeBusyStatus(LegacyFreeBusyStatus value)
        throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.LegacyFreeBusyStatus, value);
}

/**
 * Gets the location of this appointment.
 *
 * @return the location
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getLocation() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.Location);
}

/**
 * Sets the location.
 *
 * @param value
 *            the new location
 * @throws Exception
 *             the exception
 */
public void setLocation(String value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.Location, value);
}

/**
 * Gets a text indicating when this appointment occurs. The text returned by
 * When is localized using the Exchange Server culture or using the culture
 * specified in the PreferredCulture property of the ExchangeService object
 * this appointment is bound to.
 *
 * @return the when
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getWhen() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.When);
}

/**
 * Gets a value indicating whether the appointment is a meeting.
 *
 * @return the checks if is meeting
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsMeeting() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsMeeting);
}

/**
 * Gets a value indicating whether the appointment has been cancelled.
 *
 * @return the checks if is cancelled
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsCancelled() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsCancelled);
}

/**
 * Gets a value indicating whether the appointment is recurring.
 *
 * @return the checks if is recurring
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsRecurring() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsRecurring);
}

/**
 * Gets a value indicating whether the meeting request has already been
 * sent.
 *
 * @return the meeting request was sent
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getMeetingRequestWasSent() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.MeetingRequestWasSent);
}

/**
 * Gets a value indicating whether responses are requested when
 * invitations are sent for this meeting.
 *
 * @return the checks if is response requested
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsResponseRequested() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsResponseRequested);
}

/**
 * Sets the checks if is response requested.
 *
 * @param value
 *            the new checks if is response requested
 * @throws Exception
 *             the exception
 */
public void setIsResponseRequested(Boolean value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.IsResponseRequested, value);
}

/**
 * Gets a value indicating the type of this appointment.
 *
 * @return the appointment type
 * @throws ServiceLocalException
 *             the service local exception
 */
public AppointmentType getAppointmentType() throws ServiceLocalException {
    return (AppointmentType) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.AppointmentType);
}

/**
 * Gets a value indicating what was the last response of the user that
 * loaded this meeting.
 *
 * @return the my response type
 * @throws ServiceLocalException
 *             the service local exception
 */
public MeetingResponseType getMyResponseType()
        throws ServiceLocalException {
    return (MeetingResponseType) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.MyResponseType);
}

/**
 * Gets the organizer of this meeting. The Organizer property is read-only
 * and is only relevant for attendees. The organizer of a meeting is
 * automatically set to the user that created the meeting.
 *
 * @return the organizer
 * @throws ServiceLocalException
 *             the service local exception
 */
public EmailAddress getOrganizer() throws ServiceLocalException {
    return (EmailAddress) this.getPropertyBag()
            .getObjectFromPropertyDefinition(AppointmentSchema.Organizer);
}

/**
 * Gets a list of required attendees for this meeting.
 *
 * @return the required attendees
 * @throws ServiceLocalException
 *             the service local exception
 */
public AttendeeCollection getRequiredAttendees( )
        throws ServiceLocalException {
    return (AttendeeCollection) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.RequiredAttendees);
}

/**
 * Gets a list of optional attendees for this meeting.
 *
 * @return the optional attendees
 * @throws ServiceLocalException
 *             the service local exception
 */
public AttendeeCollection getOptionalAttendees()
        throws ServiceLocalException {
    return (AttendeeCollection) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.OptionalAttendees);
}

/**
 * Gets a list of resources for this meeting.
 *
 * @return the resources
 * @throws ServiceLocalException
 *             the service local exception
 */
public AttendeeCollection getResources() throws ServiceLocalException {
    return (AttendeeCollection) this.getPropertyBag()
            .getObjectFromPropertyDefinition(AppointmentSchema.Resources);
}

/**
 * Gets the number of calendar entries that conflict with this appointment
 * in the authenticated user's calendar.
* * @return the conflicting meeting count * @throws ServiceLocalException * the service local exception */ public Integer getConflictingMeetingCount() throws ServiceLocalException { return (Integer) this.getPropertyBag().getObjectFromPropertyDefinition( AppointmentSchema.ConflictingMeetingCount); } /** * Gets the number of calendar entries that are adjacent to this appointment * in the authenticated user's calendar. * * @return the adjacent meeting count * @throws ServiceLocalException * the service local exception */ public Integer getAdjacentMeetingCount() throws ServiceLocalException { return (Integer) this.getPropertyBag().getObjectFromPropertyDefinition( AppointmentSchema.AdjacentMeetingCount); } /** * Gets a list of meetings that conflict with this appointment in the * authenticated user's calendar. * * @return the conflicting meetings * @throws ServiceLocalException * the service local exception */ public ItemCollection<Appointment> getConflictingMeetings() throws ServiceLocalException { return (ItemCollection<Appointment>) this.getPropertyBag() .getObjectFromPropertyDefinition( AppointmentSchema.ConflictingMeetings); } /** * Gets a list of meetings that conflict with this appointment in the * authenticated user's calendar. * * @return the adjacent meetings * @throws ServiceLocalException * the service local exception */ public ItemCollection<Appointment> getAdjacentMeetings() throws ServiceLocalException { return (ItemCollection<Appointment>) this.getPropertyBag() .getObjectFromPropertyDefinition( AppointmentSchema.AdjacentMeetings); } /** * Gets the duration of this appointment. * * @return the duration * @throws ServiceLocalException * the service local exception */ public TimeSpan getDuration() throws ServiceLocalException { return (TimeSpan) this.getPropertyBag().getObjectFromPropertyDefinition( AppointmentSchema.Duration); } /** * Gets the name of the time zone this appointment is defined in. 
 *
 * @return the time zone
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getTimeZone() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.TimeZone);
}

/**
 * Gets the time when the attendee replied to the meeting request.
 *
 * @return the appointment reply time
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getAppointmentReplyTime() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.AppointmentReplyTime);
}

/**
 * Gets the sequence number of this appointment.
 *
 * @return the appointment sequence number
 * @throws ServiceLocalException
 *             the service local exception
 */
public Integer getAppointmentSequenceNumber() throws ServiceLocalException {
    return (Integer) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.AppointmentSequenceNumber);
}

/**
 * Gets the state of this appointment.
 *
 * @return the appointment state
 * @throws ServiceLocalException
 *             the service local exception
 */
public Integer getAppointmentState() throws ServiceLocalException {
    return (Integer) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.AppointmentState);
}

/**
 * Gets the recurrence pattern for this appointment. Available
 * recurrence pattern classes include Recurrence.DailyPattern,
 * Recurrence.MonthlyPattern and Recurrence.YearlyPattern.
 *
 * @return the recurrence
 * @throws ServiceLocalException
 *             the service local exception
 */
public Recurrence getRecurrence() throws ServiceLocalException {
    return (Recurrence) this.getPropertyBag()
            .getObjectFromPropertyDefinition(AppointmentSchema.Recurrence);
}

/**
 * Sets the recurrence.
 *
 * @param value
 *            the new recurrence
 * @throws Exception
 *             the exception
 */
public void setRecurrence(Recurrence value) throws Exception {
    // Regeneration patterns (e.g. "one week after completion") are only
    // meaningful for tasks, so reject them here.
    if (value != null) {
        if (value.isRegenerationPattern()) {
            throw new ServiceLocalException(
                    Strings.RegenerationPatternsOnlyValidForTasks);
        }
    }
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.Recurrence, value);
}

/**
 * Gets an OccurrenceInfo identifying the first occurrence of this meeting.
 *
 * @return the first occurrence
 * @throws ServiceLocalException
 *             the service local exception
 */
public OccurrenceInfo getFirstOccurrence() throws ServiceLocalException {
    return (OccurrenceInfo) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.FirstOccurrence);
}

/**
 * Gets an OccurrenceInfo identifying the last occurrence of this meeting.
 *
 * @return the last occurrence
 * @throws ServiceLocalException
 *             the service local exception
 */
public OccurrenceInfo getLastOccurrence() throws ServiceLocalException {
    return (OccurrenceInfo) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.LastOccurrence);
}

/**
 * Gets a list of modified occurrences for this meeting.
 *
 * @return the modified occurrences
 * @throws ServiceLocalException
 *             the service local exception
 */
public OccurrenceInfoCollection getModifiedOccurrences()
        throws ServiceLocalException {
    return (OccurrenceInfoCollection) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.ModifiedOccurrences);
}

/**
 * Gets a list of deleted occurrences for this meeting.
 *
 * @return the deleted occurrences
 * @throws ServiceLocalException
 *             the service local exception
 */
public DeletedOccurrenceInfoCollection getDeletedOccurrences()
        throws ServiceLocalException {
    return (DeletedOccurrenceInfoCollection) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.DeletedOccurrences);
}

/**
 * Gets the start time zone.
 *
 * @return the start time zone
 * @throws ServiceLocalException
 *             the service local exception
 */
public TimeZoneDefinition getStartTimeZone() throws ServiceLocalException {
    return (TimeZoneDefinition) this.getPropertyBag()
            .getObjectFromPropertyDefinition(
                    AppointmentSchema.StartTimeZone);
}

/**
 * Sets the start time zone.
 *
 * @param value
 *            the new start time zone
 * @throws Exception
 *             the exception
 */
public void setStartTimeZone(TimeZoneDefinition value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.StartTimeZone, value);
}

/**
 * Gets the end time zone.
 *
 * @return the end time zone
 * @throws ServiceLocalException
 *             the service local exception
 */
public TimeZoneDefinition getEndTimeZone() throws ServiceLocalException {
    return (TimeZoneDefinition) this.getPropertyBag()
            .getObjectFromPropertyDefinition(AppointmentSchema.EndTimeZone);
}

/**
 * Sets the end time zone.
 *
 * @param value
 *            the new end time zone
 * @throws Exception
 *             the exception
 */
public void setEndTimeZone(TimeZoneDefinition value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.EndTimeZone, value);
}

/**
 * Gets the type of conferencing that will be used during the
 * meeting.
 *
 * @return the conference type
 * @throws ServiceLocalException
 *             the service local exception
 */
public Integer getConferenceType() throws ServiceLocalException {
    return (Integer) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.ConferenceType);
}

/**
 * Sets the conference type.
 *
 * @param value
 *            the new conference type
 * @throws Exception
 *             the exception
 */
public void setConferenceType(Integer value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.ConferenceType, value);
}

/**
 * Gets a value indicating whether new time proposals are allowed
 * for attendees of this meeting.
 *
 * @return the allow new time proposal
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getAllowNewTimeProposal() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.AllowNewTimeProposal);
}

/**
 * Sets the allow new time proposal.
 *
 * @param value
 *            the new allow new time proposal
 * @throws Exception
 *             the exception
 */
public void setAllowNewTimeProposal(Boolean value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.AllowNewTimeProposal, value);
}

/**
 * Gets a value indicating whether this is an online meeting.
 *
 * @return the checks if is online meeting
 * @throws ServiceLocalException
 *             the service local exception
 */
public Boolean getIsOnlineMeeting() throws ServiceLocalException {
    return (Boolean) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.IsOnlineMeeting);
}

/**
 * Sets the checks if is online meeting.
 *
 * @param value
 *            the new checks if is online meeting
 * @throws Exception
 *             the exception
 */
public void setIsOnlineMeeting(Boolean value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.IsOnlineMeeting, value);
}

/**
 * Gets the URL of the meeting workspace. A meeting workspace is a
 * shared Web site for planning meetings and tracking results.
 *
 * @return the meeting workspace url
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getMeetingWorkspaceUrl() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.MeetingWorkspaceUrl);
}

/**
 * Sets the meeting workspace url.
 *
 * @param value
 *            the new meeting workspace url
 * @throws Exception
 *             the exception
 */
public void setMeetingWorkspaceUrl(String value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.MeetingWorkspaceUrl, value);
}

/**
 * Gets the URL of the Microsoft NetShow online meeting.
 *
 * @return the net show url
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getNetShowUrl() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.NetShowUrl);
}

/**
 * Sets the net show url.
 *
 * @param value
 *            the new net show url
 * @throws Exception
 *             the exception
 */
public void setNetShowUrl(String value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.NetShowUrl, value);
}

/**
 * Gets the ICalendar Uid.
 *
 * @return the i cal uid
 * @throws ServiceLocalException
 *             the service local exception
 */
public String getICalUid() throws ServiceLocalException {
    return (String) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.ICalUid);
}

/**
 * Sets the ICalendar Uid.
 *
 * @param value
 *            the i cal uid
 * @throws Exception
 *             the exception
 */
public void setICalUid(String value) throws Exception {
    this.getPropertyBag().setObjectFromPropertyDefinition(
            AppointmentSchema.ICalUid, value);
}

/**
 * Gets the ICalendar RecurrenceId.
 *
 * @return the i cal recurrence id
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getICalRecurrenceId() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.ICalRecurrenceId);
}

/**
 * Gets the ICalendar DateTimeStamp.
 *
 * @return the i cal date time stamp
 * @throws ServiceLocalException
 *             the service local exception
 */
public Date getICalDateTimeStamp() throws ServiceLocalException {
    return (Date) this.getPropertyBag().getObjectFromPropertyDefinition(
            AppointmentSchema.ICalDateTimeStamp);
}
}
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.gef.EditPart; import org.eclipse.gef.commands.Command; import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil; import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand; import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy; import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest; import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter; import org.eclipse.gmf.runtime.notation.Node; import org.eclipse.gmf.runtime.notation.View; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;

/**
 * Canonical edit policy keeping the views of the mediator-flow compartment
 * (visual id 7045) in sync with the semantic MediatorFlow children.
 *
 * NOTE(review): GMF-generated code — do not hand-edit logic, or regeneration
 * will silently drop the changes.
 *
 * @generated
 */
public class MediatorFlowMediatorFlowCompartment18CanonicalEditPolicy extends
		CanonicalEditPolicy {

	/**
	 * @generated
	 */
	protected void refreshOnActivate() {
		// Need to activate editpart children before invoking the canonical refresh for EditParts to add event listeners
		List<?> c = getHost().getChildren();
		for (int i = 0; i < c.size(); i++) {
			((EditPart) c.get(i)).activate();
		}
		super.refreshOnActivate();
	}

	/**
	 * The semantic feature whose contents this policy mirrors as views.
	 *
	 * @generated
	 */
	protected EStructuralFeature getFeatureToSynchronize() {
		return EsbPackage.eINSTANCE.getMediatorFlow_Children();
	}

	/**
	 * Returns the model elements that should have a view in this compartment.
	 *
	 * @generated
	 */
	@SuppressWarnings("rawtypes")
	protected List getSemanticChildrenList() {
		View viewObject = (View) getHost().getModel();
		LinkedList<EObject> result = new LinkedList<EObject>();
		List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
				.getMediatorFlowMediatorFlowCompartment_7045SemanticChildren(viewObject);
		for (EsbNodeDescriptor d : childDescriptors) {
			result.add(d.getModelElement());
		}
		return result;
	}

	/**
	 * A view is orphaned when it belongs to this diagram but its semantic
	 * element is no longer among the compartment's semantic children.
	 *
	 * @generated
	 */
	protected boolean isOrphaned(Collection<EObject> semanticChildren,
			final View view) {
		return isMyDiagramElement(view)
				&& !semanticChildren.contains(view.getElement());
	}

	/**
	 * Tests whether the view's visual id is one of the mediator/endpoint kinds
	 * this compartment owns.
	 *
	 * @generated
	 */
	private boolean isMyDiagramElement(View view) {
		int visualID = EsbVisualIDRegistry.getVisualID(view);
		switch (visualID) {
		case DropMediatorEditPart.VISUAL_ID:
		case PropertyMediatorEditPart.VISUAL_ID:
		case ThrottleMediatorEditPart.VISUAL_ID:
		case FilterMediatorEditPart.VISUAL_ID:
		case LogMediatorEditPart.VISUAL_ID:
		case EnrichMediatorEditPart.VISUAL_ID:
		case XSLTMediatorEditPart.VISUAL_ID:
		case SwitchMediatorEditPart.VISUAL_ID:
		case SequenceEditPart.VISUAL_ID:
		case EventMediatorEditPart.VISUAL_ID:
		case EntitlementMediatorEditPart.VISUAL_ID:
		case ClassMediatorEditPart.VISUAL_ID:
		case SpringMediatorEditPart.VISUAL_ID:
		case ScriptMediatorEditPart.VISUAL_ID:
		case FaultMediatorEditPart.VISUAL_ID:
		case XQueryMediatorEditPart.VISUAL_ID:
		case CommandMediatorEditPart.VISUAL_ID:
		case DBLookupMediatorEditPart.VISUAL_ID:
		case DBReportMediatorEditPart.VISUAL_ID:
		case SmooksMediatorEditPart.VISUAL_ID:
		case SendMediatorEditPart.VISUAL_ID:
		case HeaderMediatorEditPart.VISUAL_ID:
		case CloneMediatorEditPart.VISUAL_ID:
		case CacheMediatorEditPart.VISUAL_ID:
		case IterateMediatorEditPart.VISUAL_ID:
		case CalloutMediatorEditPart.VISUAL_ID:
		case TransactionMediatorEditPart.VISUAL_ID:
		case RMSequenceMediatorEditPart.VISUAL_ID:
		case RuleMediatorEditPart.VISUAL_ID:
		case OAuthMediatorEditPart.VISUAL_ID:
		case AggregateMediatorEditPart.VISUAL_ID:
		case StoreMediatorEditPart.VISUAL_ID:
		case BuilderMediatorEditPart.VISUAL_ID:
		case CallTemplateMediatorEditPart.VISUAL_ID:
		case PayloadFactoryMediatorEditPart.VISUAL_ID:
		case EnqueueMediatorEditPart.VISUAL_ID:
		case URLRewriteMediatorEditPart.VISUAL_ID:
		case ValidateMediatorEditPart.VISUAL_ID:
		case RouterMediatorEditPart.VISUAL_ID:
		case ConditionalRouterMediatorEditPart.VISUAL_ID:
		case BAMMediatorEditPart.VISUAL_ID:
		case BeanMediatorEditPart.VISUAL_ID:
		case EJBMediatorEditPart.VISUAL_ID:
		case DefaultEndPointEditPart.VISUAL_ID:
		case AddressEndPointEditPart.VISUAL_ID:
		case FailoverEndPointEditPart.VISUAL_ID:
		case RecipientListEndPointEditPart.VISUAL_ID:
		case WSDLEndPointEditPart.VISUAL_ID:
		case NamedEndpointEditPart.VISUAL_ID:
		case LoadBalanceEndPointEditPart.VISUAL_ID:
		case APIResourceEndpointEditPart.VISUAL_ID:
		case AddressingEndpointEditPart.VISUAL_ID:
		case HTTPEndpointEditPart.VISUAL_ID:
		case TemplateEndpointEditPart.VISUAL_ID:
		case CloudConnectorEditPart.VISUAL_ID:
		case CloudConnectorOperationEditPart.VISUAL_ID:
		case LoopBackMediatorEditPart.VISUAL_ID:
		case RespondMediatorEditPart.VISUAL_ID:
		case CallMediatorEditPart.VISUAL_ID:
		case DataMapperMediatorEditPart.VISUAL_ID:
		case FastXSLTMediatorEditPart.VISUAL_ID:
			return true;
		}
		return false;
	}

	/**
	 * Reconciles existing views with the desired semantic children.
	 * (Continues beyond this chunk.)
	 *
	 * @generated
	 */
	protected void refreshSemantic() {
		if (resolveSemanticElement() == null) {
			return;
		}
		LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
		List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
				.getMediatorFlowMediatorFlowCompartment_7045SemanticChildren((View) getHost()
						.getModel());
		LinkedList<View> orphaned = new LinkedList<View>();
		// we care to check only views we recognize as ours
		LinkedList<View> knownViewChildren = new LinkedList<View>();
		for (View v : getViewChildren()) {
			if (isMyDiagramElement(v)) {
				knownViewChildren.add(v);
			}
		}
		// alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
		//
		// iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
		// iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
		// to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
		for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors
				.iterator(); descriptorsIterator.hasNext();) {
			EsbNodeDescriptor next = descriptorsIterator.next();
			String hint = EsbVisualIDRegistry.getType(next.getVisualID());
			LinkedList<View> perfectMatch = new LinkedList<View>();
			// both semanticElement and hint match that of NodeDescriptor
			for (View childView : getViewChildren()) {
				EObject semanticElement = childView.getElement();
				if (next.getModelElement().equals(semanticElement)) {
					if (hint.equals(childView.getType())) {
						perfectMatch.add(childView);
						// actually, can stop iteration over view children here, but
						// may want to use not the first view but last one as a 'real' match (the way original CEP does
						// with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
					}
				}
			}
			if (perfectMatch.size() > 0) {
				descriptorsIterator.remove();
				// precise match found no need to create anything for the NodeDescriptor
				// use only one view (first or last?), keep rest as orphaned for further consideration
				knownViewChildren.remove(perfectMatch.getFirst());
			}
		}
		// those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
		// or those we have potential matches to, and thus need to be recreated, preserving size/location information.
orphaned.addAll(knownViewChildren); // ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>( childDescriptors.size()); for (EsbNodeDescriptor next : childDescriptors) { String hint = EsbVisualIDRegistry.getType(next.getVisualID()); IAdaptable elementAdapter = new CanonicalElementAdapter( next.getModelElement(), hint); CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor( elementAdapter, Node.class, hint, ViewUtil.APPEND, false, host().getDiagramPreferencesHint()); viewDescriptors.add(descriptor); } boolean changed = deleteViews(orphaned.iterator()); // CreateViewRequest request = getCreateViewRequest(viewDescriptors); Command cmd = getCreateViewCommand(request); if (cmd != null && cmd.canExecute()) { SetViewMutabilityCommand.makeMutable( new EObjectAdapter(host().getNotationView())).execute(); executeCommand(cmd); @SuppressWarnings("unchecked") List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject(); createdViews.addAll(nl); } if (changed || createdViews.size() > 0) { postProcessRefreshSemantic(createdViews); } if (createdViews.size() > 1) { // perform a layout of the container DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host() .getEditingDomain(), createdViews, host()); executeCommand(new ICommandProxy(layoutCmd)); } makeViewsImmutable(createdViews); } }
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.autofill;

import android.annotation.SuppressLint;
import android.content.Context;
import android.widget.EditText;

import androidx.test.filters.SmallTest;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;

import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.autofill.AutofillUiUtils.ErrorType;

import java.util.Calendar;

/**
 * Tests the AutofillUiUtils's java code.
 *
 * <p>Covers expiration-date validation ({@link
 * AutofillUiUtils#getExpirationDateErrorType}) as well as the month/year
 * parsing helpers ({@link AutofillUiUtils#getMonth} and {@link
 * AutofillUiUtils#getFourDigitYear}). Month/year fixtures are derived from the
 * real current date in {@link #setUp()}.
 */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class AutofillUiUtilsTest {
    private Context mContext;
    private EditText mMonthInput;
    private EditText mYearInput;
    // Current month, 1-based (Calendar.MONTH is 0-based).
    private int mThisMonth;
    // Current year modulo 100, i.e. the two-digit "YY" form.
    private int mTwoDigitThisYear;

    @Before
    public void setUp() {
        mContext = RuntimeEnvironment.application;
        mMonthInput = new EditText(mContext);
        mYearInput = new EditText(mContext);
        mThisMonth = Calendar.getInstance().get(Calendar.MONTH) + 1;
        mTwoDigitThisYear = Calendar.getInstance().get(Calendar.YEAR) % 100;
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithInvalidMonthReturnsExpirationMonthErrorType() {
        mMonthInput.setText("20"); // Months only go up to 12.
        mYearInput.setText(String.valueOf(mTwoDigitThisYear));
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.EXPIRATION_MONTH, errorType);
    }

    @Test
    @SmallTest
    public void testExpirationDateErrorWithInvalidYearReturnsExpirationYearErrorType() {
        mMonthInput.setText(String.valueOf(mThisMonth));
        mYearInput.setText(String.valueOf(mTwoDigitThisYear - 1)); // Already expired.
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.EXPIRATION_YEAR, errorType);
    }

    @Test
    @SmallTest
    public void testExpirationDateErrorWithInvalidFutureYearReturnsExpirationYearErrorType() {
        mMonthInput.setText(String.valueOf(mThisMonth));
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 21)); // Too far in the future.
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.EXPIRATION_YEAR, errorType);
    }

    @Test
    @SmallTest
    public void testExpirationDateErrorWithCurrentYearAndCurrentMonthReturnsNoneErrorType() {
        mMonthInput.setText(String.valueOf(mThisMonth));
        mYearInput.setText(String.valueOf(mTwoDigitThisYear));
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.NONE, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithEditingMonthAndNotFocusedYearReturnsNotEnoughInfoErrorType() {
        mMonthInput.setText("1");
        mYearInput.setText("");
        mMonthInput.requestFocus(); // currently being edited
        int errorType = AutofillUiUtils.getExpirationDateErrorType(mMonthInput, mYearInput,
                /*didFocusOnMonth=*/ true, /*didFocusOnYear=*/false);
        Assert.assertEquals(ErrorType.NOT_ENOUGH_INFO, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithEditingMonthAndFocusedInvalidYearReturnsExpirationYearErrorType() {
        mMonthInput.setText("1");
        mYearInput.setText("");
        mMonthInput.requestFocus(); // currently being edited
        int errorType = AutofillUiUtils.getExpirationDateErrorType(mMonthInput, mYearInput,
                /*didFocusOnMonth=*/ true, /*didFocusOnYear=*/true);
        Assert.assertEquals(ErrorType.EXPIRATION_YEAR, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithValidMonthAndIncompleteYearReturnsNotEnoughInfoErrorType() {
        mMonthInput.setText(String.valueOf(mThisMonth));
        mYearInput.setText("1");
        mYearInput.requestFocus(); // currently being edited
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.NOT_ENOUGH_INFO, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithValidMonthAndValidYearReturnsNoneErrorType() {
        mMonthInput.setText(String.valueOf(mThisMonth));
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 1));
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.NONE, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithMonthBeingEditedAndValidYearReturnsNotEnoughInfo() {
        mMonthInput.setText("");
        mMonthInput.requestFocus();
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 1));
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.NOT_ENOUGH_INFO, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testExpirationDateErrorWithMonthSetToZeroAndValidYearReturnsNotEnoughInfo() {
        mMonthInput.setText("0");
        mMonthInput.requestFocus();
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 1));
        int errorType = getExpirationDateErrorForUserEnteredMonthAndYear();
        Assert.assertEquals(ErrorType.NOT_ENOUGH_INFO, errorType);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetMonthWithNonNumericInputReturnsNegativeOne() {
        mMonthInput.setText("MM");
        int month = AutofillUiUtils.getMonth(mMonthInput);
        Assert.assertEquals(-1, month);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetMonthWithNegativeNumberInputReturnsNegativeOne() {
        mMonthInput.setText("-20");
        int month = AutofillUiUtils.getMonth(mMonthInput);
        Assert.assertEquals(-1, month);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetMonthWithZeroAsInputReturnsNegativeOne() {
        mMonthInput.setText("0");
        int month = AutofillUiUtils.getMonth(mMonthInput);
        Assert.assertEquals(-1, month);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetMonthWithThirteenAsInputReturnsNegativeOne() {
        mMonthInput.setText("13");
        int month = AutofillUiUtils.getMonth(mMonthInput);
        Assert.assertEquals(-1, month);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetFourDigitYearWithNonNumericInputReturnsNegativeOne() {
        mYearInput.setText("YY");
        // Fixed: previously called getMonth(mYearInput), which exercised the
        // wrong method and left getFourDigitYear's non-numeric path untested.
        int year = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(-1, year);
    }

    @Test
    @SmallTest
    @SuppressLint("SetTextI18n")
    public void testGetFourDigitYearWithNegativeNumberInputReturnsNegativeOne() {
        mYearInput.setText("-20");
        int fourDigitYear = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(-1, fourDigitYear);
    }

    @Test
    @SmallTest
    public void testGetFourDigitYearForCurrentTwoDigitYearReturnsCurrentFourDigitYear() {
        // Set the edit text value to be the current year in YY format.
        mYearInput.setText(String.valueOf(mTwoDigitThisYear));
        int fourDigitYear = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(Calendar.getInstance().get(Calendar.YEAR), fourDigitYear);
    }

    @Test
    @SmallTest
    public void testGetFourDigitYearForPreviousYearReturnsNegativeOne() {
        // Set the edit text value to be the previous year in YY format.
        mYearInput.setText(String.valueOf(mTwoDigitThisYear - 1));
        int fourDigitYear = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(-1, fourDigitYear);
    }

    @Test
    @SmallTest
    public void testGetFourDigitYearForTenYearsFromNowReturnsValidFourDigitYear() {
        // Set the edit text value to be ten years from now in YY format.
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 10));
        int fourDigitYear = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(Calendar.getInstance().get(Calendar.YEAR) + 10, fourDigitYear);
    }

    @Test
    @SmallTest
    public void testGetFourDigitYearForElevenYearsFromNowReturnsNegativeOne() {
        // Set the edit text value to be eleven years from now in YY format.
        mYearInput.setText(String.valueOf(mTwoDigitThisYear + 11));
        int fourDigitYear = AutofillUiUtils.getFourDigitYear(mYearInput);
        Assert.assertEquals(-1, fourDigitYear);
    }

    /**
     * Convenience wrapper: validates the current month/year inputs with both
     * fields treated as having been focused by the user.
     */
    @ErrorType
    private int getExpirationDateErrorForUserEnteredMonthAndYear() {
        return AutofillUiUtils.getExpirationDateErrorType(mMonthInput, mYearInput,
                /*didFocusOnMonth=*/ true, /*didFocusOnYear=*/true);
    }
}
/*
 * This file is part of Sponge, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered.org <http://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.spongepowered.api.world.extent;

import com.flowpowered.math.vector.Vector3i;
import com.google.common.base.Optional;
import org.spongepowered.api.block.BlockSnapshot;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.block.ScheduledBlockUpdate;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.util.Direction;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.weather.WeatherUniverse;

import java.util.Collection;

/**
 * Contains blocks, tile entities, entities, and possibly other game objects.
 *
 * <p>Every block accessor below comes in two equivalent overloads: one taking
 * a {@link Vector3i} position and one taking the three coordinates
 * individually.</p>
 */
public interface Extent extends EntityUniverse, TileEntityVolume, WeatherUniverse, BiomeArea {

    /**
     * Get a representation of the block at the given position.
     *
     * @param position The position
     * @return The block
     */
    Location getFullBlock(Vector3i position);

    /**
     * Get a representation of the block at the given position.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return The block
     */
    Location getFullBlock(int x, int y, int z);

    /**
     * Get the base type of block.
     *
     * <p>The type does not include block data such as the contents of
     * inventories.</p>
     *
     * @param position The position of the block
     * @return The type of block
     */
    BlockType getBlockType(Vector3i position);

    /**
     * Get the base type of block.
     *
     * <p>The type does not include block data such as the contents of
     * inventories.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return The type of block
     */
    BlockType getBlockType(int x, int y, int z);

    /**
     * Replace the block at this position by a new type.
     *
     * <p>This will remove any extended block data at the given position.</p>
     *
     * @param position The position of the block
     * @param type The new type
     */
    void setBlockType(Vector3i position, BlockType type);

    /**
     * Replace the block at this position by a new type.
     *
     * <p>This will remove any extended block data at the given position.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param type The new type
     */
    void setBlockType(int x, int y, int z, BlockType type);

    /**
     * Get a snapshot of this block at the current point in time.
     *
     * <p>A snapshot is disconnected from the {@link Extent} that it was
     * taken from so changes to the original block do not affect the
     * snapshot.</p>
     *
     * @param position The position of the block
     * @return A snapshot
     */
    BlockSnapshot getBlockSnapshot(Vector3i position);

    /**
     * Get a snapshot of this block at the current point in time.
     *
     * <p>A snapshot is disconnected from the {@link Extent} that it was
     * taken from so changes to the original block do not affect the
     * snapshot.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return A snapshot
     */
    BlockSnapshot getBlockSnapshot(int x, int y, int z);

    /**
     * Replace the block at this position with a copy of the given snapshot.
     *
     * <p>Changing the snapshot afterwards will not affect the block that
     * has been placed at this location.</p>
     *
     * @param position The position of the block
     * @param snapshot The snapshot
     */
    void setBlockSnapshot(Vector3i position, BlockSnapshot snapshot);

    /**
     * Replace the block at this position with a copy of the given snapshot.
     *
     * <p>Changing the snapshot afterwards will not affect the block that
     * has been placed at this location.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param snapshot The snapshot
     */
    void setBlockSnapshot(int x, int y, int z, BlockSnapshot snapshot);

    /**
     * Get an instance of the given data class for this block.
     *
     * <p>For example, if this block represents a sign,
     * {@code getBlockData(Sign.class)} would yield an instance of
     * {@code Sign} to change the contents of the sign. However, if
     * this block does not represent a sign, then an instance will not
     * be returned.</p>
     *
     * @param position The position of the block
     * @param dataClass The data class
     * @param <T> The type of data
     * @return An instance of the class
     */
    <T> Optional<T> getBlockData(Vector3i position, Class<T> dataClass);

    /**
     * Get an instance of the given data class for this block.
     *
     * <p>For example, if this block represents a sign,
     * {@code getBlockData(Sign.class)} would yield an instance of
     * {@code Sign} to change the contents of the sign. However, if
     * this block does not represent a sign, then an instance will not
     * be returned.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param dataClass The data class
     * @param <T> The type of data
     * @return An instance of the class
     */
    <T> Optional<T> getBlockData(int x, int y, int z, Class<T> dataClass);

    /**
     * Simulates the interaction with this object as if a player had done so.
     *
     * @param position The position of the block
     */
    void interactBlock(Vector3i position);

    /**
     * Simulates the interaction with this object as if a player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     */
    void interactBlock(int x, int y, int z);

    /**
     * Simulates the interaction with this object using the given item as if
     * the player had done so.
     *
     * @param position The position of the block
     * @param itemStack The item
     */
    void interactBlockWith(Vector3i position, ItemStack itemStack);

    /**
     * Simulates the interaction with this object using the given item as if
     * the player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The item
     */
    void interactBlockWith(int x, int y, int z, ItemStack itemStack);

    /**
     * Simulate the digging of the block as if a player had done so.
     *
     * @param position The position of the block
     * @return Whether the block was destroyed
     */
    boolean digBlock(Vector3i position);

    /**
     * Simulate the digging of the block as if a player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Whether the block was destroyed
     */
    boolean digBlock(int x, int y, int z);

    /**
     * Simulate the digging of the block with the given tool as if a player
     * had done so.
     *
     * @param position The position of the block
     * @param itemStack The tool
     * @return Whether the block was destroyed
     */
    boolean digBlockWith(Vector3i position, ItemStack itemStack);

    /**
     * Simulate the digging of the block with the given tool as if a player
     * had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The tool
     * @return Whether the block was destroyed
     */
    boolean digBlockWith(int x, int y, int z, ItemStack itemStack);

    /**
     * Gets the time it takes to dig this block with a fist in ticks.
     *
     * @param position The position of the block
     * @return The time in ticks
     */
    int getBlockDigTime(Vector3i position);

    /**
     * Gets the time it takes to dig this block with a fist in ticks.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return The time in ticks
     */
    int getBlockDigTime(int x, int y, int z);

    /**
     * Gets the time it takes to dig this block with the specified item in
     * ticks.
     *
     * @param position The position of the block
     * @param itemStack The item to pretend-dig with
     * @return The time in ticks
     */
    int getBlockDigTimeWith(Vector3i position, ItemStack itemStack);

    /**
     * Gets the time it takes to dig this block with the specified item in
     * ticks.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The item to pretend-dig with
     * @return The time in ticks
     */
    int getBlockDigTimeWith(int x, int y, int z, ItemStack itemStack);

    /**
     * Get the light level for this object.
     *
     * <p>Higher levels indicate a higher luminance.</p>
     *
     * @param position The position of the block
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminance(Vector3i position);

    /**
     * Get the light level for this object.
     *
     * <p>Higher levels indicate a higher luminance.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminance(int x, int y, int z);

    /**
     * Get the light level for this object that is caused by an overhead sky.
     *
     * <p>Higher levels indicate a higher luminance. If no sky is overhead,
     * the return value may be 0.</p>
     *
     * @param position The position of the block
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminanceFromSky(Vector3i position);

    /**
     * Get the light level for this object that is caused by an overhead sky.
     *
     * <p>Higher levels indicate a higher luminance. If no sky is overhead,
     * the return value may be 0.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminanceFromSky(int x, int y, int z);

    /**
     * Get the light level for this object that is caused by everything
     * other than the sky.
     *
     * <p>Higher levels indicate a higher luminance.</p>
     *
     * @param position The position of the block
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminanceFromGround(Vector3i position);

    /**
     * Get the light level for this object that is caused by everything
     * other than the sky.
     *
     * <p>Higher levels indicate a higher luminance.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return A light level, nominally between 0 and 15, inclusive
     */
    byte getLuminanceFromGround(int x, int y, int z);

    /**
     * Test whether the object is powered.
     *
     * @param position The position of the block
     * @return Whether powered
     */
    boolean isBlockPowered(Vector3i position);

    /**
     * Test whether the object is powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Whether powered
     */
    boolean isBlockPowered(int x, int y, int z);

    /**
     * Test whether the object is indirectly powered.
     *
     * @param position The position of the block
     * @return Whether powered
     */
    boolean isBlockIndirectlyPowered(Vector3i position);

    /**
     * Test whether the object is indirectly powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Whether powered
     */
    boolean isBlockIndirectlyPowered(int x, int y, int z);

    /**
     * Test whether the face in the given direction is powered.
     *
     * @param position The position of the block
     * @param direction The direction
     * @return Whether powered
     */
    boolean isBlockFacePowered(Vector3i position, Direction direction);

    /**
     * Test whether the face in the given direction is powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param direction The direction
     * @return Whether powered
     */
    boolean isBlockFacePowered(int x, int y, int z, Direction direction);

    /**
     * Test whether the face in the given direction is indirectly powered.
     *
     * @param position The position of the block
     * @param direction The direction
     * @return Whether powered
     */
    boolean isBlockFaceIndirectlyPowered(Vector3i position, Direction direction);

    /**
     * Test whether the face in the given direction is indirectly powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param direction The direction
     * @return Whether powered
     */
    boolean isBlockFaceIndirectlyPowered(int x, int y, int z, Direction direction);

    /**
     * Get all the faces of this block that are directly powered.
     *
     * @param position The position of the block
     * @return Faces powered
     */
    Collection<Direction> getPoweredBlockFaces(Vector3i position);

    /**
     * Get all the faces of this block that are directly powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Faces powered
     */
    Collection<Direction> getPoweredBlockFaces(int x, int y, int z);

    /**
     * Get all faces of this block that are indirectly powered.
     *
     * @param position The position of the block
     * @return Faces indirectly powered
     */
    Collection<Direction> getIndirectlyPoweredBlockFaces(Vector3i position);

    /**
     * Get all faces of this block that are indirectly powered.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Faces indirectly powered
     */
    Collection<Direction> getIndirectlyPoweredBlockFaces(int x, int y, int z);

    /**
     * Test whether the block will block the movement of entities.
     *
     * <p>NOTE(review): the method name says "passable" while the return doc
     * says "blocks movement" — the two are opposites; confirm which polarity
     * implementations use before relying on this.</p>
     *
     * @param position The position of the block
     * @return Blocks movement
     */
    boolean isBlockPassable(Vector3i position);

    /**
     * Test whether the block will block the movement of entities.
     *
     * <p>NOTE(review): see the {@link Vector3i} overload about the
     * passable/blocking polarity of the return value.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return Blocks movement
     */
    boolean isBlockPassable(int x, int y, int z);

    /**
     * Test whether the given face of the block can catch fire.
     *
     * @param position The position of the block
     * @param faceDirection The face of the block to check
     * @return Is flammable
     */
    boolean isBlockFlammable(Vector3i position, Direction faceDirection);

    /**
     * Test whether the given face of the block can catch fire.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param faceDirection The face of the block to check
     * @return Is flammable
     */
    boolean isBlockFlammable(int x, int y, int z, Direction faceDirection);

    /**
     * Gets a list of {@link ScheduledBlockUpdate}s on this block.
     *
     * @param position The position of the block
     * @return A list of ScheduledBlockUpdates on this block
     */
    Collection<ScheduledBlockUpdate> getScheduledUpdates(Vector3i position);

    /**
     * Gets a list of {@link ScheduledBlockUpdate}s on this block.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @return A list of ScheduledBlockUpdates on this block
     */
    Collection<ScheduledBlockUpdate> getScheduledUpdates(int x, int y, int z);

    /**
     * Adds a new {@link ScheduledBlockUpdate} to this block.
     *
     * @param position The position of the block
     * @param priority The priority of the scheduled update
     * @param ticks The ticks until the scheduled update should be processed
     * @return The newly created scheduled update
     */
    ScheduledBlockUpdate addScheduledUpdate(Vector3i position, int priority, int ticks);

    /**
     * Adds a new {@link ScheduledBlockUpdate} to this block.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param priority The priority of the scheduled update
     * @param ticks The ticks until the scheduled update should be processed
     * @return The newly created scheduled update
     */
    ScheduledBlockUpdate addScheduledUpdate(int x, int y, int z, int priority, int ticks);

    /**
     * Removes a {@link ScheduledBlockUpdate} from this block.
     *
     * @param position The position of the block
     * @param update The ScheduledBlockUpdate to remove
     */
    void removeScheduledUpdate(Vector3i position, ScheduledBlockUpdate update);

    /**
     * Removes a {@link ScheduledBlockUpdate} from this block.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param update The ScheduledBlockUpdate to remove
     */
    void removeScheduledUpdate(int x, int y, int z, ScheduledBlockUpdate update);

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): unlike every other query in this interface, this
     * method takes no position — it is unclear what "this" refers to on an
     * {@link Extent}; confirm the intended contract with the API owners.</p>
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();
}
/*
 * Copyright 2017 - 2019 Roman Borris (pcfreak9000), Paul Hagedorn (Panzer1119)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package de.omnikryptec.util.settings.keys;

import java.util.Collection;
import java.util.Objects;

/**
 * A named composite of {@link IKey}s. Because a {@link KeyGroup} is itself an
 * {@link IKey} it may contain other groups as well as plain {@link Key}s, so
 * groups nest arbitrarily.
 * <p>
 * Depending on {@link #isAllIKeysNeedsToBePressed()} the group counts as
 * pressed either when every contained key is pressed at the same time (AND
 * semantics) or when at least one of them is pressed (OR semantics).
 */
public class KeyGroup extends KeyContainer implements IKey {

    /**
     * Placeholder {@link de.omnikryptec.util.settings.keys.KeyGroup} that can be
     * returned instead of null.
     */
    public static final KeyGroup DEFAULT_NULL_KEY_GROUP = new KeyGroup("DEFAULT_NULL_KEY_GROUP");

    // Name used to identify this group.
    protected final String name;
    // true = AND semantics (all keys pressed), false = OR semantics (any key).
    protected boolean allIKeysNeedsToBePressed;

    /**
     * Constructs a group with AND semantics: all contained
     * {@link IKey}s must be pressed at the same time.
     *
     * @param name Name of the group
     */
    public KeyGroup(final String name) {
        this(name, true);
    }

    /**
     * Constructs an empty group.
     *
     * @param name                     Name of the group
     * @param allIKeysNeedsToBePressed <tt>true</tt> for AND semantics,
     *                                 <tt>false</tt> for OR semantics
     */
    public KeyGroup(final String name, final boolean allIKeysNeedsToBePressed) {
        this(name, allIKeysNeedsToBePressed, null);
    }

    /**
     * Constructs a group pre-populated with the given keys.
     *
     * @param name                     Name of the group
     * @param allIKeysNeedsToBePressed <tt>true</tt> for AND semantics,
     *                                 <tt>false</tt> for OR semantics
     * @param ikeys                    {@link IKey}s to add, may be null
     */
    public KeyGroup(final String name, final boolean allIKeysNeedsToBePressed, final Collection<IKey> ikeys) {
        this.name = name;
        this.allIKeysNeedsToBePressed = allIKeysNeedsToBePressed;
        if (ikeys != null) {
            addIKeys(ikeys);
        }
    }

    /**
     * Returns the name of this group.
     *
     * @return Name of the group
     */
    @Override
    public String getName() {
        return this.name;
    }

    /**
     * Returns whether this group currently counts as pressed. An empty group
     * is never pressed.
     *
     * @return <tt>true</tt> if this group is pressed
     */
    @Override
    public boolean isPressed() {
        if (isEmpty()) {
            return false;
        }
        if (this.allIKeysNeedsToBePressed) {
            // AND: the first unpressed key decides.
            for (final IKey key : getIKeys()) {
                if (!key.isPressed()) {
                    return false;
                }
            }
            return true;
        }
        // OR: the first pressed key decides.
        for (final IKey key : getIKeys()) {
            if (key.isPressed()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns whether this group has been pressed for a time within the given
     * bounds, using the same AND/OR semantics as {@link #isPressed()}.
     *
     * @param minTime Minimum pressing time
     * @param maxTime Maximum pressing time
     *
     * @return <tt>true</tt> if this group is pressed for the specified time
     */
    @Override
    public boolean isLongPressed(final double minTime, final double maxTime) {
        if (isEmpty()) {
            return false;
        }
        if (this.allIKeysNeedsToBePressed) {
            // AND: the first key not long-pressed decides.
            for (final IKey key : getIKeys()) {
                if (!key.isLongPressed(minTime, maxTime)) {
                    return false;
                }
            }
            return true;
        }
        // OR: the first long-pressed key decides.
        for (final IKey key : getIKeys()) {
            if (key.isLongPressed(minTime, maxTime)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns whether this group uses AND semantics (all keys must be pressed
     * at the same time).
     *
     * @return <tt>true</tt> for AND semantics
     */
    public boolean isAllIKeysNeedsToBePressed() {
        return this.allIKeysNeedsToBePressed;
    }

    /**
     * Switches this group between AND and OR semantics.
     *
     * @param allIKeysNeedsToBePressed <tt>true</tt> for AND semantics
     *
     * @return A reference to this group
     */
    public KeyGroup setAllIKeysNeedsToBePressed(final boolean allIKeysNeedsToBePressed) {
        this.allIKeysNeedsToBePressed = allIKeysNeedsToBePressed;
        return this;
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        // NOTE: keeps the original isAssignableFrom + super.equals ordering.
        if (o == null || !getClass().isAssignableFrom(o.getClass()) || !super.equals(o)) {
            return false;
        }
        final KeyGroup that = (KeyGroup) o;
        return this.allIKeysNeedsToBePressed == that.allIKeysNeedsToBePressed
                && Objects.equals(this.name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), this.name, this.allIKeysNeedsToBePressed);
    }

    @Override
    public String toString() {
        return "KeyGroup{" + "name='" + this.name + '\'' + ", allIKeysNeedsToBePressed="
                + this.allIKeysNeedsToBePressed + ", ikeys=" + this.ikeys + '}';
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DeleteKeyPairResponse.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.6  Built on : Aug 30, 2011 (10:01:01 CEST)
 */

package com.amazon.ec2;

/**
 * DeleteKeyPairResponse bean class.
 *
 * Axis2 ADB (Axis Data Binding) wrapper for the EC2 DeleteKeyPairResponse
 * top-level element. NOTE(review): auto-generated code — do not hand-edit
 * logic; regenerate from the WSDL instead.
 */
public class DeleteKeyPairResponse implements org.apache.axis2.databinding.ADBBean{

    // QName of the wrapped element in the EC2 2012-08-15 namespace.
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2012-08-15/",
            "DeleteKeyPairResponse",
            "ns1");

    // Returns the canonical prefix for the EC2 namespace, or a fresh unique one.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for DeleteKeyPairResponse
     */
    protected com.amazon.ec2.DeleteKeyPairResponseType localDeleteKeyPairResponse ;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DeleteKeyPairResponseType
     */
    public com.amazon.ec2.DeleteKeyPairResponseType getDeleteKeyPairResponse(){
        return localDeleteKeyPairResponse;
    }

    /**
     * Auto generated setter method
     * @param param DeleteKeyPairResponse
     */
    public void setDeleteKeyPairResponse(com.amazon.ec2.DeleteKeyPairResponseType param){
        this.localDeleteKeyPairResponse=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // Reader does not recognize the property — treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Builds a lazily-serialized OMElement backed by this bean.
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{

        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){

            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                DeleteKeyPairResponse.this.serialize(MY_QNAME,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                MY_QNAME,factory,dataSource);
    }

    // Convenience overload: serialize without emitting an explicit xsi:type.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Delegates serialization to the wrapped DeleteKeyPairResponseType value.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
            boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

        //We can safely assume an element has only one type associated with it
        if (localDeleteKeyPairResponse==null){
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        localDeleteKeyPairResponse.serialize(MY_QNAME,factory,xmlWriter);
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName,attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute (registers prefixes as needed).
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
            javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes an array of QNames as a space-separated character run,
    // registering any namespace prefixes first.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound in context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     * NOTE(review): unlike serialize(), this does not null-check the wrapped
     * property, so it throws NPE if called before setDeleteKeyPairResponse().
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{
        //We can safely assume an element has only one type associated with it
        return localDeleteKeyPairResponse.getPullParser(MY_QNAME);
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeleteKeyPairResponse parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            DeleteKeyPairResponse object = new DeleteKeyPairResponse();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                // Skip ignorable events until we reach a start or end element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Note all attributes that were handled. Used to differ normal
                // attributes from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                while(!reader.isEndElement()) {
                    if (reader.isStartElement() ){
                        if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","DeleteKeyPairResponse").equals(reader.getName())){
                            object.setDeleteKeyPairResponse(com.amazon.ec2.DeleteKeyPairResponseType.Factory.parse(reader));
                        }  // End of if for expected property start element
                        else{
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                }  // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                // Wrap stream errors in a generic Exception, per ADB convention.
                throw new java.lang.Exception(e);
            }

            return object;
        }
    }//end of factory class
}
package main.flowstoneenergy.tileentities.machines; //import cofh.api.energy.IEnergyHandler; import main.flowstoneenergy.tileentities.recipes.Recipe2_1; import main.flowstoneenergy.tileentities.recipes.RecipesMetalMixer; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.util.IChatComponent; //TODO: Enable after update public class TileEntityMachineMetalMixer extends TileEntityMachineBase /*implements IEnergyHandler*/ { @SuppressWarnings("unused") private String field_145958_o; public TileEntityMachineMetalMixer() { maxTicks = 100; energyRequired = 2000; //energy.setMaxExtract(2000); items = new ItemStack[4]; } @Override public String getName() { return null; } @Override public boolean hasCustomName() { return true; } @Override public boolean isItemValidForSlot(int slot, ItemStack stack) { if (slot != 0 || slot != 1) return false; for (Recipe2_1 r : RecipesMetalMixer.recipe21List) { if (r.getInput1().isItemEqual(stack) || r.getInput2().isItemEqual(stack)) return true; } return false; } @Override public int[] getSlotsForFace(EnumFacing side) { return new int[]{0, 1, 2, 3}; } @Override public boolean canInsertItem(int slot, ItemStack itemStack, EnumFacing side) { return true; } @Override public boolean canExtractItem(int slot, ItemStack itemStack, EnumFacing side) { return slot == 2 || slot == 3; } public void func_145951_a(String displayName) { this.field_145958_o = displayName; } @Override public void update() { super.update(); if (canMix()) { if (ticksLeft >= maxTicks) { mixMetals(); // resetTimeAndTexture(); } else { ticksLeft++; markDirty(); } } else { //resetTimeAndTexture(); } } private boolean canMix() { if (items[0] == null || items[1] == null) return false; Recipe2_1 recipe = RecipesMetalMixer.getRecipeFromStack(items[0], items[1]); if (recipe == null || recipe.getOutput() == null) return false; ItemStack output = recipe.getOutput(); boolean suitableForOutput1 = (items[2] != null && output.isItemEqual(items[2]) 
|| items[2] == null); boolean suitableForOutput2 = (items[3] != null && output.isItemEqual(items[3]) || items[3] == null); if (!suitableForOutput1 || !suitableForOutput2) return false; int totalAvailableSpace = output.getMaxStackSize() * 2; if (suitableForOutput1) { if (items[2] != null) totalAvailableSpace -= items[2].stackSize; } else { totalAvailableSpace -= output.getMaxStackSize(); } if (suitableForOutput2) { if (items[3] != null) totalAvailableSpace -= items[3].stackSize; } else { totalAvailableSpace -= output.getMaxStackSize(); } if (totalAvailableSpace < output.stackSize) return false; // TODO: Enable after update /* int availableEnergy = energy.extractEnergy(energyRequired, true); if (availableEnergy < energyRequired) return false;*/ return true; } private void mixMetals() { if (items[0] == null || items[1] == null) return; ItemStack res = RecipesMetalMixer.getRecipeFromStack(items[0], items[1]).getOutput(); if (items[2] == null) items[2] = res.copy(); else if (items[2].stackSize == res.getMaxStackSize()) if (items[3] == null) items[3] = res.copy(); else items[3].stackSize += res.stackSize; else items[2].stackSize += res.stackSize; items[0].stackSize--; if (items[0].stackSize <= 0) { items[0] = null; } items[1].stackSize--; if (items[1].stackSize <= 0) { items[1] = null; } // TODO: Enable after update //energy.extractEnergy(energyRequired, false); } public int getScaledProgress(int scale) { if (maxTicks == 0) return 0; return ticksLeft * scale / maxTicks; } @Override public int getField(int id) { // TODO Auto-generated method stub return 0; } @Override public void setField(int id, int value) { // TODO Auto-generated method stub } @Override public int getFieldCount() { // TODO Auto-generated method stub return 0; } @Override public void clear() { // TODO Auto-generated method stub } @Override public IChatComponent getDisplayName() { // TODO Auto-generated method stub return null; } // TODO: Enable after update /* @Override public int 
receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) { return energy.receiveEnergy(maxReceive, simulate); } @Override public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) { return 0; } @Override public int getEnergyStored(ForgeDirection from) { return energy.getEnergyStored(); } @Override public int getMaxEnergyStored(ForgeDirection from) { return energy.getMaxEnergyStored(); } @Override public boolean canConnectEnergy(ForgeDirection from) { return true; } */ }
package com.ilscipio.scipio.cms.media; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.ofbiz.base.util.Debug; import org.ofbiz.base.util.UtilMisc; import org.ofbiz.base.util.UtilValidate; import org.ofbiz.entity.Delegator; import org.ofbiz.entity.GenericEntityException; import org.ofbiz.entity.GenericValue; import org.ofbiz.entity.condition.EntityCondition; import org.ofbiz.entity.condition.EntityOperator; import org.ofbiz.entity.util.EntityListIterator; import org.ofbiz.entity.util.EntityUtil; import org.ofbiz.webapp.control.RequestHandler; import org.ofbiz.webapp.control.WebAppConfigurationException; public abstract class CmsMediaWorker { private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass()); public static final Set<String> VALID_DATA_RESOURCE_TYPE_LIST = Collections.unmodifiableSet(UtilMisc.toHashSet("AUDIO_OBJECT", "VIDEO_OBJECT", "IMAGE_OBJECT", "DOCUMENT_OBJECT")); protected CmsMediaWorker() { } public static GenericValue getContentForMedia(Delegator delegator, String contentId, String dataResourceId) throws GenericEntityException, IllegalArgumentException, IllegalStateException { GenericValue content; if (UtilValidate.isNotEmpty(contentId)) { content = delegator.findOne("Content", UtilMisc.toMap("contentId", contentId), false); if (UtilValidate.isEmpty(content)) { throw new IllegalArgumentException("Media file not found for contentId '" + contentId + "'"); } //dataResourceId = content.getString("dataResourceId"); } else { List<GenericValue> contentList = delegator.findByAnd("Content", UtilMisc.toMap("dataResourceId", dataResourceId), null, false); if (UtilValidate.isEmpty(contentList)) { // DEV NOTE: I was going to make this 
auto-create one for backward compat but not worth it, cms not released yet throw new IllegalArgumentException("Invalid media file - dataResourceId '" + dataResourceId + "' has no Content record" + " - either invalid media file ID or schema error - please contact your administrator"); } else if (contentList.size() > 1){ throw new IllegalStateException("Media file DataResource is associated to multiple Content records - cannot safely modify -" + " db corruption could occur if we tried to update one - please contact your administrator"); } content = contentList.get(0); //contentId = content.getString("contentId"); } return content; } public static GenericValue getDataResourceForMedia(Delegator delegator, String contentId, String dataResourceId) throws GenericEntityException, IllegalArgumentException { if (UtilValidate.isNotEmpty(dataResourceId) || UtilValidate.isNotEmpty(contentId)) { GenericValue content = getContentForMedia(delegator, contentId, dataResourceId); return content.getRelatedOne("DataResource", false); } else { throw new IllegalArgumentException("Invalid media file - dataResourceId '" + dataResourceId + "' has no Content record" + " - either invalid media file ID or schema error - please contact your administrator"); } } /** * Returns as ContentDataResourceRequiredView values (NOTE: the DataResource fields have "dr" prefix). 
* @throws GenericEntityException */ public static EntityListIterator getAllMediaContentDataResourceRequired(Delegator delegator, String dataResourceTypeId, List<String> orderBy) throws GenericEntityException { List<EntityCondition> condList = new ArrayList<>(); condList.add(EntityCondition.makeCondition("contentTypeId", "SCP_MEDIA")); if (dataResourceTypeId != null) condList.add(EntityCondition.makeCondition("drDataResourceTypeId", dataResourceTypeId)); return delegator.find("ContentDataResourceRequiredView", EntityCondition.makeCondition(condList, EntityOperator.AND), null, null, orderBy, null); } public static EntityListIterator getMediaContentDataResourceRequiredByContentId(Delegator delegator, String dataResourceTypeId, Collection<String> contentIdList, List<String> orderBy) throws GenericEntityException { List<EntityCondition> condList = new ArrayList<>(); condList.add(EntityCondition.makeCondition("contentTypeId", "SCP_MEDIA")); if (dataResourceTypeId != null) condList.add(EntityCondition.makeCondition("drDataResourceTypeId", dataResourceTypeId)); List<EntityCondition> contentIdCondList = new ArrayList<>(); for(String contentId : contentIdList) { contentIdCondList.add(EntityCondition.makeCondition("contentId", contentId)); } condList.add(EntityCondition.makeCondition(contentIdCondList, EntityOperator.OR)); return delegator.find("ContentDataResourceRequiredView", EntityCondition.makeCondition(condList, EntityOperator.AND), null, null, null, null); } public static EntityListIterator getMediaContentDataResourceViewTo(Delegator delegator, String dataResourceTypeId, Collection<String> contentIdList, List<String> orderBy) throws GenericEntityException { List<EntityCondition> condList = new ArrayList<>(); condList.add(EntityCondition.makeCondition("contentTypeId", "SCP_MEDIA_VARIANT")); if (dataResourceTypeId != null) condList.add(EntityCondition.makeCondition("drDataResourceTypeId", dataResourceTypeId)); List<EntityCondition> contentIdCondList = new ArrayList<>(); 
for(String contentId : contentIdList) { contentIdCondList.add(EntityCondition.makeCondition("contentId", contentId)); } condList.add(EntityCondition.makeCondition(contentIdCondList, EntityOperator.OR)); return delegator.find("ContentAssocDataResourceViewTo", EntityCondition.makeCondition(condList, EntityOperator.AND), null, null, null, null); } // TODO: REVIEW: for now we are intentionally ignoring the thruDate on ContentAssoc to simplify. // I don't see the point in keeping old records... public static List<GenericValue> getVariantContentAssocTo(HttpServletRequest request, String contentId) throws GenericEntityException { Delegator delegator = (Delegator) request.getAttribute("delegator"); return getVariantContentAssocTo(delegator, contentId); } public static List<GenericValue> getVariantContentAssocTo(Delegator delegator, String contentId) throws GenericEntityException { EntityCondition cond = EntityCondition.makeCondition( EntityCondition.makeCondition("contentIdStart", contentId), EntityOperator.AND, EntityCondition.makeCondition("contentTypeId", "SCP_MEDIA_VARIANT")); // alternative: EntityCondition.makeCondition("caContentAssocTypeId", EntityOperator.LIKE, "IMGSZ_%") return delegator.findList("ContentAssocViewTo", cond, null, null, null, false); } public static Set<String> getVariantContentAssocContentIdTo(Delegator delegator, String contentId) throws GenericEntityException { List<GenericValue> assocList = getVariantContentAssocTo(delegator, contentId); Set<String> res = new LinkedHashSet<>(); if (assocList != null) { for(GenericValue assoc : assocList) { res.add(assoc.getString("contentId")); } } return res; } public static List<String> getVariantContentMapKeys(Delegator delegator, String contentId) throws GenericEntityException { List<GenericValue> assocList = getVariantContentAssocTo(delegator, contentId); List<String> res = new ArrayList<>(); if (assocList != null) { for(GenericValue assoc : assocList) { res.add(assoc.getString("caMapKey")); } } return 
res; } public static EntityListIterator findVariantContentAssocTypes(Delegator delegator) throws GenericEntityException { return delegator.find("ContentAssocType", EntityCondition.makeCondition("contentAssocTypeId", EntityOperator.LIKE, "IMGSZ_%"), null, null, null, null); } // TODO: optimize public static boolean hasVariantContent(Delegator delegator, String contentId) throws GenericEntityException { EntityCondition cond = EntityCondition.makeCondition( EntityCondition.makeCondition("contentIdStart", contentId), EntityOperator.AND, EntityCondition.makeCondition("contentTypeId", "SCP_MEDIA_VARIANT")); // alternative: EntityCondition.makeCondition("caContentAssocTypeId", EntityOperator.LIKE, "IMGSZ_%") return delegator.findCountByCondition("ContentAssocViewTo", cond, null, null) > 0; } // Responsive image utilities /** * * @param responsiveImage * @return * @throws GenericEntityException */ public static List<GenericValue> getResponsiveImageViewPorts(GenericValue responsiveImage) throws GenericEntityException { if (responsiveImage.get("srcsetModeEnumId").equals("IMG_SRCSET_VW")) return responsiveImage.getRelated("ResponsiveImageVP", null, UtilMisc.toList("sequenceNum"), false); return null; } /** * * @param delegator * @param contentId * @return * @throws GenericEntityException */ public static List<GenericValue> getResponsiveImageViewPorts(Delegator delegator, String contentId) throws GenericEntityException { return getResponsiveImageViewPorts(getResponsiveImage(delegator, contentId)); } /** * * @param delegator * @param contentId * @return * @throws GenericEntityException */ public static GenericValue getResponsiveImage(Delegator delegator, String contentId) throws GenericEntityException { return delegator.findOne("ResponsiveImage", UtilMisc.toMap("contentId", contentId), false); } /** * * @param request * @param contentId * @return * @throws GenericEntityException * @throws WebAppConfigurationException * @throws IOException */ public static Map<String, String> 
buildSrcsetMap(HttpServletRequest request, HttpServletResponse response, String contentId) throws GenericEntityException, WebAppConfigurationException, IOException { Delegator delegator = (Delegator) request.getAttribute("delegator"); // Locale locale = (Locale) request.getAttribute("locale"); String webSiteId = (String) request.getAttribute("webSiteId"); Map<String, String> srcsetEntry = UtilMisc.newInsertOrderMap(); List<GenericValue> imageSizeDimensionList = UtilMisc.newList(); List<Long> scpWidthList = UtilMisc.newList(); EntityListIterator contentDataResourceList = null; try { contentDataResourceList = getMediaContentDataResourceViewTo(delegator, "IMAGE_OBJECT", getVariantContentAssocContentIdTo(delegator, contentId), null); GenericValue contentDataResource; Map<String, GenericValue> dataResourceBySizeIdMap = UtilMisc.newMap(); while ((contentDataResource = contentDataResourceList.next()) != null) { String sizeId = contentDataResource.getString("drSizeId"); GenericValue imageSizeDimension = delegator.findOne("ImageSizeDimension", UtilMisc.toMap("sizeId", sizeId), false); if (UtilValidate.isNotEmpty(imageSizeDimension)) { imageSizeDimensionList.add(imageSizeDimension); } else { // TODO: Let's see what do in this case scpWidthList.add(contentDataResource.getLong("drScpWidth")); } dataResourceBySizeIdMap.put(sizeId, contentDataResource); } imageSizeDimensionList = EntityUtil.orderBy(imageSizeDimensionList, UtilMisc.toList("sequenceNum")); for (GenericValue imageSizeDimension : imageSizeDimensionList) { GenericValue dataResource = dataResourceBySizeIdMap.get(imageSizeDimension.getString("sizeId")); String variantUrl = RequestHandler.makeLinkAuto(request, response, "media?contentId=" + contentId + "&variant=" + dataResource.get("caMapKey"), false, false, webSiteId, false, true, true, true); // OfbizUrlBuilder.from(request).buildFullUrlWithContextPath(variantUrl, "/media?contentId=" + contentId + "&variant=" + dataResource.get("caMapKey"), true); if 
(UtilValidate.isNotEmpty(variantUrl)) { srcsetEntry.put(String.valueOf(imageSizeDimension.getLong("dimensionWidth")), variantUrl); } } } catch (GenericEntityException e) { throw e; } finally { if (UtilValidate.isNotEmpty(contentDataResourceList)) { contentDataResourceList.close(); } } return srcsetEntry; } }
package org.apereo.cas;

import org.javers.core.commit.Commit;
import org.javers.core.diff.Change;
import org.javers.core.diff.Diff;
import org.javers.core.metamodel.object.CdoSnapshot;
import org.javers.shadow.Shadow;

import java.time.LocalDate;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * This is {@link ObjectChangelog}: an audit-trail facade (backed by JaVers types
 * such as {@link Commit}, {@link Change}, {@link CdoSnapshot} and {@link Shadow})
 * that records and queries the change history of domain objects of type {@code T}.
 *
 * <p>NOTE(review): several overloads take a raw {@code Class clazz} while others take
 * {@code Class<T>}; the raw signatures are kept as-is for implementer compatibility,
 * but new overloads should prefer the parameterized form.</p>
 *
 * @author Misagh Moayyed
 * @param <T> the type parameter
 * @since 5.2.0
 */
public interface ObjectChangelog<T> {

    /**
     * Record a deletion commit for the given object.
     *
     * @param author the author of the commit
     * @param object the object being deleted
     * @return the resulting commit
     */
    Commit delete(String author, T object);

    /**
     * Record a deletion commit for the given object on a branch.
     *
     * @param author the author of the commit
     * @param object the object being deleted
     * @param branch the branch on which to record the deletion
     * @return the resulting commit
     */
    Commit delete(String author, T object, String branch);

    /**
     * Record a deletion commit for the given object on a branch, with a note.
     *
     * @param author the author of the commit
     * @param object the object being deleted
     * @param note   the note attached to the commit
     * @param branch the branch on which to record the deletion
     * @return the resulting commit
     */
    Commit delete(String author, T object, String note, String branch);

    /**
     * Record a deletion commit for the given object with extra commit properties.
     *
     * @param author     the author of the commit
     * @param object     the object being deleted
     * @param properties the commit properties (key/value metadata)
     * @return the resulting commit
     */
    Commit delete(String author, T object, Map<String, String> properties);

    /**
     * Find the branches known for the given author.
     *
     * @param author the author
     * @return the set of branch names
     */
    Set<String> findBranches(String author);

    /**
     * Compare two object states and produce their diff.
     *
     * @param oldObj the old object state
     * @param newObj the new object state
     * @return the diff between the two states
     */
    Diff compare(T oldObj, T newObj);

    /**
     * Merge the state carried by the given shadow onto a branch.
     *
     * @param shadow the shadow (historical object state) to merge
     * @param branch the target branch
     * @return the resulting commit
     */
    Commit merge(Shadow<T> shadow, String branch);

    /**
     * Record a commit for the given object on a branch, with a note.
     *
     * @param author the author of the commit
     * @param object the object being committed
     * @param branch the branch on which to record the commit
     * @param note   the note attached to the commit
     * @return the resulting commit
     */
    Commit commit(String author, T object, String branch, String note);

    /**
     * Record a commit for the given object.
     *
     * @param author the author of the commit
     * @param object the object being committed
     * @return the resulting commit
     */
    Commit commit(String author, T object);

    /**
     * Record a commit for the given object on a branch.
     * (Note the parameter order differs from {@link #commit(String, Object, String, String)}.)
     *
     * @param author the author of the commit
     * @param branch the branch on which to record the commit
     * @param object the object being committed
     * @return the resulting commit
     */
    Commit commit(String author, String branch, T object);

    /**
     * Record a commit for the given object with extra commit properties.
     *
     * @param author     the author of the commit
     * @param object     the object being committed
     * @param properties the commit properties (key/value metadata)
     * @return the resulting commit
     */
    Commit commit(String author, T object, Map<String, String> properties);

    /**
     * Find the changes recorded for the identified object.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @return the list of changes
     */
    List<Change> findChanges(Object identifier, Class clazz);

    /**
     * Find the changes recorded for the identified object, filtered by author and branch.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param branch     the branch to filter by
     * @return the list of changes
     */
    List<Change> findChanges(Object identifier, Class clazz, String author, String branch);

    /**
     * Find the changes recorded for the identified object, filtered by author,
     * date range, version and branch, skipping the given number of results.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param fromDate   the start of the date range (inclusive)
     * @param toDate     the end of the date range (inclusive)
     * @param version    the object version to filter by
     * @param skip       the number of results to skip
     * @param branch     the branch to filter by
     * @return the list of changes
     */
    List<Change> findChanges(Object identifier, Class<T> clazz, String author, LocalDate fromDate,
                             LocalDate toDate, int version, int skip, String branch);

    /**
     * Find the changes recorded for the identified object, filtered by author,
     * date range and version, skipping the given number of results.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param fromDate   the start of the date range (inclusive)
     * @param toDate     the end of the date range (inclusive)
     * @param version    the object version to filter by
     * @param skip       the number of results to skip
     * @return the list of changes
     */
    List<Change> findChanges(Object identifier, Class<T> clazz, String author, LocalDate fromDate,
                             LocalDate toDate, int version, int skip);

    /**
     * Find snapshots recorded for the identified object, up to the given limit.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param limit      the maximum number of snapshots to return
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class clazz, int limit);

    /**
     * Find snapshots recorded for the identified object, up to the given limit,
     * skipping the given number of results.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param limit      the maximum number of snapshots to return
     * @param skip       the number of results to skip
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class clazz, int limit, int skip);

    /**
     * Find all snapshots recorded for the identified object.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class clazz);

    /**
     * Find snapshots recorded for the identified object by the given author.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class clazz, String author);

    /**
     * Find snapshots recorded for the identified object by the given author on a branch.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param branch     the branch to filter by
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class clazz, String author, String branch);

    /**
     * Find snapshots recorded for the identified object, filtered by author,
     * date range, version and branch, with paging via limit and skip.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param fromDate   the start of the date range (inclusive)
     * @param toDate     the end of the date range (inclusive)
     * @param version    the object version to filter by
     * @param limit      the maximum number of snapshots to return
     * @param skip       the number of results to skip
     * @param branch     the branch to filter by
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class<T> clazz, String author, LocalDate fromDate,
                                    LocalDate toDate, int version, int limit, int skip, String branch);

    /**
     * Find snapshots recorded for the identified object, filtered by author,
     * date range and version, with paging via limit and skip.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param fromDate   the start of the date range (inclusive)
     * @param toDate     the end of the date range (inclusive)
     * @param version    the object version to filter by
     * @param limit      the maximum number of snapshots to return
     * @param skip       the number of results to skip
     * @return the list of snapshots
     */
    List<CdoSnapshot> findSnapshots(Object identifier, Class<T> clazz, String author, LocalDate fromDate,
                                    LocalDate toDate, int version, int limit, int skip);

    /**
     * Find shadows of the identified object by author and commit id.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param commitId   the commit id to filter by
     * @return the list of shadows
     */
    List<Shadow<T>> findShadows(Object identifier, Class clazz, String author, String commitId);

    /**
     * Find shadows of the identified object by commit id.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param commitId   the commit id to filter by
     * @return the list of shadows
     */
    List<Shadow<T>> findShadows(Object identifier, Class clazz, String commitId);

    /**
     * Find all shadows of the identified object.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @return the list of shadows
     */
    List<Shadow<T>> findShadows(Object identifier, Class clazz);

    /**
     * Find shadows of the identified object, filtered by author, date range,
     * version and commit id.
     *
     * @param identifier the object identifier
     * @param clazz      the object class
     * @param author     the author to filter by
     * @param fromDate   the start of the date range (inclusive)
     * @param toDate     the end of the date range (inclusive)
     * @param version    the object version to filter by
     * @param commitId   the commit id to filter by
     * @return the list of shadows
     */
    List<Shadow<T>> findShadows(Object identifier, Class clazz, String author, LocalDate fromDate,
                                LocalDate toDate, int version, String commitId);
}
/*
 * Copyright (c) 2018 David Boissier.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.codinjutsu.tools.mongo.view.edition;

import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.components.JBCheckBox;
import org.apache.commons.lang.StringUtils;
import org.bson.Document;
import org.codinjutsu.tools.mongo.view.edition.MongoEditionPanel;
import org.codinjutsu.tools.mongo.view.model.JsonDataType;
import org.codinjutsu.tools.mongo.view.table.DateTimePicker;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Map;

/**
 * Base dialog for adding a value (key or array item) to a Mongo document.
 * A combo box lets the user pick a {@link JsonDataType}; the matching
 * {@link TextFieldWrapper} editor is swapped into the parent panel.
 */
@SuppressWarnings("unchecked")
abstract class AbstractAddDialog extends DialogWrapper {

    // One shared editor instance per JSON data type; looked up when the combo selection changes.
    private static final Map<JsonDataType, TextFieldWrapper> UI_COMPONENT_BY_JSON_DATATYPE = new HashMap<>();

    static {
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.STRING, new StringFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.BOOLEAN, new BooleanFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.NUMBER, new NumberFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.NULL, new NullFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.DATE, new DateTimeFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.OBJECT, new JsonFieldWrapper());
        UI_COMPONENT_BY_JSON_DATATYPE.put(JsonDataType.ARRAY, new ArrayFieldWrapper());
    }

    final MongoEditionPanel mongoEditionPanel;
    // Editor for the currently selected data type; set by the combo listener in initCombo().
    TextFieldWrapper currentEditor = null;

    AbstractAddDialog(MongoEditionPanel mongoEditionPanel) {
        super(mongoEditionPanel, true);
        this.mongoEditionPanel = mongoEditionPanel;
    }

    /**
     * Wires up the data-type combo box: renders each {@link JsonDataType},
     * and swaps the matching editor component into {@code parentPanel}
     * whenever the selection changes. Ends by selecting STRING as default.
     *
     * @param combobox    the data-type selector
     * @param parentPanel the panel that hosts the current editor component
     */
    void initCombo(final ComboBox combobox, final JPanel parentPanel) {
        combobox.setModel(new DefaultComboBoxModel<>(JsonDataType.values()));
        combobox.setRenderer(new ColoredListCellRenderer() {

            @Override
            protected void customizeCellRenderer(JList jList, Object o, int i, boolean b, boolean b2) {
                append(((JsonDataType) o).type);
            }
        });

        combobox.setSelectedItem(null);
        combobox.addItemListener(itemEvent -> {
            // FIX: a selection change fires both a DESELECTED and a SELECTED event;
            // reacting to both rebuilt the editor panel twice per change. React to
            // SELECTED only, and guard against a null selection which would NPE on
            // the map lookup / reset() below.
            if (itemEvent.getStateChange() != ItemEvent.SELECTED) {
                return;
            }
            JsonDataType selectedType = (JsonDataType) combobox.getSelectedItem();
            if (selectedType == null) {
                return;
            }
            currentEditor = UI_COMPONENT_BY_JSON_DATATYPE.get(selectedType);
            currentEditor.reset();

            parentPanel.invalidate();
            parentPanel.removeAll();
            parentPanel.add(currentEditor.getComponent(), BorderLayout.CENTER);
            parentPanel.validate();
        });

        combobox.setSelectedItem(JsonDataType.STRING);
    }

    /**
     * @return the value currently entered in the active editor, typed per the selected data type.
     */
    public abstract Object getValue();

    /**
     * Pairs a Swing input component with typed access to the value it edits.
     *
     * @param <T> the Swing component type
     * @param <V> the value type produced by the editor
     */
    static abstract class TextFieldWrapper<T extends JComponent, V> {

        final T component;

        private TextFieldWrapper(T component) {
            this.component = component;
        }

        /** @return the value currently held by the component. */
        protected abstract V getValue();

        /** Resets the component to its initial/empty state. */
        protected abstract void reset();

        /** @return true when the component holds a usable value; default is always true. */
        boolean isValueSet() {
            return true;
        }

        T getComponent() {
            return component;
        }

        /** @throws IllegalArgumentException if no usable value is set. */
        void validate() {
            if (!isValueSet()) {
                throw new IllegalArgumentException("Value is not set");
            }
        }
    }

    /** Editor for {@link JsonDataType#STRING}: a plain text field. */
    private static class StringFieldWrapper extends TextFieldWrapper<JTextField, String> {

        private StringFieldWrapper() {
            super(new JTextField());
        }

        @Override
        public String getValue() {
            return component.getText();
        }

        @Override
        public boolean isValueSet() {
            return StringUtils.isNotBlank(component.getText());
        }

        @Override
        public void reset() {
            component.setText("");
        }
    }

    /** Editor for {@link JsonDataType#OBJECT}: parses the text as a BSON document. */
    private static class JsonFieldWrapper extends TextFieldWrapper<JTextField, Object> {

        private JsonFieldWrapper() {
            super(new JTextField());
        }

        @Override
        public Object getValue() {
            return Document.parse(component.getText());
        }

        @Override
        public boolean isValueSet() {
            return StringUtils.isNotBlank(component.getText());
        }

        @Override
        public void reset() {
            component.setText("");
        }
    }

    /** Editor for {@link JsonDataType#ARRAY}: wraps the text in a document to reuse the document parser. */
    private static class ArrayFieldWrapper extends JsonFieldWrapper {

        @Override
        public Object getValue() {
            //ugly hack to use DocumentParser instead of BsonArray
            String arrayInDoc = String.format("{\"array\": %s}", component.getText());
            return Document.parse(arrayInDoc).get("array");
        }
    }

    /** Editor for {@link JsonDataType#NUMBER}: parses the text into a Number; validate() forces the parse. */
    private static class NumberFieldWrapper extends TextFieldWrapper<JTextField, Number> {

        private NumberFieldWrapper() {
            super(new JTextField());
        }

        @Override
        public Number getValue() {
            return org.codinjutsu.tools.mongo.utils.StringUtils.parseNumber(component.getText());
        }

        @Override
        public void reset() {
            component.setText("");
        }

        @Override
        public boolean isValueSet() {
            return StringUtils.isNotBlank(component.getText());
        }

        @Override
        public void validate() {
            super.validate();
            // Force the parse so an unparseable number fails validation here rather than later.
            getValue();
        }
    }

    /** Editor for {@link JsonDataType#BOOLEAN}: a checkbox. */
    private static class BooleanFieldWrapper extends TextFieldWrapper<JBCheckBox, Boolean> {

        private BooleanFieldWrapper() {
            super(new JBCheckBox());
        }

        @Override
        public Boolean getValue() {
            return component.isSelected();
        }

        @Override
        public void reset() {
            component.setSelected(false);
        }
    }

    /** Editor for {@link JsonDataType#NULL}: a fixed label, always yielding null. */
    private static class NullFieldWrapper extends TextFieldWrapper<JLabel, Object> {

        private NullFieldWrapper() {
            super(new JLabel("null"));
        }

        @Override
        public Object getValue() {
            return null;
        }

        @Override
        public void reset() {
        }
    }

    /** Editor for {@link JsonDataType#DATE}: a date-time picker (text editing disabled). */
    private static class DateTimeFieldWrapper extends TextFieldWrapper<DateTimePicker, Date> {

        private DateTimeFieldWrapper() {
            super(DateTimePicker.create());
            component.getEditor().setEditable(false);
        }

        @Override
        public Date getValue() {
            return component.getDate();
        }

        @Override
        public boolean isValueSet() {
            return component.getDate() != null;
        }

        @Override
        public void reset() {
            component.setDate(GregorianCalendar.getInstance().getTime());
        }
    }
}
/*===========================================================================
 * Licensed Materials - Property of IBM
 * "Restricted Materials of IBM"
 *
 * IBM SDK, Java(tm) Technology Edition, v8
 * (C) Copyright IBM Corp. 2000, 2008. All Rights Reserved
 *
 * US Government Users Restricted Rights - Use, duplication or disclosure
 * restricted by GSA ADP Schedule Contract with IBM Corp.
 *===========================================================================
 */
/*
 * Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 * * * * * * * * * * * * * * * * * * * * *
 */

package javax.management.relation;

import com.sun.jmx.mbeanserver.Util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * A RoleList represents a list of roles (Role objects). It is used as
 * parameter when creating a relation, and when trying to set several roles in
 * a relation (via 'setRoles()' method). It is returned as part of a
 * RoleResult, to provide roles successfully retrieved.
 *
 * @since 1.5
 */
/* We cannot extend ArrayList<Role> because our legacy
   add(Role) method would then override add(E) in ArrayList<E>,
   and our return value is void whereas ArrayList.add(E)'s is boolean.
   Likewise for set(int,Role).  Grrr.  We cannot use covariance
   to override the most important methods and have them return
   Role, either, because that would break subclasses that
   override those methods in turn (using the original return type
   of Object).  Finally, we cannot implement Iterable<Role>
   so you could write
       for (Role r : roleList)
   because ArrayList<> implements Iterable<> and the same class cannot
   implement two versions of a generic interface.  Instead we provide
   the asList() method so you can write
       for (Role r : roleList.asList())
*/
public class RoleList extends ArrayList<Object> {

    // Lazily-enforced type safety (see asList()):
    // typeSafe becomes true the first time asList() is called; from then on
    // every mutator override below rejects non-Role elements eagerly.
    private transient boolean typeSafe;
    // tainted becomes true as soon as a (possibly) non-Role element is added
    // through one of the ArrayList<Object> mutators; asList() then re-checks
    // the whole list before handing out the Role-typed view.
    private transient boolean tainted;

    /* Serial version */
    private static final long serialVersionUID = 5568344346499649313L;

    //
    // Constructors
    //

    /**
     * Constructs an empty RoleList.
     */
    public RoleList() {
        super();
    }

    /**
     * Constructs an empty RoleList with the initial capacity
     * specified.
     *
     * @param initialCapacity initial capacity
     */
    public RoleList(int initialCapacity) {
        super(initialCapacity);
    }

    /**
     * Constructs a {@code RoleList} containing the elements of the
     * {@code List} specified, in the order in which they are returned by
     * the {@code List}'s iterator. The {@code RoleList} instance has
     * an initial capacity of 110% of the size of the {@code List}
     * specified.
     *
     * @param list the {@code List} that defines the initial contents of
     * the new {@code RoleList}.
     *
     * @exception IllegalArgumentException if the {@code list} parameter
     * is {@code null} or if the {@code list} parameter contains any
     * non-Role objects.
     *
     * @see ArrayList#ArrayList(java.util.Collection)
     */
    public RoleList(List<Role> list) throws IllegalArgumentException {
        // Check for null parameter
        //
        if (list == null)
            throw new IllegalArgumentException("Null parameter");

        // Check for non-Role objects
        //
        checkTypeSafe(list);

        // Build the List<Role>
        //
        super.addAll(list);
    }

    /**
     * Return a view of this list as a {@code List<Role>}.
     * Changes to the returned value are reflected by changes
     * to the original {@code RoleList} and vice versa.
     *
     * @return a {@code List<Role>} whose contents
     * reflect the contents of this {@code RoleList}.
     *
     * <p>If this method has ever been called on a given
     * {@code RoleList} instance, a subsequent attempt to add
     * an object to that instance which is not a {@code Role}
     * will fail with an {@code IllegalArgumentException}. For compatibility
     * reasons, a {@code RoleList} on which this method has never
     * been called does allow objects other than {@code Role}s to
     * be added.</p>
     *
     * @throws IllegalArgumentException if this {@code RoleList} contains
     * an element that is not a {@code Role}.
     *
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public List<Role> asList() {
        if (!typeSafe) {
            // Only re-scan the list if a possibly-foreign element was added
            // since the last check; otherwise the list is known to be clean.
            if (tainted)
                checkTypeSafe(this);
            typeSafe = true;
        }
        return Util.cast(this);
    }

    //
    // Accessors
    //

    /**
     * Adds the Role specified as the last element of the list.
     *
     * @param role the role to be added.
     *
     * @exception IllegalArgumentException  if the role is null.
     */
    public void add(Role role)
        throws IllegalArgumentException {

        if (role == null) {
            String excMsg = "Invalid parameter";
            throw new IllegalArgumentException(excMsg);
        }
        super.add(role);
    }

    /**
     * Inserts the role specified as an element at the position specified.
     * Elements with an index greater than or equal to the current position are
     * shifted up.
     *
     * @param index  The position in the list where the new Role
     * object is to be inserted.
     * @param role  The Role object to be inserted.
     *
     * @exception IllegalArgumentException  if the role is null.
     * @exception IndexOutOfBoundsException  if accessing with an index
     * outside of the list.
     */
    public void add(int index,
                    Role role)
        throws IllegalArgumentException,
               IndexOutOfBoundsException {

        if (role == null) {
            String excMsg = "Invalid parameter";
            throw new IllegalArgumentException(excMsg);
        }

        super.add(index, role);
    }

    /**
     * Sets the element at the position specified to be the role
     * specified.
     * The previous element at that position is discarded.
     *
     * @param index  The position specified.
     * @param role  The value to which the role element should be set.
     *
     * @exception IllegalArgumentException  if the role is null.
     * @exception IndexOutOfBoundsException  if accessing with an index
     * outside of the list.
     */
    public void set(int index,
                    Role role)
        throws IllegalArgumentException,
               IndexOutOfBoundsException {

        if (role == null) {
            // Revisit [cebro] Localize message
            String excMsg = "Invalid parameter.";
            throw new IllegalArgumentException(excMsg);
        }

        super.set(index, role);
    }

    /**
     * Appends all the elements in the RoleList specified to the end
     * of the list, in the order in which they are returned by the Iterator of
     * the RoleList specified.
     *
     * @param roleList  Elements to be inserted into the list (can be null)
     *
     * @return true if this list changed as a result of the call.
     *
     * @exception IndexOutOfBoundsException  if accessing with an index
     * outside of the list.
     *
     * @see ArrayList#addAll(Collection)
     */
    public boolean addAll(RoleList roleList)
        throws IndexOutOfBoundsException {

        if (roleList == null) {
            // Legacy contract quirk: a null argument reports "changed" even
            // though nothing was appended.
            return true;
        }

        return (super.addAll(roleList));
    }

    /**
     * Inserts all of the elements in the RoleList specified into this
     * list, starting at the specified position, in the order in which they are
     * returned by the Iterator of the RoleList specified.
     *
     * @param index  Position at which to insert the first element from the
     * RoleList specified.
     * @param roleList  Elements to be inserted into the list.
     *
     * @return true if this list changed as a result of the call.
     *
     * @exception IllegalArgumentException  if the role is null.
     * @exception IndexOutOfBoundsException  if accessing with an index
     * outside of the list.
     *
     * @see ArrayList#addAll(int, Collection)
     */
    public boolean addAll(int index,
                          RoleList roleList)
        throws IllegalArgumentException,
               IndexOutOfBoundsException {

        if (roleList == null) {
            // Revisit [cebro] Localize message
            String excMsg = "Invalid parameter.";
            throw new IllegalArgumentException(excMsg);
        }

        return (super.addAll(index, roleList));
    }

    /*
     * Override all of the methods from ArrayList<Object> that might add
     * a non-Role to the List, and disallow that if asList has ever
     * been called on this instance.
     */

    @Override
    public boolean add(Object o) {
        if (!tainted)
            tainted = isTainted(o);
        if (typeSafe)
            checkTypeSafe(o);
        return super.add(o);
    }

    @Override
    public void add(int index, Object element) {
        if (!tainted)
            tainted = isTainted(element);
        if (typeSafe)
            checkTypeSafe(element);
        super.add(index, element);
    }

    @Override
    public boolean addAll(Collection<?> c) {
        if (!tainted)
            tainted = isTainted(c);
        if (typeSafe)
            checkTypeSafe(c);
        return super.addAll(c);
    }

    @Override
    public boolean addAll(int index, Collection<?> c) {
        if (!tainted)
            tainted = isTainted(c);
        if (typeSafe)
            checkTypeSafe(c);
        return super.addAll(index, c);
    }

    @Override
    public Object set(int index, Object element) {
        if (!tainted)
            tainted = isTainted(element);
        if (typeSafe)
            checkTypeSafe(element);
        return super.set(index, element);
    }

    /**
     * Throws IllegalArgumentException if o is a non-Role object.
     * The cast-and-discard idiom is deliberate: the ClassCastException is
     * kept as the cause of the IllegalArgumentException.
     */
    private static void checkTypeSafe(Object o) {
        try {
            o = (Role) o;
        } catch (ClassCastException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Throws IllegalArgumentException if c contains any non-Role objects.
     * (The cast on each element is the check; the assigned variable is unused.)
     */
    private static void checkTypeSafe(Collection<?> c) {
        try {
            Role r;
            for (Object o : c)
                r = (Role) o;
        } catch (ClassCastException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Returns true if o is a non-Role object.
     */
    private static boolean isTainted(Object o) {
        try {
            checkTypeSafe(o);
        } catch (IllegalArgumentException e) {
            return true;
        }
        return false;
    }

    /**
     * Returns true if c contains any non-Role objects.
     */
    private static boolean isTainted(Collection<?> c) {
        try {
            checkTypeSafe(c);
        } catch (IllegalArgumentException e) {
            return true;
        }
        return false;
    }
}
/*
 * $Id: PortableDocumentImpl.java 2700 2006-01-11 11:27:08Z zbinl $
 *
 * ace - a collaborative editor
 * Copyright (C) 2005 Mark Bigler, Simon Raess, Lukas Zbinden
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 */

package ch.iserver.ace.net.core;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.jboss.logging.Logger;

import ch.iserver.ace.CaretUpdate;
import ch.iserver.ace.Fragment;
import ch.iserver.ace.net.RemoteUserProxy;

/**
 * Default implementation of interface {@link ch.iserver.ace.net.PortableDocument} for
 * the network layer. Holds the document payload for transfer: its text fragments,
 * the per-participant caret/selection state, and the participant-to-proxy mapping,
 * together with the document, publisher and local-participant identifiers.
 */
public class PortableDocumentImpl implements PortableDocumentExt {

    private static Logger LOG = Logger.getLogger(PortableDocumentImpl.class);

    // Ordered text fragments making up the document content.
    private List fragments;

    // participantId (Integer) -> CaretUpdate; insertion-ordered.
    private Map selections;

    // participantId (Integer) -> RemoteUserProxyExt; insertion-ordered.
    private Map proxies;

    // Identifier of this document.
    private String docId;

    // Identifier of the user who published the document.
    private String publisherId;

    // Participant id assigned to the local user.
    private int participantId;

    /**
     * Creates an empty portable document backed by synchronized,
     * insertion-ordered collections.
     */
    public PortableDocumentImpl() {
        fragments = Collections.synchronizedList(new ArrayList());
        selections = Collections.synchronizedMap(new LinkedHashMap());
        proxies = Collections.synchronizedMap(new LinkedHashMap());
    }

    /************************************************/
    /** methods from interface PortableDocumentExt **/
    /************************************************/

    /**
     * {@inheritDoc}
     */
    public void addFragment(Fragment fragment) {
        fragments.add(fragment);
    }

    /**
     * {@inheritDoc}
     */
    public void addParticipant(int id, RemoteUserProxyExt proxy) {
        proxies.put(new Integer(id), proxy);
    }

    /**
     * {@inheritDoc}
     */
    public void setSelection(int participantId, CaretUpdate selection) {
        selections.put(new Integer(participantId), selection);
    }

    /**
     * {@inheritDoc}
     */
    public void setDocumentId(String id) {
        this.docId = id;
    }

    /**
     * {@inheritDoc}
     */
    public String getDocumentId() {
        return docId;
    }

    /**
     * {@inheritDoc}
     */
    public void setPublisherId(String publisherId) {
        this.publisherId = publisherId;
    }

    /**
     * {@inheritDoc}
     */
    public String getPublisherId() {
        return publisherId;
    }

    /**
     * {@inheritDoc}
     */
    public int getParticipantId() {
        return participantId;
    }

    /**
     * {@inheritDoc}
     */
    public void setParticpantId(int id) {
        LOG.debug("setParticipantId(" + id + ")");
        this.participantId = id;
    }

    /**
     * {@inheritDoc}
     */
    public List getUsers() {
        // Defensive copy: callers get a snapshot, not the live map values.
        return new ArrayList(proxies.values());
    }

    /**
     * {@inheritDoc}
     */
    public Map getParticipantIdUserMapping() {
        return proxies;
    }

    /*********************************************/
    /** methods from interface PortableDocument **/
    /*********************************************/

    /**
     * @see ch.iserver.ace.net.PortableDocument#getParticipantIds()
     */
    public int[] getParticipantIds() {
        int[] result = new int[proxies.size()];
        int index = 0;
        for (Iterator keys = proxies.keySet().iterator(); keys.hasNext(); ) {
            result[index++] = ((Integer) keys.next()).intValue();
        }
        return result;
    }

    /**
     * @see ch.iserver.ace.net.PortableDocument#getUserProxy(int)
     */
    public RemoteUserProxy getUserProxy(int participantId) {
        return (RemoteUserProxy) proxies.get(new Integer(participantId));
    }

    /**
     * @see ch.iserver.ace.net.PortableDocument#getSelection(int)
     */
    public CaretUpdate getSelection(int participantId) {
        return (CaretUpdate) selections.get(new Integer(participantId));
    }

    /**
     * @see ch.iserver.ace.net.PortableDocument#getFragments()
     */
    public Iterator getFragments() {
        return fragments.iterator();
    }

    /**
     * {@inheritDoc}
     */
    public String toString() {
        StringBuffer buffer = new StringBuffer("PortableDocumentImpl(");
        buffer.append(docId).append(", ").append(publisherId).append(", ");
        buffer.append(proxies.keySet()).append(" participants, ");
        buffer.append(fragments.size()).append(" fragments, ");
        buffer.append(selections.keySet()).append(" selections)");
        return buffer.toString();
    }
}
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.repo.sql.data.common; import com.evolveum.midpoint.prism.ItemDefinition; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.repo.sql.data.common.embedded.RPolyString; import com.evolveum.midpoint.repo.sql.data.common.enums.ROperationResultStatus; import com.evolveum.midpoint.repo.sql.query.definition.JaxbName; import com.evolveum.midpoint.repo.sql.util.DtoTranslationException; import com.evolveum.midpoint.repo.sql.util.RUtil; import com.evolveum.midpoint.schema.GetOperationOptions; import com.evolveum.midpoint.schema.SelectorOptions; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import org.hibernate.annotations.Cascade; import org.hibernate.annotations.Columns; import org.hibernate.annotations.ForeignKey; import org.hibernate.annotations.Index; import javax.persistence.*; import java.util.Collection; import java.util.HashSet; import java.util.Set; /** * @author lazyman */ @Entity @Table(uniqueConstraints = @UniqueConstraint(columnNames = {"name_norm"})) @org.hibernate.annotations.Table(appliesTo = "m_user", indexes = {@Index(name = "iFullName", columnNames = "fullName_orig"), @Index(name = "iFamilyName", columnNames = "familyName_orig"), @Index(name = "iGivenName", columnNames = "givenName_orig"), @Index(name = "iLocality", columnNames = "locality_orig")}) @ForeignKey(name = "fk_user") public class RUser extends 
RFocus<UserType> implements OperationResult { private RPolyString name; private RPolyString fullName; private RPolyString givenName; private RPolyString familyName; private RPolyString additionalName; private RPolyString honorificPrefix; private RPolyString honorificSuffix; private String emailAddress; private String telephoneNumber; private String employeeNumber; private Set<String> employeeType; private Set<RPolyString> organizationalUnit; private RPolyString localityUser; private String costCenter; private String locale; private String timezone; private RPolyString title; private RPolyString nickName; private String preferredLanguage; private Set<RPolyString> organization; //operation result private ROperationResultStatus status; //user photo private boolean hasPhoto; private Set<RUserPhoto> jpegPhoto; @ElementCollection @ForeignKey(name = "fk_user_organization") @CollectionTable(name = "m_user_organization", joinColumns = { @JoinColumn(name = "user_oid", referencedColumnName = "oid") }) @Cascade({org.hibernate.annotations.CascadeType.ALL}) public Set<RPolyString> getOrganization() { return organization; } @Embedded public RPolyString getAdditionalName() { return additionalName; } public String getEmailAddress() { return emailAddress; } @ElementCollection @ForeignKey(name = "fk_user_org_unit") @CollectionTable(name = "m_user_organizational_unit", joinColumns = { @JoinColumn(name = "user_oid", referencedColumnName = "oid") }) @Cascade({org.hibernate.annotations.CascadeType.ALL}) public Set<RPolyString> getOrganizationalUnit() { return organizationalUnit; } public String getTelephoneNumber() { return telephoneNumber; } @ElementCollection @ForeignKey(name = "fk_user_employee_type") @CollectionTable(name = "m_user_employee_type", joinColumns = { @JoinColumn(name = "user_oid", referencedColumnName = "oid") }) @Cascade({org.hibernate.annotations.CascadeType.ALL}) public Set<String> getEmployeeType() { return employeeType; } @Embedded public RPolyString getFamilyName() 
{ return familyName; } @Embedded public RPolyString getFullName() { return fullName; } @Embedded public RPolyString getGivenName() { return givenName; } @JaxbName(localPart = "locality") @Embedded @AttributeOverrides({ @AttributeOverride(name = "orig", column = @Column(name = "locality_orig")), @AttributeOverride(name = "norm", column = @Column(name = "locality_norm")) }) public RPolyString getLocalityUser() { return localityUser; } @Index(name = "iEmployeeNumber") public String getEmployeeNumber() { return employeeNumber; } @Embedded public RPolyString getHonorificPrefix() { return honorificPrefix; } @Embedded public RPolyString getHonorificSuffix() { return honorificSuffix; } @Embedded public RPolyString getName() { return name; } public String getCostCenter() { return costCenter; } public String getLocale() { return locale; } @Embedded public RPolyString getNickName() { return nickName; } public String getPreferredLanguage() { return preferredLanguage; } public String getTimezone() { return timezone; } @Embedded public RPolyString getTitle() { return title; } @Enumerated(EnumType.ORDINAL) public ROperationResultStatus getStatus() { return status; } public boolean isHasPhoto() { return hasPhoto; } @OneToMany(mappedBy = "owner", orphanRemoval = true) @Cascade({org.hibernate.annotations.CascadeType.ALL}) public Set<RUserPhoto> getJpegPhoto() { if (jpegPhoto == null) { jpegPhoto = new HashSet<>(); } return jpegPhoto; } public void setHasPhoto(boolean hasPhoto) { this.hasPhoto = hasPhoto; } public void setJpegPhoto(Set<RUserPhoto> jpegPhoto) { this.jpegPhoto = jpegPhoto; } public void setStatus(ROperationResultStatus status) { this.status = status; } public void setCostCenter(String costCenter) { this.costCenter = costCenter; } public void setLocale(String locale) { this.locale = locale; } public void setOrganization(Set<RPolyString> organization) { this.organization = organization; } public void setNickName(RPolyString nickName) { this.nickName = nickName; } public 
void setPreferredLanguage(String preferredLanguage) { this.preferredLanguage = preferredLanguage; } public void setTimezone(String timezone) { this.timezone = timezone; } public void setTitle(RPolyString title) { this.title = title; } public void setName(RPolyString name) { this.name = name; } public void setAdditionalName(RPolyString additionalName) { this.additionalName = additionalName; } public void setEmailAddress(String emailAddress) { this.emailAddress = emailAddress; } public void setEmployeeNumber(String employeeNumber) { this.employeeNumber = employeeNumber; } public void setEmployeeType(Set<String> employeeType) { this.employeeType = employeeType; } public void setFamilyName(RPolyString familyName) { this.familyName = familyName; } public void setGivenName(RPolyString givenName) { this.givenName = givenName; } public void setHonorificPrefix(RPolyString honorificPrefix) { this.honorificPrefix = honorificPrefix; } public void setHonorificSuffix(RPolyString honorificSuffix) { this.honorificSuffix = honorificSuffix; } public void setLocalityUser(RPolyString locality) { this.localityUser = locality; } public void setOrganizationalUnit(Set<RPolyString> organizationalUnit) { this.organizationalUnit = organizationalUnit; } public void setTelephoneNumber(String telephoneNumber) { this.telephoneNumber = telephoneNumber; } public void setFullName(RPolyString fullName) { this.fullName = fullName; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (!super.equals(o)) return false; RUser rUser = (RUser) o; if (name != null ? !name.equals(rUser.name) : rUser.name != null) return false; if (additionalName != null ? !additionalName.equals(rUser.additionalName) : rUser.additionalName != null) return false; if (emailAddress != null ? !emailAddress.equals(rUser.emailAddress) : rUser.emailAddress != null) return false; if (employeeNumber != null ? 
!employeeNumber.equals(rUser.employeeNumber) : rUser.employeeNumber != null) return false; if (employeeType != null ? !employeeType.equals(rUser.employeeType) : rUser.employeeType != null) return false; if (familyName != null ? !familyName.equals(rUser.familyName) : rUser.familyName != null) return false; if (fullName != null ? !fullName.equals(rUser.fullName) : rUser.fullName != null) return false; if (givenName != null ? !givenName.equals(rUser.givenName) : rUser.givenName != null) return false; if (honorificPrefix != null ? !honorificPrefix.equals(rUser.honorificPrefix) : rUser.honorificPrefix != null) return false; if (honorificSuffix != null ? !honorificSuffix.equals(rUser.honorificSuffix) : rUser.honorificSuffix != null) return false; if (localityUser != null ? !localityUser.equals(rUser.localityUser) : rUser.localityUser != null) return false; if (organizationalUnit != null ? !organizationalUnit.equals(rUser.organizationalUnit) : rUser.organizationalUnit != null) return false; if (telephoneNumber != null ? !telephoneNumber.equals(rUser.telephoneNumber) : rUser.telephoneNumber != null) return false; if (locale != null ? !locale.equals(rUser.locale) : rUser.locale != null) return false; if (title != null ? !title.equals(rUser.title) : rUser.title != null) return false; if (nickName != null ? !nickName.equals(rUser.nickName) : rUser.nickName != null) return false; if (preferredLanguage != null ? !preferredLanguage.equals(rUser.preferredLanguage) : rUser.preferredLanguage != null) return false; if (timezone != null ? !timezone.equals(rUser.timezone) : rUser.timezone != null) return false; if (costCenter != null ? !costCenter.equals(rUser.costCenter) : rUser.costCenter != null) return false; if (organization != null ? !organization.equals(rUser.organization) : rUser.organization != null) return false; if (status != rUser.status) return false; return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (name != null ? 
name.hashCode() : 0); result = 31 * result + (fullName != null ? fullName.hashCode() : 0); result = 31 * result + (givenName != null ? givenName.hashCode() : 0); result = 31 * result + (familyName != null ? familyName.hashCode() : 0); result = 31 * result + (honorificPrefix != null ? honorificPrefix.hashCode() : 0); result = 31 * result + (honorificSuffix != null ? honorificSuffix.hashCode() : 0); result = 31 * result + (employeeNumber != null ? employeeNumber.hashCode() : 0); result = 31 * result + (localityUser != null ? localityUser.hashCode() : 0); result = 31 * result + (costCenter != null ? costCenter.hashCode() : 0); result = 31 * result + (locale != null ? locale.hashCode() : 0); result = 31 * result + (title != null ? title.hashCode() : 0); result = 31 * result + (nickName != null ? nickName.hashCode() : 0); result = 31 * result + (preferredLanguage != null ? preferredLanguage.hashCode() : 0); result = 31 * result + (timezone != null ? timezone.hashCode() : 0); result = 31 * result + (status != null ? 
status.hashCode() : 0); return result; } public static void copyFromJAXB(UserType jaxb, RUser repo, PrismContext prismContext) throws DtoTranslationException { RFocus.copyFromJAXB(jaxb, repo, prismContext); repo.setName(RPolyString.copyFromJAXB(jaxb.getName())); repo.setFullName(RPolyString.copyFromJAXB(jaxb.getFullName())); repo.setGivenName(RPolyString.copyFromJAXB(jaxb.getGivenName())); repo.setFamilyName(RPolyString.copyFromJAXB(jaxb.getFamilyName())); repo.setHonorificPrefix(RPolyString.copyFromJAXB(jaxb.getHonorificPrefix())); repo.setHonorificSuffix(RPolyString.copyFromJAXB(jaxb.getHonorificSuffix())); repo.setEmployeeNumber(jaxb.getEmployeeNumber()); repo.setLocalityUser(RPolyString.copyFromJAXB(jaxb.getLocality())); repo.setAdditionalName(RPolyString.copyFromJAXB(jaxb.getAdditionalName())); repo.setEmailAddress(jaxb.getEmailAddress()); repo.setTelephoneNumber(jaxb.getTelephoneNumber()); repo.setCostCenter(jaxb.getCostCenter()); repo.setLocale(jaxb.getLocale()); repo.setTimezone(jaxb.getTimezone()); repo.setPreferredLanguage(jaxb.getPreferredLanguage()); repo.setTitle(RPolyString.copyFromJAXB(jaxb.getTitle())); repo.setNickName(RPolyString.copyFromJAXB(jaxb.getNickName())); ItemDefinition def = jaxb.asPrismObject().getDefinition(); RUtil.copyResultFromJAXB(def, jaxb.F_RESULT, jaxb.getResult(), repo, prismContext); //sets repo.setEmployeeType(RUtil.listToSet(jaxb.getEmployeeType())); repo.setOrganizationalUnit(RUtil.listPolyToSet(jaxb.getOrganizationalUnit())); repo.setOrganization(RUtil.listPolyToSet(jaxb.getOrganization())); if (jaxb.getJpegPhoto() != null) { RUserPhoto photo = new RUserPhoto(); photo.setOwner(repo); photo.setPhoto(jaxb.getJpegPhoto()); repo.getJpegPhoto().add(photo); repo.setHasPhoto(true); } } @Override public UserType toJAXB(PrismContext prismContext, Collection<SelectorOptions<GetOperationOptions>> options) throws DtoTranslationException { UserType object = new UserType(); RUtil.revive(object, prismContext); RUser.copyToJAXB(this, 
object, prismContext, options); return object; } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2015 the original author or authors.
 */
package org.assertj.core.api;

import java.util.Comparator;

import org.assertj.core.data.Index;
import org.assertj.core.internal.ComparatorBasedComparisonStrategy;
import org.assertj.core.internal.ShortArrays;
import org.assertj.core.util.VisibleForTesting;

/**
 * Base class for all implementations of assertions for arrays of {@code short}s.
 *
 * @param <S> the "self" type of this assertion class, used to allow fluent chaining in subclasses.
 */
public abstract class AbstractShortArrayAssert<S extends AbstractShortArrayAssert<S>>
    extends AbstractArrayAssert<S, short[], Short> {

  // Swapped out by usingElementComparator(...) so every assertion honors the custom comparator.
  @VisibleForTesting
  protected ShortArrays arrays = ShortArrays.instance();

  public AbstractShortArrayAssert(short[] actual, Class<?> selfType) {
    super(actual, selfType);
  }

  /** {@inheritDoc} */
  @Override
  public void isNullOrEmpty() {
    arrays.assertNullOrEmpty(info, actual);
  }

  /** {@inheritDoc} */
  @Override
  public void isEmpty() {
    arrays.assertEmpty(info, actual);
  }

  /** {@inheritDoc} */
  @Override
  public S isNotEmpty() {
    arrays.assertNotEmpty(info, actual);
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S hasSize(int expected) {
    arrays.assertHasSize(info, actual, expected);
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S hasSameSizeAs(Iterable<?> other) {
    arrays.assertHasSameSizeAs(info, actual, other);
    return myself;
  }

  /**
   * Verifies that the actual array contains the given values, in any order.
   *
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array does not contain the given values.
   */
  public S contains(short... values) {
    arrays.assertContains(info, actual, values);
    return myself;
  }

  /**
   * Verifies that the actual array contains only the given values and nothing else, in any order.
   *
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some
   *           or none of the given values, or the actual array contains more values than the given ones.
   */
  public S containsOnly(short... values) {
    arrays.assertContainsOnly(info, actual, values);
    return myself;
  }

  /**
   * Verifies that the actual array contains the given values only once.
   * <p>
   * Examples :
   *
   * <pre><code class='java'>
   * // assertion will pass
   * assertThat(new short[] { 1, 2, 3 }).containsOnlyOnce(1, 2);
   *
   * // assertions will fail
   * assertThat(new short[] { 1, 2, 1 }).containsOnlyOnce(1);
   * assertThat(new short[] { 1, 2, 3 }).containsOnlyOnce(4);
   * assertThat(new short[] { 1, 2, 3, 3 }).containsOnlyOnce(0, 1, 2, 3, 4, 5);
   * </code></pre>
   *
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
   *           or none of the given values, or the actual group contains more than once these values.
   */
  public S containsOnlyOnce(short... values) {
    arrays.assertContainsOnlyOnce(info, actual, values);
    return myself;
  }

  /**
   * Verifies that the actual array contains the given sequence, without any other values between them.
   * <p>
   * Example:
   *
   * <pre><code class='java'>
   * // assertion will pass
   * assertThat(new short[] { 1, 2, 3 }).containsSequence(1, 2);
   *
   * // assertion will fail
   * assertThat(new short[] { 1, 2, 3 }).containsSequence(1, 3);
   * assertThat(new short[] { 1, 2, 3 }).containsSequence(2, 1);
   * </code></pre>
   *
   * @param sequence the sequence of values to look for.
   * @return myself assertion object.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the given array is {@code null}.
   * @throws AssertionError if the actual array does not contain the given sequence.
   */
  public S containsSequence(short... sequence) {
    arrays.assertContainsSequence(info, actual, sequence);
    return myself;
  }

  /**
   * Verifies that the actual array contains the given subsequence (possibly with other values between them).
   * <p>
   * Example:
   *
   * <pre><code class='java'>
   * // assertion will pass
   * assertThat(new short[] { 1, 2, 3 }).containsSubsequence(1, 2);
   * assertThat(new short[] { 1, 2, 3 }).containsSubsequence(1, 3);
   *
   * // assertion will fail
   * assertThat(new short[] { 1, 2, 3 }).containsSubsequence(2, 1);
   * </code></pre>
   *
   * @param subsequence the subsequence of values to look for.
   * @return myself assertion object.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the given array is {@code null}.
   * @throws AssertionError if the actual array does not contain the given subsequence.
   */
  public S containsSubsequence(short... subsequence) {
    arrays.assertContainsSubsequence(info, actual, subsequence);
    return myself;
  }

  /**
   * Verifies that the actual array contains the given value at the given index.
   *
   * @param value the value to look for.
   * @param index the index where the value should be stored in the actual array.
   * @return myself assertion object.
   * @throws AssertionError if the actual array is {@code null} or empty.
   * @throws NullPointerException if the given {@code Index} is {@code null}.
   * @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of
   *           the actual array.
   * @throws AssertionError if the actual array does not contain the given value at the given index.
   */
  public S contains(short value, Index index) {
    arrays.assertContains(info, actual, value, index);
    return myself;
  }

  /**
   * Verifies that the actual array does not contain the given values.
   *
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array contains any of the given values.
   */
  public S doesNotContain(short... values) {
    arrays.assertDoesNotContain(info, actual, values);
    return myself;
  }

  /**
   * Verifies that the actual array does not contain the given value at the given index.
   *
   * @param value the value to look for.
   * @param index the index where the value should be stored in the actual array.
   * @return myself assertion object.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws NullPointerException if the given {@code Index} is {@code null}.
   * @throws AssertionError if the actual array contains the given value at the given index.
   */
  public S doesNotContain(short value, Index index) {
    arrays.assertDoesNotContain(info, actual, value, index);
    return myself;
  }

  /**
   * Verifies that the actual array does not contain duplicates.
   *
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array contains duplicates.
   */
  public S doesNotHaveDuplicates() {
    arrays.assertDoesNotHaveDuplicates(info, actual);
    return myself;
  }

  /**
   * Verifies that the actual array starts with the given sequence of values, without any other values between them.
   * Similar to <code>{@link #containsSequence(short...)}</code>, but it also verifies that the first element in the
   * sequence is also first element of the actual array.
   *
   * @param sequence the sequence of values to look for.
   * @return myself assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array does not start with the given sequence.
   */
  public S startsWith(short... sequence) {
    arrays.assertStartsWith(info, actual, sequence);
    return myself;
  }

  /**
   * Verifies that the actual array ends with the given sequence of values, without any other values between them.
   * Similar to <code>{@link #containsSequence(short...)}</code>, but it also verifies that the last element in the
   * sequence is also last element of the actual array.
   *
   * @param sequence the sequence of values to look for.
   * @return myself assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual array is {@code null}.
   * @throws AssertionError if the actual array does not end with the given sequence.
   */
  public S endsWith(short... sequence) {
    arrays.assertEndsWith(info, actual, sequence);
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S isSorted() {
    arrays.assertIsSorted(info, actual);
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S isSortedAccordingTo(Comparator<? super Short> comparator) {
    arrays.assertIsSortedAccordingToComparator(info, actual, comparator);
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S usingElementComparator(Comparator<? super Short> customComparator) {
    this.arrays = new ShortArrays(new ComparatorBasedComparisonStrategy(customComparator));
    return myself;
  }

  /** {@inheritDoc} */
  @Override
  public S usingDefaultElementComparator() {
    this.arrays = ShortArrays.instance();
    return myself;
  }

  /**
   * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>.
   * <p>
   * Example :
   *
   * <pre><code class='java'>
   * short[] shorts = { 1, 2, 3 };
   *
   * // assertion will pass
   * assertThat(shorts).containsExactly(1, 2, 3);
   *
   * // assertion will fail as actual and expected orders differ.
   * assertThat(shorts).containsExactly(2, 1, 3);
   * </code></pre>
   *
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group
   *           contains some or none of the given values, or the actual group contains more values than the given ones
   *           or values are the same but the order is not.
   */
  public S containsExactly(short... values) {
    // FIX: previously delegated to objects.assertEqual(info, actual, values), which bypassed the
    // (possibly comparator-based) ShortArrays strategy — so a comparator installed via
    // usingElementComparator(...) was silently ignored — and produced a generic "expected equal"
    // failure rather than the detailed diagnostics documented above. Route through arrays like
    // every other assertion in this class.
    arrays.assertContainsExactly(info, actual, values);
    return myself;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.zookeeper.server.quorum;

import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.KeeperException.SessionMovedException;
import org.apache.zookeeper.KeeperException.UnknownSessionException;
import org.apache.zookeeper.server.SessionTrackerImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Session tracker used by learners (followers and observers). New sessions are
 * created in a wrapped LocalSessionTracker and may later be upgraded to global
 * sessions, keeping the same session ID; on upgrade the session is dropped
 * from the local collections, and the caller is responsible for queuing a
 * session-creation request to the leader. The tracker also records which
 * global sessions have been touched here, so that information can be shipped
 * to the leader with a ping.
 */
public class LearnerSessionTracker extends UpgradeableSessionTracker {
    private static final Logger LOG = LoggerFactory.getLogger(LearnerSessionTracker.class);

    private final SessionExpirer expirer;

    // Accumulates touches for global sessions; swapped wholesale by snapshot().
    private final AtomicReference<Map<Long, Integer>> touchTable =
        new AtomicReference<Map<Long, Integer>>();

    private final long serverId;
    private final AtomicLong nextSessionId = new AtomicLong();

    private final boolean localSessionsEnabled;
    private final ConcurrentMap<Long, Integer> globalSessionsWithTimeouts;

    public LearnerSessionTracker(SessionExpirer expirer,
            ConcurrentMap<Long, Integer> sessionsWithTimeouts,
            int tickTime, long id, boolean localSessionsEnabled) {
        this.expirer = expirer;
        this.touchTable.set(new ConcurrentHashMap<Long, Integer>());
        this.globalSessionsWithTimeouts = sessionsWithTimeouts;
        this.serverId = id;
        nextSessionId.set(SessionTrackerImpl.initializeNextSession(serverId));

        this.localSessionsEnabled = localSessionsEnabled;
        if (this.localSessionsEnabled) {
            createLocalSessionTracker(expirer, tickTime, id);
        }
    }

    /** Drops the session from every collection it might be in. */
    public void removeSession(long sessionId) {
        if (localSessionTracker != null) {
            localSessionTracker.removeSession(sessionId);
        }
        globalSessionsWithTimeouts.remove(sessionId);
        touchTable.get().remove(sessionId);
    }

    public void start() {
        if (localSessionTracker != null) {
            localSessionTracker.start();
        }
    }

    public void shutdown() {
        if (localSessionTracker != null) {
            localSessionTracker.shutdown();
        }
    }

    public boolean isGlobalSession(long sessionId) {
        return globalSessionsWithTimeouts.containsKey(sessionId);
    }

    /**
     * Registers a global session and records a touch for it.
     *
     * @return true if the session was not already tracked globally
     */
    public boolean addGlobalSession(long sessionId, int sessionTimeout) {
        Integer previousTimeout =
            globalSessionsWithTimeouts.put(sessionId, sessionTimeout);
        boolean isNew = previousTimeout == null;
        if (localSessionsEnabled && isNew) {
            // Extra logging only matters when both session kinds coexist,
            // so we can tell which kind this one is.
            LOG.info("Adding global session 0x" + Long.toHexString(sessionId));
        }
        touchTable.get().put(sessionId, sessionTimeout);
        return isNew;
    }

    public boolean addSession(long sessionId, int sessionTimeout) {
        if (!localSessionsEnabled || isGlobalSession(sessionId)) {
            return addGlobalSession(sessionId, sessionTimeout);
        }
        boolean tracked = localSessionTracker.addSession(sessionId, sessionTimeout);
        if (isGlobalSession(sessionId)) {
            // Lost a race with a concurrent upgrade; undo the local add.
            localSessionTracker.removeSession(sessionId);
            return false;
        }
        if (tracked) {
            LOG.info("Adding local session 0x" + Long.toHexString(sessionId));
        }
        return tracked;
    }

    public boolean touchSession(long sessionId, int sessionTimeout) {
        if (localSessionsEnabled) {
            if (localSessionTracker.touchSession(sessionId, sessionTimeout)) {
                return true;
            }
            if (!isGlobalSession(sessionId)) {
                return false;
            }
        }
        touchTable.get().put(sessionId, sessionTimeout);
        return true;
    }

    /** Atomically hands back the accumulated touches and starts a fresh table. */
    public Map<Long, Integer> snapshot() {
        return touchTable.getAndSet(new ConcurrentHashMap<Long, Integer>());
    }

    public long createSession(int sessionTimeout) {
        if (localSessionsEnabled) {
            return localSessionTracker.createSession(sessionTimeout);
        }
        return nextSessionId.getAndIncrement();
    }

    public void checkSession(long sessionId, Object owner)
            throws SessionExpiredException, SessionMovedException {
        if (localSessionTracker == null) {
            return;
        }
        try {
            localSessionTracker.checkSession(sessionId, owner);
            return;
        } catch (UnknownSessionException e) {
            // Global sessions are validated at the leader, so an unknown local
            // session is fine if it turns out to be global; otherwise report
            // expiry. Local state is consulted first to avoid racing with a
            // session upgrade.
            if (!isGlobalSession(sessionId)) {
                throw new SessionExpiredException();
            }
        }
    }

    public void setOwner(long sessionId, Object owner)
            throws SessionExpiredException {
        if (localSessionTracker == null) {
            return;
        }
        try {
            localSessionTracker.setOwner(sessionId, owner);
            return;
        } catch (SessionExpiredException e) {
            // Same reasoning as checkSession: tolerate sessions that are
            // actually global (handled at the leader); rethrow otherwise.
            if (!isGlobalSession(sessionId)) {
                throw e;
            }
        }
    }

    public void dumpSessions(PrintWriter pwriter) {
        if (localSessionTracker != null) {
            pwriter.print("Local ");
            localSessionTracker.dumpSessions(pwriter);
        }
        pwriter.print("Global Sessions(");
        pwriter.print(globalSessionsWithTimeouts.size());
        pwriter.println("):");
        SortedSet<Long> orderedIds =
            new TreeSet<Long>(globalSessionsWithTimeouts.keySet());
        for (long id : orderedIds) {
            pwriter.print("0x");
            pwriter.print(Long.toHexString(id));
            pwriter.print("\t");
            pwriter.print(globalSessionsWithTimeouts.get(id));
            pwriter.println("ms");
        }
    }

    public void setSessionClosing(long sessionId) {
        // Global sessions handled on the leader; this call is a no-op if
        // not tracked as a local session so safe to call in both cases.
        if (localSessionTracker != null) {
            localSessionTracker.setSessionClosing(sessionId);
        }
    }

    @Override
    public Map<Long, Set<Long>> getSessionExpiryMap() {
        return new HashMap<Long, Set<Long>>();
    }
}
/** * Copyright 2015 DuraSpace, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.camel; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.slf4j.LoggerFactory.getLogger; import java.io.IOException; import java.io.InputStream; import java.net.URI; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.converter.stream.CachedOutputStream; import org.apache.camel.impl.DefaultProducer; import org.apache.camel.util.ExchangeHelper; import org.apache.camel.util.IOHelper; import org.slf4j.Logger; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionSystemException; import org.springframework.transaction.support.DefaultTransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; /** * The Fedora producer. * * @author Aaron Coburn * @since October 20, 2014 */ public class FcrepoProducer extends DefaultProducer { public static final String DEFAULT_CONTENT_TYPE = "application/rdf+xml"; private static final Logger LOGGER = getLogger(FcrepoProducer.class); private FcrepoEndpoint endpoint; private FcrepoClient client; private TransactionTemplate transactionTemplate; /** * Create a FcrepoProducer object * * @param endpoint the FcrepoEndpoint corresponding to the exchange. 
*/ public FcrepoProducer(final FcrepoEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; this.transactionTemplate = endpoint.createTransactionTemplate(); this.client = new FcrepoClient( endpoint.getAuthUsername(), endpoint.getAuthPassword(), endpoint.getAuthHost(), endpoint.getThrowExceptionOnFailure()); } /** * Define how message exchanges are processed. * * @param exchange the InOut message exchange * @throws FcrepoOperationFailedException when the underlying HTTP request results in an error */ @Override public void process(final Exchange exchange) throws FcrepoOperationFailedException { if (exchange.isTransacted()) { transactionTemplate.execute(new TransactionCallbackWithoutResult() { protected void doInTransactionWithoutResult(final TransactionStatus status) { final DefaultTransactionStatus st = (DefaultTransactionStatus)status; final FcrepoTransactionObject tx = (FcrepoTransactionObject)st.getTransaction(); try { doRequest(exchange, tx.getSessionId()); } catch (FcrepoOperationFailedException ex) { throw new TransactionSystemException( "Error executing fcrepo request in transaction: ", ex); } } }); } else { doRequest(exchange, null); } } private void doRequest(final Exchange exchange, final String transaction) throws FcrepoOperationFailedException { final Message in = exchange.getIn(); final HttpMethods method = getMethod(exchange); final String contentType = getContentType(exchange); final String accept = getAccept(exchange); final String url = getUrl(exchange, transaction); final String prefer = getPrefer(exchange); LOGGER.debug("Fcrepo Request [{}] with method [{}]", url, method); FcrepoResponse response; switch (method) { case PATCH: response = client.patch(getMetadataUri(url), in.getBody(InputStream.class)); exchange.getIn().setBody(extractResponseBodyAsStream(response.getBody(), exchange)); break; case PUT: response = client.put(URI.create(url), in.getBody(InputStream.class), contentType); 
            // Each switch branch issues the request and stores the (stream-cached)
            // response body on the IN message.
            exchange.getIn().setBody(extractResponseBodyAsStream(response.getBody(), exchange));
            break;
        case POST:
            response = client.post(URI.create(url), in.getBody(InputStream.class), contentType);
            exchange.getIn().setBody(extractResponseBodyAsStream(response.getBody(), exchange));
            break;
        case DELETE:
            response = client.delete(URI.create(url));
            exchange.getIn().setBody(extractResponseBodyAsStream(response.getBody(), exchange));
            break;
        case HEAD:
            // HEAD responses carry no body, so clear it explicitly.
            response = client.head(URI.create(url));
            exchange.getIn().setBody(null);
            break;
        case GET:
        default:
            if (endpoint.getFixity()) {
                response = client.get(URI.create(url + FcrepoConstants.FIXITY), accept, prefer);
            } else if (endpoint.getMetadata()) {
                // Resolve via a HEAD request first so binary resources are fetched
                // from their metadata location when one is advertised.
                response = client.get(getMetadataUri(url), accept, prefer);
            } else {
                response = client.get(URI.create(url), accept, prefer);
            }
            exchange.getIn().setBody(extractResponseBodyAsStream(response.getBody(), exchange));
        }
        // Propagate the response metadata back onto the message headers.
        exchange.getIn().setHeader(Exchange.CONTENT_TYPE, response.getContentType());
        exchange.getIn().setHeader(Exchange.HTTP_RESPONSE_CODE, response.getStatusCode());
    }

    /**
     * Retrieve the resource location from a HEAD request, falling back to the
     * original URL when the response reports no Location.
     *
     * @param url the resource URL to probe
     * @return the advertised location, or a URI built from {@code url}
     * @throws FcrepoOperationFailedException if the HEAD request fails
     */
    private URI getMetadataUri(final String url) throws FcrepoOperationFailedException {
        final FcrepoResponse headResponse = client.head(URI.create(url));
        if (headResponse.getLocation() != null) {
            return headResponse.getLocation();
        } else {
            return URI.create(url);
        }
    }

    /**
     * Given an exchange, determine which HTTP method to use. Basically, use GET unless the value of the
     * Exchange.HTTP_METHOD header is defined. Unlike the http4: component, the request does not use POST if there is
     * a message body defined. This is so in order to avoid inadvertent changes to the repository.
     *
     * @param exchange the incoming message exchange
     * @return the resolved method, defaulting to GET when the header is absent
     */
    private HttpMethods getMethod(final Exchange exchange) {
        final HttpMethods method = exchange.getIn().getHeader(Exchange.HTTP_METHOD, HttpMethods.class);
        if (method == null) {
            return HttpMethods.GET;
        } else {
            return method;
        }
    }

    /**
     * Given an exchange, extract the contentType value for use with a Content-Type header. The order of preference is
     * so: 1) a contentType value set on the endpoint 2) a contentType value set on the Exchange.CONTENT_TYPE header
     *
     * @param exchange the incoming message exchange
     * @return the content type to send, or null when neither source defines one
     */
    private String getContentType(final Exchange exchange) {
        final String contentTypeString = ExchangeHelper.getContentType(exchange);
        if (!isBlank(endpoint.getContentType())) {
            return endpoint.getContentType();
        } else if (!isBlank(contentTypeString)) {
            return contentTypeString;
        } else {
            return null;
        }
    }

    /**
     * Given an exchange, extract the value for use with an Accept header. The order of preference is:
     * 1) whether a transform is being requested 2) an accept value is set on the endpoint 3) a value set on
     * the Exchange.ACCEPT_CONTENT_TYPE header 4) a value set on an "Accept" header 5) the endpoint
     * DEFAULT_CONTENT_TYPE (i.e. application/rdf+xml)
     *
     * @param exchange the incoming message exchange
     * @return the Accept header value to send
     */
    private String getAccept(final Exchange exchange) {
        final Message in = exchange.getIn();
        final String fcrepoTransform = in.getHeader(FcrepoHeaders.FCREPO_TRANSFORM, String.class);
        final String acceptHeader = getAcceptHeader(exchange);
        if (!isBlank(endpoint.getTransform()) || !isBlank(fcrepoTransform)) {
            // Transform output is always JSON, regardless of any Accept preference.
            return "application/json";
        } else if (!isBlank(endpoint.getAccept())) {
            return endpoint.getAccept();
        } else if (!isBlank(acceptHeader)) {
            return acceptHeader;
        } else {
            return DEFAULT_CONTENT_TYPE;
        }
    }

    /**
     * Given an exchange, extract the value of an incoming Accept header. The
     * Exchange.ACCEPT_CONTENT_TYPE header takes precedence over a literal
     * "Accept" header.
     *
     * @param exchange the incoming message exchange
     * @return the incoming Accept value, or null when neither header is set
     */
    private String getAcceptHeader(final Exchange exchange) {
        final Message in = exchange.getIn();
        if (!isBlank(in.getHeader(Exchange.ACCEPT_CONTENT_TYPE, String.class))) {
            return in.getHeader(Exchange.ACCEPT_CONTENT_TYPE, String.class);
        } else if (!isBlank(in.getHeader("Accept", String.class))) {
            return in.getHeader("Accept", String.class);
        } else {
            return null;
        }
    }

    /**
     * The resource path can be set either by the Camel header (CamelFcrepoIdentifier)
     * or by fedora's jms headers (org.fcrepo.jms.identifier). This method extracts
     * a path from the appropriate header (the camel header overrides the jms header).
     *
     * @param exchange The camel exchange
     * @return the resource path, or the empty string when neither header is set
     */
    private String getPathFromHeaders(final Exchange exchange) {
        final Message in = exchange.getIn();
        if (!isBlank(in.getHeader(FcrepoHeaders.FCREPO_IDENTIFIER, String.class))) {
            return in.getHeader(FcrepoHeaders.FCREPO_IDENTIFIER, String.class);
        } else if (!isBlank(in.getHeader(JmsHeaders.IDENTIFIER, String.class))) {
            return in.getHeader(JmsHeaders.IDENTIFIER, String.class);
        } else {
            return "";
        }
    }

    /**
     * Extract a transformation path from the exchange if the appropriate headers
     * are set. This will format the URL to use the transform program defined
     * in the CamelFcrepoTransform header or the transform uri option (in that
     * order of precedence).
     *
     * @param exchange the camel message exchange
     * @return the "/fcr:transform[...]" suffix, or the empty string when no
     *         transform is requested or the method is neither GET nor POST
     */
    private String getTransformPath(final Exchange exchange) {
        final Message in = exchange.getIn();
        final HttpMethods method = getMethod(exchange);
        final String transformProgram = in.getHeader(FcrepoHeaders.FCREPO_TRANSFORM, String.class);
        final String fcrTransform = "/fcr:transform";
        if (!isBlank(endpoint.getTransform()) || !isBlank(transformProgram)) {
            if (method == HttpMethods.POST) {
                // POSTed transforms carry the program in the request body, not the path.
                return fcrTransform;
            } else if (method == HttpMethods.GET) {
                if (!isBlank(transformProgram)) {
                    return fcrTransform + "/" + transformProgram;
                } else {
                    return fcrTransform + "/" + endpoint.getTransform();
                }
            }
        }
        return "";
    }

    /**
     * Given an exchange, extract the fully qualified URL for a fedora resource. By default, this will use the entire
     * path set on the endpoint. If either of the following headers are defined, they will be appended to that path in
     * this order of preference: 1) FCREPO_IDENTIFIER 2) org.fcrepo.jms.identifier
     *
     * @param exchange the incoming message exchange
     * @param transaction an optional transaction id segment inserted after the base URL; may be null
     * @return the fully qualified resource URL
     */
    private String getUrl(final Exchange exchange, final String transaction) {
        final StringBuilder url = new StringBuilder();
        final String transformPath = getTransformPath(exchange);
        final HttpMethods method = getMethod(exchange);
        url.append(endpoint.getBaseUrlWithScheme());
        if (transaction != null) {
            url.append("/");
            url.append(transaction);
        }
        url.append(getPathFromHeaders(exchange));
        if (!isBlank(transformPath)) {
            url.append(transformPath);
        } else if (method == HttpMethods.DELETE && endpoint.getTombstone()) {
            // DELETE may optionally target the tombstone of an already-deleted resource.
            url.append("/fcr:tombstone");
        }
        return url.toString();
    }

    /**
     * Given an exchange, extract the Prefer headers, if any. Only meaningful for
     * GET requests; the FCREPO_PREFER header wins over endpoint-configured
     * include/omit values.
     *
     * @param exchange the incoming message exchange
     * @return the Prefer header value, or null for non-GET requests / no preference
     */
    private String getPrefer(final Exchange exchange) {
        final Message in = exchange.getIn();
        if (getMethod(exchange) == HttpMethods.GET) {
            if (!isBlank(in.getHeader(FcrepoHeaders.FCREPO_PREFER, String.class))) {
                return in.getHeader(FcrepoHeaders.FCREPO_PREFER, String.class);
            } else {
                return buildPreferHeader(endpoint.getPreferInclude(), endpoint.getPreferOmit());
            }
        } else {
            return null;
        }
    }

    /**
     * Build the prefer header from include and/or omit endpoint values.
     *
     * @param include URIs (or short names) to include in the representation; may be blank
     * @param omit URIs (or short names) to omit from the representation; may be blank
     * @return a "return=representation;..." header value, or null when both inputs are blank
     */
    private String buildPreferHeader(final String include, final String omit) {
        if (isBlank(include) && isBlank(omit)) {
            return null;
        } else {
            final StringBuilder prefer = new StringBuilder("return=representation;");
            if (!isBlank(include)) {
                prefer.append(" include=\"" + addPreferNamespace(include) + "\";");
            }
            if (!isBlank(omit)) {
                prefer.append(" omit=\"" + addPreferNamespace(omit) + "\";");
            }
            return prefer.toString();
        }
    }

    /**
     * Add the appropriate namespace to the prefer header in case the
     * short form was supplied; unknown values pass through unchanged.
     *
     * @param property a short prefer-property name or a full URI
     * @return the expanded URI when known, otherwise the input itself
     */
    private String addPreferNamespace(final String property) {
        final String prefer = RdfNamespaces.PREFER_PROPERTIES.get(property);
        if (!isBlank(prefer)) {
            return prefer;
        } else {
            return property;
        }
    }

    /**
     * Wrap a raw HTTP response body so that it survives connection close.
     *
     * @param is the raw response body; may be null
     * @param exchange the exchange used for stream-cache configuration and lifecycle
     * @return a StreamCache copy of the body, the raw stream when caching is
     *         disabled, or null when the body is absent or copying fails
     */
    private static Object extractResponseBodyAsStream(final InputStream is, final Exchange exchange) {
        // As httpclient is using an AutoCloseInputStream, it will be closed when
        // the connection is closed, so we need to cache the stream contents.
        if (is == null) {
            return null;
        }
        // Convert the input stream to a StreamCache unless stream caching is disabled.
        if (exchange.getProperty(Exchange.DISABLE_HTTP_STREAM_CACHE, Boolean.FALSE, Boolean.class)) {
            return is;
        } else {
            try (final CachedOutputStream cos = new CachedOutputStream(exchange)) {
                // This CachedOutputStream will not be closed when the exchange completes;
                // closing the returned InputStream closes it instead.
                IOHelper.copyAndCloseInput(is, cos);
                return cos.newStreamCache();
            } catch (IOException ex) {
                LOGGER.debug("Error extracting body from http request", ex);
                return null;
            }
        }
    }
}
/*- * Copyright (c) 2015 Mudfish Networks <contact@loxch.com> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
*/ package net.threerocks.go.demo; import android.app.Notification; import android.app.PendingIntent; import android.app.Service; import android.content.Intent; import android.content.SharedPreferences; import android.os.IBinder; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import android.util.Log; import net.mudfish.x.MudfishX; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.util.Random; public class MudfishXService extends Service { private static String TAG = "MudfishXService"; @Override public void onCreate() { super.onCreate(); } @Override public void onDestroy() { super.onDestroy(); } @Override public IBinder onBind(Intent intent) { // Used only in case of bound services. return null; } public static int unsignedToBytes(byte b) { return b & 0xFF; } private String randomIPAddress() { Random rand = new Random(); byte[] ipAddr = new byte[4]; rand.nextBytes(ipAddr); ipAddr[0] = 10; ipAddr[1] = (byte)(ipAddr[1] & (byte)0xff); if (ipAddr[1] == (byte)255) ipAddr[1] = (byte)254; ipAddr[2] = (byte)(ipAddr[2] & (byte)0xff); ipAddr[3] = (byte)(ipAddr[3] & (byte)0xff); StringBuilder sb = new StringBuilder(18); for(byte b : ipAddr) { if(sb.length() > 0) sb.append("."); sb.append(String.format("%d", unsignedToBytes(b))); } return sb.toString(); } private String randomMACAddress() { Random rand = new Random(); byte[] macAddr = new byte[6]; rand.nextBytes(macAddr); macAddr[0] = (byte)(macAddr[0] & (byte)254); StringBuilder sb = new StringBuilder(18); for(byte b : macAddr){ if(sb.length() > 0) sb.append(":"); sb.append(String.format("%02x", b)); } return sb.toString(); } protected String buildConfigFile(String serverPublicIp, String serverLocalIp) { String localip = randomIPAddress(); String macaddr = randomMACAddress(); String config = "" + "interface eth0 { \n" + " macaddr " + macaddr + " \n" + " mtu 1500 \n" + " ip " + 
localip + " \n" + " netmask 255.0.0.0 \n" + "} -connect \"" + serverPublicIp + ":20007\" \n" + " \n" + "zlib_compress zc0 \n" + " \n" + "tcprelay_client http0 { \n" + " backend { \n" + " connect " + serverLocalIp + ":80 \n" + " } \n" + "} -protocol \"TCP\" -listen \"127.0.0.1:18080\" \n" + " \n" + "tcprelay_client http1 { \n" + " backend { \n" + " connect " + serverLocalIp + ":81 \n" + " bandwidth_scaling { \n" + " zlib_compress zc0 \n" + " } \n" + " } \n" + "} -protocol \"TCP\" -listen \"127.0.0.1:18081\" \n" + " \n" + "dedup_client dc2 { \n" + " connect " + serverLocalIp + ":8082 \n" + "} \n" + " \n" + "tcprelay_client http2 { \n" + " backend { \n" + " connect " + serverLocalIp + ":82 \n" + " bandwidth_scaling { \n" + " dedup_client dc2 \n" + " } \n" + " } \n" + "} -protocol \"TCP\" -listen \"127.0.0.1:18082\" \n" + " \n" + "dedup_client dc3 { \n" + " connect " + serverLocalIp + ":8083 \n" + "} \n" + " \n" + "tcprelay_client http3 { \n" + " backend { \n" + " connect " + serverLocalIp + ":83 \n" + " bandwidth_scaling { \n" + " zlib_compress zc0 \n" + " dedup_client dc3 \n" + " } \n" + " } \n" + "} -protocol \"TCP\" -listen \"127.0.0.1:18083\" \n"; String path = null; try { BufferedWriter bw; File temp; temp = File.createTempFile("mudfish.x", ".conf"); temp.deleteOnExit(); bw = new BufferedWriter(new FileWriter(temp)); bw.write(config); bw.close(); path = temp.getAbsolutePath(); Log.i(TAG, "Config file : " + path); } catch (Exception e) { e.printStackTrace(); } finally { } return (path); } private void loadMudfishXConfig() { File cacheDir; MainApplication app; MudfishX x; SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); String configFile, server_id; String serverPublicIp; int error; cacheDir = this.getCacheDir(); server_id = prefs.getString("pref_targets_testserver", "1"); if (server_id.equals("1")) serverPublicIp = "50.112.240.91"; else if (server_id.equals("2")) serverPublicIp = "107.170.249.208"; else serverPublicIp = 
"107.170.249.208"; configFile = buildConfigFile(serverPublicIp, "10.255.0.1"); app = (MainApplication)getApplication(); x = app.mMudfishX; error = x.loadConfig(configFile); if (error != 0) Log.e(TAG, "mMudfishX.loadConfig() failed: " + x.strerror()); } @Override public int onStartCommand(Intent intent, int flags, int startId) { if (intent == null) return START_NOT_STICKY; if (intent.getAction().equals("start")) { loadMudfishXConfig(); } return START_STICKY; } }
package org.apache.maven.plugin.gpg; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.SelectorUtils; /** * Sign project artifact, the POM, and attached artifacts with GnuPG for deployment. * * @author Jason van Zyl * @author Jason Dillon * @author Daniel Kulp */ @Mojo( name = "sign", defaultPhase = LifecyclePhase.VERIFY, threadSafe = true ) public class GpgSignAttachedMojo extends AbstractGpgMojo { private static final String DEFAULT_EXCLUDES[] = new String[]{ "**/*.md5", "**/*.sha1", "**/*.asc" }; /** * Skip doing the gpg signing. 
*/ @Parameter( property = "gpg.skip", defaultValue = "false" ) private boolean skip; /** * A list of files to exclude from being signed. Can contain Ant-style wildcards and double wildcards. The default * excludes are <code>**&#47;*.md5 **&#47;*.sha1 **&#47;*.asc</code>. * * @since 1.0-alpha-4 */ @Parameter private String[] excludes; /** * The directory where to store signature files. * * @since 1.0-alpha-4 */ @Parameter( defaultValue = "${project.build.directory}/gpg", alias = "outputDirectory" ) private File ascDirectory; /** * The maven project. */ @Component protected MavenProject project; /** * Maven ProjectHelper */ @Component private MavenProjectHelper projectHelper; public void execute() throws MojoExecutionException, MojoFailureException { if ( skip ) { // We're skipping the signing stuff return; } if ( excludes == null || excludes.length == 0 ) { excludes = DEFAULT_EXCLUDES; } String newExcludes[] = new String[excludes.length]; for ( int i = 0; i < excludes.length; i++ ) { String pattern; pattern = excludes[i].trim().replace( '/', File.separatorChar ).replace( '\\', File.separatorChar ); if ( pattern.endsWith( File.separator ) ) { pattern += "**"; } newExcludes[i] = pattern; } excludes = newExcludes; GpgSigner signer = newSigner( project ); // ---------------------------------------------------------------------------- // What we need to generateSignatureForArtifact here // ---------------------------------------------------------------------------- signer.setOutputDirectory( ascDirectory ); signer.setBuildDirectory( new File( project.getBuild().getDirectory() ) ); signer.setBaseDirectory( project.getBasedir() ); List signingBundles = new ArrayList(); if ( !"pom".equals( project.getPackaging() ) ) { // ---------------------------------------------------------------------------- // Project artifact // ---------------------------------------------------------------------------- Artifact artifact = project.getArtifact(); File file = artifact.getFile(); if ( 
file != null && file.isFile() ) { getLog().debug( "Generating signature for " + file ); File projectArtifactSignature = signer.generateSignatureForArtifact( file ); if ( projectArtifactSignature != null ) { signingBundles.add( new SigningBundle( artifact.getArtifactHandler().getExtension(), projectArtifactSignature ) ); } } else if ( project.getAttachedArtifacts().isEmpty() ) { throw new MojoFailureException( "The project artifact has not been assembled yet. " + "Please do not invoke this goal before the lifecycle phase \"package\"." ); } else { getLog().debug( "Main artifact not assembled, skipping signature generation" ); } } // ---------------------------------------------------------------------------- // POM // ---------------------------------------------------------------------------- File pomToSign = new File( project.getBuild().getDirectory(), project.getBuild().getFinalName() + ".pom" ); try { FileUtils.copyFile( project.getFile(), pomToSign ); } catch ( IOException e ) { throw new MojoExecutionException( "Error copying POM for signing.", e ); } getLog().debug( "Generating signature for " + pomToSign ); File pomSignature = signer.generateSignatureForArtifact( pomToSign ); if ( pomSignature != null ) { signingBundles.add( new SigningBundle( "pom", pomSignature ) ); } // ---------------------------------------------------------------------------- // Attached artifacts // ---------------------------------------------------------------------------- for ( Iterator i = project.getAttachedArtifacts().iterator(); i.hasNext(); ) { Artifact artifact = (Artifact) i.next(); File file = artifact.getFile(); getLog().debug( "Generating signature for " + file ); File signature = signer.generateSignatureForArtifact( file ); if ( signature != null ) { signingBundles.add( new SigningBundle( artifact.getArtifactHandler().getExtension(), artifact.getClassifier(), signature ) ); } } // ---------------------------------------------------------------------------- // Attach all 
the signatures // ---------------------------------------------------------------------------- for ( Iterator i = signingBundles.iterator(); i.hasNext(); ) { SigningBundle bundle = (SigningBundle) i.next(); projectHelper.attachArtifact( project, bundle.getExtension() + GpgSigner.SIGNATURE_EXTENSION, bundle.getClassifier(), bundle.getSignature() ); } } /** * Tests whether or not a name matches against at least one exclude pattern. * * @param name The name to match. Must not be <code>null</code>. * @return <code>true</code> when the name matches against at least one exclude pattern, or <code>false</code> * otherwise. */ protected boolean isExcluded( String name ) { for ( int i = 0; i < excludes.length; i++ ) { if ( SelectorUtils.matchPath( excludes[i], name ) ) { return true; } } return false; } }
/* * Copyright 2017-2021 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.gchq.gaffer.sparkaccumulo.operation.handler.dataframe; import org.apache.spark.rdd.RDD; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.sources.EqualTo; import org.apache.spark.sql.sources.Filter; import org.apache.spark.sql.sources.GreaterThan; import org.junit.jupiter.api.Test; import uk.gov.gchq.gaffer.accumulostore.AccumuloProperties; import uk.gov.gchq.gaffer.accumulostore.AccumuloStore; import uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore; import uk.gov.gchq.gaffer.commonutil.StreamUtil; import uk.gov.gchq.gaffer.commonutil.stream.Streams; import uk.gov.gchq.gaffer.data.element.Edge; import uk.gov.gchq.gaffer.data.element.Element; import uk.gov.gchq.gaffer.data.element.Entity; import uk.gov.gchq.gaffer.data.elementdefinition.view.View; import uk.gov.gchq.gaffer.data.elementdefinition.view.ViewElementDefinition; import uk.gov.gchq.gaffer.operation.OperationException; import uk.gov.gchq.gaffer.operation.impl.add.AddElements; import uk.gov.gchq.gaffer.spark.SparkContextUtil; import uk.gov.gchq.gaffer.spark.SparkSessionProvider; import uk.gov.gchq.gaffer.spark.operation.dataframe.ConvertElementToRow; import uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter; import uk.gov.gchq.gaffer.store.Context; import uk.gov.gchq.gaffer.store.Store; import 
uk.gov.gchq.gaffer.store.StoreException; import uk.gov.gchq.gaffer.store.schema.Schema; import uk.gov.gchq.gaffer.user.User; import uk.gov.gchq.koryphe.impl.predicate.IsMoreThan; import uk.gov.gchq.koryphe.tuple.predicate.TupleAdaptedPredicate; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.function.Predicate; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; /** * Contains unit tests for {@link AccumuloStoreRelation}. */ public class AccumuloStoreRelationTest { private static final AccumuloProperties PROPERTIES = AccumuloProperties.loadStoreProperties(StreamUtil.storeProps(AccumuloStoreRelationTest.class)); @Test public void testBuildScanFullView() throws OperationException, StoreException { final Schema schema = getSchema(); final View view = getViewFromSchema(schema); testBuildScanWithView("testBuildScanFullView", view, e -> true); } @Test public void testBuildScanRestrictViewToOneGroup() throws OperationException, StoreException { final View view = new View.Builder() .edge(GetDataFrameOfElementsHandlerTest.EDGE_GROUP) .build(); final Predicate<Element> returnElement = (Element element) -> element.getGroup().equals(GetDataFrameOfElementsHandlerTest.EDGE_GROUP); testBuildScanWithView("testBuildScanRestrictViewToOneGroup", view, returnElement); } @Test public void testBuildScanRestrictViewByProperty() throws OperationException, StoreException { final List<TupleAdaptedPredicate<String, ?>> filters = new ArrayList<>(); filters.add(new TupleAdaptedPredicate<>(new IsMoreThan(5, false), new String[]{"property1"})); final View view = new View.Builder() .edge(GetDataFrameOfElementsHandlerTest.EDGE_GROUP, new ViewElementDefinition.Builder() .postAggregationFilterFunctions(filters) .build()) .build(); final Predicate<Element> returnElement = (Element 
element) -> element.getGroup().equals(GetDataFrameOfElementsHandlerTest.EDGE_GROUP) && (Integer) element.getProperty("property1") > 5; testBuildScanWithView("testBuildScanRestrictViewByProperty", view, returnElement); } private void testBuildScanWithView(final String name, final View view, final Predicate<Element> returnElement) throws OperationException, StoreException { // Given final SparkSession sparkSession = SparkSessionProvider.getSparkSession(); final Schema schema = getSchema(); final AccumuloStore store = new SingleUseMiniAccumuloStore(); store.initialise("graphId", schema, PROPERTIES); addElements(store); // When final AccumuloStoreRelation relation = new AccumuloStoreRelation( SparkContextUtil.createContext(new User(), sparkSession), Collections.emptyList(), view, store, null); final RDD<Row> rdd = relation.buildScan(); final Row[] returnedElements = (Row[]) rdd.collect(); // Then // - Actual results are: final Set<Row> results = new HashSet<>(Arrays.asList(returnedElements)); // - Expected results are: final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>()); final ConvertElementToRow elementConverter = new ConvertElementToRow(schemaConverter.getUsedProperties(), schemaConverter.getPropertyNeedsConversion(), schemaConverter.getConverterByProperty()); final Set<Row> expectedRows = new HashSet<>(); Streams.toStream(getElements()) .filter(returnElement) .map(elementConverter::apply) .forEach(expectedRows::add); assertEquals(expectedRows, results); } @Test public void testBuildScanSpecifyColumnsFullView() throws OperationException, StoreException { final Schema schema = getSchema(); final View view = getViewFromSchema(schema); final String[] requiredColumns = new String[]{"property1"}; testBuildScanSpecifyColumnsWithView(view, requiredColumns, e -> true); } private void testBuildScanSpecifyColumnsWithView(final View view, final String[] requiredColumns, final Predicate<Element> returnElement) 
throws OperationException, StoreException { // Given final SparkSession sparkSession = SparkSessionProvider.getSparkSession(); final Schema schema = getSchema(); final AccumuloStore store = new SingleUseMiniAccumuloStore(); store.initialise("graphId", schema, PROPERTIES); addElements(store); // When final AccumuloStoreRelation relation = new AccumuloStoreRelation( SparkContextUtil.createContext(new User(), sparkSession), Collections.emptyList(), view, store, null); final RDD<Row> rdd = relation.buildScan(requiredColumns); final Row[] returnedElements = (Row[]) rdd.collect(); // Then // - Actual results are: final Set<Row> results = new HashSet<>(); for (int i = 0; i < returnedElements.length; i++) { results.add(returnedElements[i]); } // - Expected results are: final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>()); final ConvertElementToRow elementConverter = new ConvertElementToRow(new LinkedHashSet<>(Arrays.asList(requiredColumns)), schemaConverter.getPropertyNeedsConversion(), schemaConverter.getConverterByProperty()); final Set<Row> expectedRows = new HashSet<>(); Streams.toStream(getElements()) .filter(returnElement) .map(elementConverter::apply) .forEach(expectedRows::add); assertEquals(expectedRows, results); } @Test public void testBuildScanSpecifyColumnsAndFiltersFullView() throws OperationException, StoreException { final Schema schema = getSchema(); final View view = getViewFromSchema(schema); final String[] requiredColumns = new String[1]; requiredColumns[0] = "property1"; final Filter[] filters = new Filter[1]; filters[0] = new GreaterThan("property1", 4); final Predicate<Element> returnElement = (Element element) -> ((Integer) element.getProperty("property1")) > 4; testBuildScanSpecifyColumnsAndFiltersWithView(view, requiredColumns, filters, returnElement); } private void testBuildScanSpecifyColumnsAndFiltersWithView(final View view, final String[] requiredColumns, final Filter[] filters, 
final Predicate<Element> returnElement) throws OperationException, StoreException { // Given final SparkSession sparkSession = SparkSessionProvider.getSparkSession(); final Schema schema = getSchema(); final AccumuloStore store = new SingleUseMiniAccumuloStore(); store.initialise("graphId", schema, PROPERTIES); addElements(store); // When final AccumuloStoreRelation relation = new AccumuloStoreRelation( SparkContextUtil.createContext(new User(), sparkSession), Collections.emptyList(), view, store, null); final RDD<Row> rdd = relation.buildScan(requiredColumns, filters); final Row[] returnedElements = (Row[]) rdd.collect(); // Then // - Actual results are: final Set<Row> results = new HashSet<>(Arrays.asList(returnedElements)); // - Expected results are: final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>()); final ConvertElementToRow elementConverter = new ConvertElementToRow(new LinkedHashSet<>(Arrays.asList(requiredColumns)), schemaConverter.getPropertyNeedsConversion(), schemaConverter.getConverterByProperty()); final Set<Row> expectedRows = new HashSet<>(); Streams.toStream(getElements()) .filter(returnElement) .map(elementConverter::apply) .forEach(expectedRows::add); assertEquals(expectedRows, results); } @Test public void shouldReturnEmptyDataFrameWithNoResultsFromFilter() throws StoreException, OperationException { // Given final SparkSession sparkSession = SparkSessionProvider.getSparkSession(); final Schema schema = getSchema(); final View view = getViewFromSchema(schema); final AccumuloStore store = new SingleUseMiniAccumuloStore(); store.initialise("graphId", schema, PROPERTIES); addElements(store); final String[] requiredColumns = new String[1]; requiredColumns[0] = "property1"; final Filter[] filters = new Filter[1]; filters[0] = new EqualTo("group", "abc"); // When final AccumuloStoreRelation relation = new AccumuloStoreRelation( SparkContextUtil.createContext(new User(), sparkSession), 
Collections.emptyList(), view, store, null); final RDD<Row> rdd = relation.buildScan(requiredColumns, filters); // Then assertThat(rdd.isEmpty()).isTrue(); } private static Schema getSchema() { return Schema.fromJson( AccumuloStoreRelationTest.class.getResourceAsStream("/schema-DataFrame/elements.json"), AccumuloStoreRelationTest.class.getResourceAsStream("/schema-DataFrame/types.json"), AccumuloStoreRelationTest.class.getResourceAsStream("/schema-DataFrame/serialisation.json")); } private static View getViewFromSchema(final Schema schema) { return new View.Builder() .entities(schema.getEntityGroups()) .edges(schema.getEdgeGroups()) .build(); } private static void addElements(final Store store) throws OperationException { store.execute(new AddElements.Builder().input(getElements()).build(), new Context(new User())); } private static List<Element> getElements() { final List<Element> elements = new ArrayList<>(); for (int i = 0; i < 10; i++) { final Entity entity = new Entity.Builder() .group(GetDataFrameOfElementsHandlerTest.ENTITY_GROUP) .vertex("" + i) .property("columnQualifier", 1) .property("property1", i) .property("property2", 3.0F) .property("property3", 4.0D) .property("property4", i * 2L) .property("count", 6L) .build(); final Edge edge1 = new Edge.Builder() .group(GetDataFrameOfElementsHandlerTest.EDGE_GROUP) .source("" + i) .dest("B") .directed(true) .property("columnQualifier", 1) .property("property1", 2) .property("property2", 3.0F) .property("property3", 4.0D) .property("property4", 5L) .property("count", 100L) .build(); final Edge edge2 = new Edge.Builder() .group(GetDataFrameOfElementsHandlerTest.EDGE_GROUP) .source("" + i) .dest("C") .directed(true) .property("columnQualifier", 6) .property("property1", 7) .property("property2", 8.0F) .property("property3", 9.0D) .property("property4", 10L) .property("count", i * 200L) .build(); final Edge edge3 = new Edge.Builder() .group(GetDataFrameOfElementsHandlerTest.EDGE_GROUP2) .source("" + i) .dest("D") 
.directed(true) .property("property1", 1000) .build(); elements.add(edge1); elements.add(edge2); elements.add(edge3); elements.add(entity); } return elements; } }
package com.tapadoo.alerter.demo; import android.graphics.Color; import android.graphics.Typeface; import android.net.Uri; import android.os.Bundle; import androidx.appcompat.app.AppCompatActivity; import androidx.appcompat.widget.Toolbar; import android.view.Gravity; import android.view.View; import android.widget.Toast; import com.tapadoo.alerter.Alert; import com.tapadoo.alerter.Alerter; import com.tapadoo.alerter.OnHideAlertListener; import com.tapadoo.alerter.OnShowAlertListener; /** * Java Demo Activity to ensure backwards compatibility * * @author Kevin Murphy * @since 5/06/2018 */ public class JavaDemoActivity extends AppCompatActivity implements View.OnClickListener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(com.tapadoo.alerter.demo.R.layout.activity_demo); Toolbar toolbar = findViewById(com.tapadoo.alerter.demo.R.id.toolbar); setSupportActionBar(toolbar); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertDefault).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertColoured).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertCustomIcon).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertTextOnly).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertOnClick).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertVerbose).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertCallback).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertInfiniteDuration).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertWithProgress).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertWithCustomFont).setOnClickListener(this); findViewById(com.tapadoo.alerter.demo.R.id.btnAlertSwipeToDismissEnabled).setOnClickListener(this); 
findViewById(com.tapadoo.alerter.demo.R.id.btnAlertSound).setOnClickListener(this); } @Override protected void onStart() { super.onStart(); getWindow().setBackgroundDrawableResource(android.R.color.white); } @Override public void onClick(View view) { int i = view.getId(); if (i == com.tapadoo.alerter.demo.R.id.btnAlertColoured) { showAlertColoured(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertCustomIcon) { showAlertWithIcon(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertTextOnly) { showAlertTextOnly(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertOnClick) { showAlertWithOnClick(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertVerbose) { showAlertVerbose(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertCallback) { showAlertCallbacks(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertInfiniteDuration) { showAlertInfiniteDuration(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertWithProgress) { showAlertWithProgress(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertWithCustomFont) { showAlertWithCustomFont(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertWithCustomColor) { showAlertWithCustomColor(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertSwipeToDismissEnabled) { showAlertSwipeToDismissEnabled(); } else if (i == com.tapadoo.alerter.demo.R.id.btnAlertSound) { showAlertSound(); } else if (i == com.tapadoo.alerter.demo.R.id.btnCenterAlert) { showAlertFromCenter(); } else if (i == com.tapadoo.alerter.demo.R.id.btnBottomAlert) { showAlertFromBottom(); } else { showAlertDefault(); } } private void showAlertDefault() { Alerter.create(JavaDemoActivity.this) .setTitle(com.tapadoo.alerter.demo.R.string.title_activity_example) .setText("Alert text...") .show(); } private void showAlertColoured() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .setBackgroundColorRes(com.tapadoo.alerter.demo.R.color.colorAccent) .show(); } private void 
showAlertWithIcon() { Alerter.create(JavaDemoActivity.this) .setText("Alert text...") .setIcon(com.tapadoo.alerter.demo.R.drawable.alerter_ic_mail_outline) .setIconColorFilter(0) // Optional - Removes white tint .setIconSize(R.dimen.custom_icon_size) // Optional - default is 38dp .show(); } private void showAlertTextOnly() { Alerter.create(JavaDemoActivity.this) .setText("Alert text...") .show(); } private void showAlertWithOnClick() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .setDuration(10000) .setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Toast.makeText(JavaDemoActivity.this, "OnClick Called", Toast.LENGTH_LONG).show(); } }) .show(); } private void showAlertVerbose() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("The alert scales to accommodate larger bodies of text. " + "The alert scales to accommodate larger bodies of text. " + "The alert scales to accommodate larger bodies of text.") .show(); } private void showAlertCallbacks() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .setDuration(10000) .setOnShowListener(new OnShowAlertListener() { @Override public void onShow() { Toast.makeText(JavaDemoActivity.this, "Show Alert", Toast.LENGTH_LONG).show(); } }) .setOnHideListener(new OnHideAlertListener() { @Override public void onHide() { Toast.makeText(JavaDemoActivity.this, "Hide Alert", Toast.LENGTH_LONG).show(); } }) .show(); } private void showAlertInfiniteDuration() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .enableInfiniteDuration(true) .show(); } private void showAlertWithProgress() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .enableProgress(true) .setProgressColorRes(com.tapadoo.alerter.demo.R.color.colorPrimary) .show(); } private void showAlertWithCustomFont() { Alerter.create(JavaDemoActivity.this) 
.setTitle("Alert Title") .setTitleAppearance(com.tapadoo.alerter.demo.R.style.AlertTextAppearance_Title) .setTitleTypeface(Typeface.createFromAsset(getAssets(), "Pacifico-Regular.ttf")) .setText("Alert text...") .setTextAppearance(com.tapadoo.alerter.demo.R.style.AlertTextAppearance_Text) .setTextTypeface(Typeface.createFromAsset(getAssets(), "ScopeOne-Regular.ttf")) .show(); } private void showAlertWithCustomColor() { Alert alert = Alerter.create(JavaDemoActivity.this) .setTitle("Yellow Alert Title") .setText("Red Alert text...") .show(); alert.getTitle().setTextColor(Color.YELLOW); alert.getText().setTextColor(Color.RED); } private void showAlertSwipeToDismissEnabled() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .enableSwipeToDismiss() .setOnHideListener(new OnHideAlertListener() { @Override public void onHide() { Toast.makeText(JavaDemoActivity.this, "Hide Alert", Toast.LENGTH_LONG).show(); } }) .show(); } private void showAlertWithCustomAnimations() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .setEnterAnimation(com.tapadoo.alerter.demo.R.anim.alerter_slide_in_from_left) .setExitAnimation(com.tapadoo.alerter.demo.R.anim.alerter_slide_out_to_right) .show(); } private void showAlertWithButtons() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .addButton("Okay", com.tapadoo.alerter.demo.R.style.AlertButton, new View.OnClickListener() { @Override public void onClick(View v) { Toast.makeText(JavaDemoActivity.this, "Okay Clicked", Toast.LENGTH_SHORT).show(); } }) .show(); } private void showAlertSound() { Alerter.create(JavaDemoActivity.this) .setTitle("Alert Title") .setText("Alert text...") .setBackgroundColorRes(R.color.colorAccent) .setSound(Uri.parse("android.resource://" + getPackageName() + "/raw/ringtone")) .show(); } private void showAlertFromCenter() { Alerter.create(JavaDemoActivity.this) 
.setTitle(com.tapadoo.alerter.demo.R.string.title_activity_example) .setText("Alert text...") .setLayoutGravity(Gravity.CENTER) .show(); } private void showAlertFromBottom() { Alerter.create(JavaDemoActivity.this) .setTitle(com.tapadoo.alerter.demo.R.string.title_activity_example) .setText("Alert text...") .setLayoutGravity(Gravity.BOTTOM) .show(); } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.logs.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the DescribeExportTasks operation. All filter fields are
 * optional; leaving them unset returns all export tasks (paginated).
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/logs-2014-03-28/DescribeExportTasks" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeExportTasksRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the export task; filters the results to zero or one export tasks. */
    private String taskId;

    /** The status code of the export task; filters the results to zero or more export tasks. */
    private String statusCode;

    /** The pagination token received from a previous call. */
    private String nextToken;

    /** The maximum number of items returned; defaults to up to 50 items when unset. */
    private Integer limit;

    /**
     * Sets the export task ID filter.
     *
     * @param taskId
     *        The ID of the export task. Specifying a task ID filters the results to zero or one export tasks.
     */
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    /**
     * @return The ID of the export task. Specifying a task ID filters the results to zero or one export tasks.
     */
    public String getTaskId() {
        return this.taskId;
    }

    /**
     * Fluent variant of {@link #setTaskId(String)}.
     *
     * @param taskId
     *        The ID of the export task. Specifying a task ID filters the results to zero or one export tasks.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeExportTasksRequest withTaskId(String taskId) {
        this.taskId = taskId;
        return this;
    }

    /**
     * Sets the status-code filter.
     *
     * @param statusCode
     *        The status code of the export task. Specifying a status code filters the results to zero or more export
     *        tasks.
     * @see ExportTaskStatusCode
     */
    public void setStatusCode(String statusCode) {
        this.statusCode = statusCode;
    }

    /**
     * @return The status code of the export task. Specifying a status code filters the results to zero or more export
     *         tasks.
     * @see ExportTaskStatusCode
     */
    public String getStatusCode() {
        return this.statusCode;
    }

    /**
     * Fluent variant of {@link #setStatusCode(String)}.
     *
     * @param statusCode
     *        The status code of the export task. Specifying a status code filters the results to zero or more export
     *        tasks.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ExportTaskStatusCode
     */
    public DescribeExportTasksRequest withStatusCode(String statusCode) {
        this.statusCode = statusCode;
        return this;
    }

    /**
     * Sets the status-code filter from the typed enum; stored as its string form.
     *
     * @param statusCode
     *        The status code of the export task. Specifying a status code filters the results to zero or more export
     *        tasks.
     * @see ExportTaskStatusCode
     */
    public void setStatusCode(ExportTaskStatusCode statusCode) {
        this.statusCode = statusCode.toString();
    }

    /**
     * Fluent variant of {@link #setStatusCode(ExportTaskStatusCode)}.
     *
     * @param statusCode
     *        The status code of the export task. Specifying a status code filters the results to zero or more export
     *        tasks.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ExportTaskStatusCode
     */
    public DescribeExportTasksRequest withStatusCode(ExportTaskStatusCode statusCode) {
        this.statusCode = statusCode.toString();
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     *        The token for the next set of items to return. (You received this token from a previous call.)
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The token for the next set of items to return. (You received this token from a previous call.)
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The token for the next set of items to return. (You received this token from a previous call.)
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeExportTasksRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Sets the page-size limit.
     *
     * @param limit
     *        The maximum number of items returned. If you don't specify a value, the default is up to 50 items.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * @return The maximum number of items returned. If you don't specify a value, the default is up to 50 items.
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Fluent variant of {@link #setLimit(Integer)}.
     *
     * @param limit
     *        The maximum number of items returned. If you don't specify a value, the default is up to 50 items.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeExportTasksRequest withLimit(Integer limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Output format is identical to the generated original: set fields only,
     * comma-separated, wrapped in braces.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (taskId != null) {
            sb.append("TaskId: ").append(taskId).append(",");
        }
        if (statusCode != null) {
            sb.append("StatusCode: ").append(statusCode).append(",");
        }
        if (nextToken != null) {
            sb.append("NextToken: ").append(nextToken).append(",");
        }
        if (limit != null) {
            sb.append("Limit: ").append(limit);
        }
        return sb.append("}").toString();
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeExportTasksRequest)) {
            return false;
        }
        final DescribeExportTasksRequest that = (DescribeExportTasksRequest) obj;
        return eq(getTaskId(), that.getTaskId())
                && eq(getStatusCode(), that.getStatusCode())
                && eq(getNextToken(), that.getNextToken())
                && eq(getLimit(), that.getLimit());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (seed 1, null -> 0) as the generated code.
        int result = 1;
        for (final Object field : new Object[] { getTaskId(), getStatusCode(), getNextToken(), getLimit() }) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public DescribeExportTasksRequest clone() {
        return (DescribeExportTasksRequest) super.clone();
    }

}
/* ======================================================================== * * This file is part of CODEC, which is a Java package for encoding * and decoding ASN.1 data structures. * * Author: Fraunhofer Institute for Computer Graphics Research IGD * Department A8: Security Technology * Fraunhoferstr. 5, 64283 Darmstadt, Germany * * Rights: Copyright (c) 2004 by Fraunhofer-Gesellschaft * zur Foerderung der angewandten Forschung e.V. * Hansastr. 27c, 80686 Munich, Germany. * * ------------------------------------------------------------------------ * * The software package is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of the * License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software package; if not, write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301, USA or obtain a copy of the license at * http://www.fsf.org/licensing/licenses/lgpl.txt. * * ------------------------------------------------------------------------ * * The CODEC library can solely be used and distributed according to * the terms and conditions of the GNU Lesser General Public License for * non-commercial research purposes and shall not be embedded in any * products or services of any user or of any third party and shall not * be linked with any products or services of any user or of any third * party that will be commercially exploited. * * The CODEC library has not been tested for the use or application * for a determined purpose. 
It is a developing version that can * possibly contain errors. Therefore, Fraunhofer-Gesellschaft zur * Foerderung der angewandten Forschung e.V. does not warrant that the * operation of the CODEC library will be uninterrupted or error-free. * Neither does Fraunhofer-Gesellschaft zur Foerderung der angewandten * Forschung e.V. warrant that the CODEC library will operate and * interact in an uninterrupted or error-free way together with the * computer program libraries of third parties which the CODEC library * accesses and which are distributed together with the CODEC library. * * Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V. * does not warrant that the operation of the third parties's computer * program libraries themselves which the CODEC library accesses will * be uninterrupted or error-free. * * Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V. * shall not be liable for any errors or direct, indirect, special, * incidental or consequential damages, including lost profits resulting * from the combination of the CODEC library with software of any user * or of any third party or resulting from the implementation of the * CODEC library in any products, systems or services of any user or * of any third party. * * Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V. * does not provide any warranty nor any liability that utilization of * the CODEC library will not interfere with third party intellectual * property rights or with any other protected third party rights or will * cause damage to third parties. Fraunhofer Gesellschaft zur Foerderung * der angewandten Forschung e.V. is currently not aware of any such * rights. * * The CODEC library is supplied without any accompanying services. 
* * ======================================================================== */ package codec.util; import java.security.GeneralSecurityException; import java.security.InvalidKeyException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.util.HashSet; import java.util.Iterator; import java.util.Set; /** * Provides utility methods for managing certificates. For instance verification * of certificate chains and similar recurring tasks. * * For each certificate that is verified the following constraints are checked: * <ul> * <li> Validity period. * <li> Key usage bits (<code>keyCertSign</code>(5)). * <li> Basic constraints (chain length constraints). * <li> Issuer to subject chaining. * <li> Signature validity. * </ul> * Future revisions of this class might also provide automatic checking of * certificate revocation lists. * * @author Volker Roth * @version "$Id: CertificateChainVerifier.java,v 1.2 2000/12/06 17:47:34 vroth * Exp $" */ public class CertificateChainVerifier extends Object { /** * The <code>CertificateSource</code> with the trusted CA certificates. */ private CertificateSource trusted_; /** * Creates an instance. * * @param trusted * The <code>CertifiateSource</code> with the trusted CA * certificates. */ public CertificateChainVerifier(CertificateSource trusted) { if (trusted == null) { throw new NullPointerException("Trusted cert source"); } trusted_ = trusted; } /** * Verifies the given certificate. The chain of certificate issuers is * traced using the certificates in <code>other * </code> as well as the * trusted certificates that were passed to the constructor of this * instance. The chain must end in a trusted certificate. * <p> * * In case of mismatches or errors this method aborts with an exception. In * the case of success it completes normally. * * @param cert * The certificate that is verified. * @param other * A <code>CertificateSource</code> with supplementary * certificates. 
These certificates are not treated as * trusted certificates. Hence they do not complete a * certificate chain. * @throws GeneralSecurityException * if something goes wrong. Reasons can be expired * certificates, invalid signatures, unavailable algorithms, * and more. The exact cause is signalled by the actual type * of exception being thrown. For instance, a bad signature * is signalled by means of a * <code>SignatureException</code>. */ public void verify(X509Certificate cert, CertificateSource other) throws GeneralSecurityException { X509Certificate issuerCert; X509Certificate origCert; int chainLength; Set verified; if (cert == null) { throw new NullPointerException("Certificate"); } verified = new HashSet(8); origCert = cert; /* * The invariant is that valid certificates enter the loop. The validity * of issuer certs is checked within the loop. */ cert.checkValidity(); /* * Repeat ad infinitum unless we hit a valid trusted CA certificate. */ for (chainLength = 0; true; chainLength++) { /* * Check for vicious cycles in the certificate chain. */ if (verified.contains(cert)) { throw new CertificateException("Circular chain!"); } issuerCert = checkIssuer(cert, trusted_, chainLength); /* * Did we hit a trusted cert? This is the exit point of the method * for successful verification. */ if (issuerCert != null) { return; } issuerCert = checkIssuer(cert, other, chainLength); /* * If there is a matching valid issuer cert in the 'other' cert * source then we go on and try to match that one against the * trusted certs. If there isn't then we boil out the hard way. */ if (issuerCert == null) { fail("Untrusted certificate: %s", origCert); } cert = issuerCert; } } /** * Verifies the given certificate against the trusted certificates passed to * the constructor of this instance. If the verification succeeds then this * method completes normally. Otherwise, an exception is thrown. * * @throws GeneralSecurityException * if the verification fails. 
The exact cause is signalled * by means of the exception sub-type. */ public void verify(X509Certificate cert) throws GeneralSecurityException { if (cert == null) { throw new NullPointerException("Certificate"); } /* * The invariant is that valid certificates enter the loop. The validity * of issuer certs is checked within the loop. */ cert.checkValidity(); if (checkIssuer(cert, trusted_, 0) == null) { fail("Untrusted certificate: %s", cert); } return; } /** * Verifies the given certificate chain. In case of a successful * verification this method completes normally. Otherwise, it throws an * exception. In order for the verification to succeed, at least one * certificate in it must be a trusted certificate and the chain must be * valid up to the trusted certificate. * * @param chain * The chain of certificates to be verified. The chain starts * at index 0. Each certificate but the first must * authenticate the preceeding certificate in the chain. * @throws GeneralSecurityException * if the verification fails. */ public void verifyChain(X509Certificate[] chain) throws GeneralSecurityException { if (chain == null) { throw new NullPointerException("Chain"); } X509Certificate issuerCert; X509Certificate cert; boolean[] usage; int maxChainLength; int n; if (chain.length < 1 || chain[0] == null) { throw new CertificateException( "Chain is empty or element 0 is null!"); } cert = chain[0]; cert.checkValidity(); if (isTrusted(cert)) { return; } for (n = 1; n < chain.length; n++) { issuerCert = chain[n]; if (issuerCert == null) { throw new CertificateException("Null cert at " + n); } issuerCert.checkValidity(); /* * Check key usage extension bits. */ usage = issuerCert.getKeyUsage(); if (usage == null || usage.length < 6 || !usage[5]) { fail("Not a key signing certificate: %s", issuerCert); } /* * Check basic constraints. 
*/ maxChainLength = issuerCert.getBasicConstraints(); if (maxChainLength < 0) { fail("Chain contains non CA cert: %s", issuerCert); } if (maxChainLength + 1 < n) { fail("Chain too long at %s", issuerCert); } if (!cert.getIssuerDN().equals(issuerCert.getSubjectDN())) { fail("Issuer vs. subject mismatch in cert: %s", cert); } cert.verify(issuerCert.getPublicKey()); /* * Check if we already found a trusted cert. */ if (isTrusted(issuerCert)) { return; } } fail("Chain of %s is not trusted!", chain[0]); } /** * Checks if the given certificate is a trusted certificate. * * @param cert * The certificate to check. * @return <code>true</code> if <code>cert</code> is a trusted * certificate and <code>false</code> otherwise. */ public boolean isTrusted(X509Certificate cert) { X509Certificate trustedCert; if (cert == null) { return false; } trustedCert = trusted_.getCertificate(cert.getIssuerDN(), cert .getSerialNumber()); if (trustedCert == null) { return false; } return trustedCert.equals(cert); } /** * Retrieves the potential issuer certificates of the given certificate from * <code>certSource</code> and does the appropriate verification steps. In * case of success, the issuer certificate is returned and <code>null</code> * otherwise. * * @param cert * The certificate to check, and whose issuing certificate * shall be returned. * @param certSource * The <code>CertificateSource</code> with the certificates * that are able to authenticate <code>cert</code>. * @param chainLength * The current length of the chain. This value is required * for testing the basic constraints on the issuing * certificates. If <code> * cert</code> is an end-user * certificate then <code> * chainLength</code> must be 0. * @return The issuing certificate or <code>null</code> is none could be * found in <code>certSource</code>. * @throws GeneralSecurityException * if the verification fails. 
*/ private X509Certificate checkIssuer(X509Certificate cert, CertificateSource certSource, int chainLength) throws GeneralSecurityException { X509Certificate issuerCert; Iterator i; int maxChainLength; /* * There might be more than one matching issuer certificate, e.g. if the * issuer is cross-certified with multiple other CAs. */ i = certSource.certificates(cert.getIssuerDN(), CertificateSource.KEY_CERT_SIGN); while (i.hasNext()) { issuerCert = (X509Certificate) i.next(); /* * Step 1: Check if the issuer cert is still valid. Key usage bits * were checked implicitly by the iterator. */ try { issuerCert.checkValidity(); } catch (CertificateException e) { System.err.println("Warning, trusted cert is not current:\n" + issuerCert); continue; } /* * Step 2: Verify the certificate, this might fail because we got a * wrong issuer certificate with a key type that doesn't match. * However, this shouldn't happen in practice because CAs should use * different distinguished names for different certificates. */ try { cert.verify(issuerCert.getPublicKey()); } catch (InvalidKeyException e) { continue; } /* * Step 3: Check basic constraints. The current chain length must * not be longer than certified in the current issuer cert. */ maxChainLength = issuerCert.getBasicConstraints(); if (maxChainLength < 0) { fail("Trusted cert is not a CA cert: %s", issuerCert); } if (maxChainLength < chainLength) { fail("Certificate chain too long (" + maxChainLength + " > " + chainLength + ") at %s", issuerCert); } return issuerCert; } return null; } /** * Throws a <code>CertificateException</code> with the given error * message. If <code>message</code> contains &quot;%s&quot; then the first * occurence of this substring is replaced by a string that gives the issuer * DN and serial number of <code>cert</code>. No <code>null</code> * arguments are accepted. * * @param message * The message of the exception to be thrown. 
* @param cert * The certificate whose issuer DN and serial number shall be * substituted into <code> * message</code>. * @throws CertificateException * always. */ private void fail(String message, X509Certificate cert) throws CertificateException { int n; n = message.indexOf("%s"); if (n >= 0) { message = message.substring(0, n) + "issuer=\"" + cert.getIssuerDN().getName() + "\", serial=" + cert.getSerialNumber() + message.substring(n + 2); } throw new CertificateException(message); } }
package com.kuloud.android.aboutlibraries.ui.adapter; import android.app.AlertDialog; import android.content.Context; import android.content.Intent; import android.graphics.drawable.Drawable; import android.net.Uri; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.ImageView; import android.widget.TextView; import com.kuloud.android.aboutlibraries.entity.Library; import com.kuloud.android.aboutlibraries.util.MovementCheck; import com.kuloud.android.aboutlibraries.R; import java.util.LinkedList; import java.util.List; public class LibsRecyclerViewAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> { private static final int TYPE_HEADER = 0; private static final int TYPE_ITEM = 1; private Context ctx; private List<Library> libs = new LinkedList<Library>(); private boolean showLicense = false; private boolean showLicenseDialog = true; private boolean showVersion = false; private boolean header = false; private String aboutAppName; private String aboutSpecial1; private String aboutSpecial1Description; private String aboutSpecial2; private String aboutSpecial2Description; private String aboutSpecial3; private String aboutSpecial3Description; private Boolean aboutShowVersion; private Boolean aboutShowVersionName; private Boolean aboutShowVersionCode; private Integer aboutVersionCode; private String aboutVersionName; private String aboutDescription; private boolean aboutShowIcon; private Drawable aboutIcon; public LibsRecyclerViewAdapter(Context ctx, boolean showLicense, boolean showLicenseDialog, boolean showVersion) { this.ctx = ctx; this.showLicense = showLicense; this.showLicenseDialog = showLicenseDialog; this.showVersion = showVersion; } @Override public RecyclerView.ViewHolder onCreateViewHolder(final ViewGroup viewGroup, int viewType) { if (viewType == 
TYPE_HEADER) { View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.listheader_opensource, viewGroup, false); return new HeaderViewHolder(v); } View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.listitem_opensource, viewGroup, false); return new ViewHolder(v); } @Override public void onBindViewHolder(final RecyclerView.ViewHolder viewHolder, int position) { if (viewHolder instanceof HeaderViewHolder) { HeaderViewHolder holder = (HeaderViewHolder) viewHolder; //Set the Icon or hide it if (aboutShowIcon && aboutIcon != null) { holder.aboutIcon.setImageDrawable(aboutIcon); } else { holder.aboutIcon.setVisibility(View.GONE); } //Set the description or hide it if (!TextUtils.isEmpty(aboutAppName)) { holder.aboutAppName.setText(aboutAppName); } else { holder.aboutAppName.setVisibility(View.GONE); } // Reset aboutSpecial fields holder.aboutSpecialContainer.setVisibility(View.GONE); holder.aboutSpecial1.setVisibility(View.GONE); holder.aboutSpecial2.setVisibility(View.GONE); holder.aboutSpecial3.setVisibility(View.GONE); // set the values for the special fields if (!TextUtils.isEmpty(aboutSpecial1) && !TextUtils.isEmpty(aboutSpecial1Description)) { holder.aboutSpecial1.setText(aboutSpecial1); holder.aboutSpecial1.setVisibility(View.VISIBLE); holder.aboutSpecial1.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { AlertDialog.Builder builder = new AlertDialog.Builder(ctx); builder.setMessage(Html.fromHtml(aboutSpecial1Description)); builder.create().show(); } catch (Exception ex) { } } }); holder.aboutSpecialContainer.setVisibility(View.VISIBLE); } if (!TextUtils.isEmpty(aboutSpecial2) && !TextUtils.isEmpty(aboutSpecial2Description)) { holder.aboutSpecial2.setText(aboutSpecial2); holder.aboutSpecial2.setVisibility(View.VISIBLE); holder.aboutSpecial2.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { AlertDialog.Builder builder = new 
AlertDialog.Builder(ctx); builder.setMessage(Html.fromHtml(aboutSpecial2Description)); builder.create().show(); } catch (Exception ex) { } } }); holder.aboutSpecialContainer.setVisibility(View.VISIBLE); } if (!TextUtils.isEmpty(aboutSpecial3) && !TextUtils.isEmpty(aboutSpecial3Description)) { holder.aboutSpecial3.setText(aboutSpecial3); holder.aboutSpecial3.setVisibility(View.VISIBLE); holder.aboutSpecial3.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { AlertDialog.Builder builder = new AlertDialog.Builder(ctx); builder.setMessage(Html.fromHtml(aboutSpecial3Description)); builder.create().show(); } catch (Exception ex) { } } }); holder.aboutSpecialContainer.setVisibility(View.VISIBLE); } //set the Version or hide it if (aboutShowVersion != null && aboutShowVersion) { holder.aboutVersion.setText(ctx.getString(R.string.version) + " " + aboutVersionName + " (" + aboutVersionCode + ")"); } else { if (aboutShowVersionName != null && aboutShowVersionName) { holder.aboutVersion.setText(ctx.getString(R.string.version) + " " + aboutVersionName); } else if (aboutShowVersionCode != null && aboutShowVersionCode) { holder.aboutVersion.setText(ctx.getString(R.string.version) + " " + aboutVersionCode); } else { holder.aboutVersion.setVisibility(View.GONE); } } //Set the description or hide it if (!TextUtils.isEmpty(aboutDescription)) { holder.aboutAppDescription.setText(Html.fromHtml(aboutDescription)); holder.aboutAppDescription.setMovementMethod(MovementCheck.getInstance()); } else { holder.aboutAppDescription.setVisibility(View.GONE); } //if there is no description or no icon and version number hide the divider if (!aboutShowIcon && !aboutShowVersion || TextUtils.isEmpty(aboutDescription)) { holder.aboutDivider.setVisibility(View.GONE); } } else if (viewHolder instanceof ViewHolder) { ViewHolder holder = (ViewHolder) viewHolder; final Library library = getItem(position); //Set texts 
holder.libraryName.setText(library.getLibraryName()); holder.libraryCreator.setText(library.getAuthor()); if (TextUtils.isEmpty(library.getLibraryDescription())) { holder.libraryDescription.setText(library.getLibraryDescription()); } else { holder.libraryDescription.setText(Html.fromHtml(library.getLibraryDescription())); } //Set License or Version Text if (TextUtils.isEmpty(library.getLibraryVersion()) && library.getLicense() != null && TextUtils.isEmpty(library.getLicense().getLicenseName()) || (!showVersion && !showLicense)) { holder.libraryBottomDivider.setVisibility(View.GONE); holder.libraryBottomContainer.setVisibility(View.GONE); } else { holder.libraryBottomDivider.setVisibility(View.VISIBLE); holder.libraryBottomContainer.setVisibility(View.VISIBLE); if (!TextUtils.isEmpty(library.getLibraryVersion()) && showVersion) { holder.libraryVersion.setText(library.getLibraryVersion()); } else { holder.libraryVersion.setText(""); } if (library.getLicense() != null && !TextUtils.isEmpty(library.getLicense().getLicenseName()) && showLicense) { holder.libraryLicense.setText(library.getLicense().getLicenseName()); } else { holder.libraryLicense.setText(""); } } //Define onClickListener if (!TextUtils.isEmpty(library.getAuthorWebsite())) { holder.libraryCreator.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { try { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(library.getAuthorWebsite())); ctx.startActivity(browserIntent); } catch (Exception ex) { } } }); } else { holder.libraryCreator.setOnClickListener(null); } if (!TextUtils.isEmpty(library.getLibraryWebsite())) { holder.libraryDescription.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(library.getLibraryWebsite())); ctx.startActivity(browserIntent); } catch (Exception ex) { } } }); } else { holder.libraryDescription.setOnClickListener(null); } if 
(library.getLicense() != null && !TextUtils.isEmpty((library.getLicense().getLicenseWebsite()))) { holder.libraryBottomContainer.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { try { if (showLicenseDialog && !TextUtils.isEmpty(library.getLicense().getLicenseDescription())) { AlertDialog.Builder builder = new AlertDialog.Builder(ctx); builder.setMessage(Html.fromHtml(library.getLicense().getLicenseDescription())); builder.create().show(); } else { Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(library.getLicense().getLicenseWebsite())); ctx.startActivity(browserIntent); } } catch (Exception ex) { } } }); } else { holder.libraryBottomContainer.setOnClickListener(null); } } } @Override public int getItemViewType(int position) { if (position == 0 && header) { return TYPE_HEADER; } return TYPE_ITEM; } @Override public int getItemCount() { return libs == null ? 0 : libs.size(); } public Library getItem(int pos) { return libs.get(pos); } public long getItemId(int pos) { return pos; } public void setLibs(List<Library> libs) { this.libs = libs; this.notifyItemRangeInserted(0, libs.size() - 1); } public void addLibs(List<Library> libs) { this.libs.addAll(libs); } public void setHeader(String aboutAppName, String aboutDescription, String aboutSpecial1, String aboutSpecial1Description, String aboutSpecial2, String aboutSpecial2Description, String aboutSpecial3, String aboutSpecial3Description, String aboutVersionName, Integer aboutVersionCode, Boolean aboutShowVersion, Boolean aboutShowVersionName, Boolean aboutShowVersionCode, Drawable aboutIcon, boolean aboutShowIcon) { this.header = true; this.libs.add(0, null); this.aboutAppName = aboutAppName; this.aboutDescription = aboutDescription; this.aboutSpecial1 = aboutSpecial1; this.aboutSpecial1Description = aboutSpecial1Description; this.aboutSpecial2 = aboutSpecial2; this.aboutSpecial2Description = aboutSpecial2Description; this.aboutSpecial3 = aboutSpecial3; 
this.aboutSpecial3Description = aboutSpecial3Description; this.aboutVersionName = aboutVersionName; this.aboutVersionCode = aboutVersionCode; this.aboutShowVersion = aboutShowVersion; this.aboutShowVersionName = aboutShowVersionName; this.aboutShowVersionCode = aboutShowVersionCode; this.aboutIcon = aboutIcon; this.aboutShowIcon = aboutShowIcon; this.notifyItemInserted(0); } public void deleteHeader() { if (header) { if (this.libs.size() > 0) { this.libs.remove(0); } } this.header = false; } public static class HeaderViewHolder extends RecyclerView.ViewHolder { ImageView aboutIcon; TextView aboutAppName; View aboutSpecialContainer; Button aboutSpecial1; Button aboutSpecial2; Button aboutSpecial3; TextView aboutVersion; View aboutDivider; TextView aboutAppDescription; public HeaderViewHolder(View headerView) { super(headerView); //get the about this app views aboutIcon = (ImageView) headerView.findViewById(R.id.aboutIcon); aboutAppName = (TextView) headerView.findViewById(R.id.aboutName); aboutSpecialContainer = headerView.findViewById(R.id.aboutSpecialContainer); aboutSpecial1 = (Button) headerView.findViewById(R.id.aboutSpecial1); aboutSpecial2 = (Button) headerView.findViewById(R.id.aboutSpecial2); aboutSpecial3 = (Button) headerView.findViewById(R.id.aboutSpecial3); aboutVersion = (TextView) headerView.findViewById(R.id.aboutVersion); aboutDivider = headerView.findViewById(R.id.aboutDivider); aboutAppDescription = (TextView) headerView.findViewById(R.id.aboutDescription); } } public static class ViewHolder extends RecyclerView.ViewHolder { View card; TextView libraryName; TextView libraryCreator; TextView libraryDescription; View libraryBottomDivider; View libraryBottomContainer; TextView libraryVersion; TextView libraryLicense; public ViewHolder(View itemView) { super(itemView); card = itemView; libraryName = (TextView) itemView.findViewById(R.id.libraryName); libraryCreator = (TextView) itemView.findViewById(R.id.libraryCreator); libraryDescription = 
(TextView) itemView.findViewById(R.id.libraryDescription); libraryBottomDivider = itemView.findViewById(R.id.libraryBottomDivider); libraryBottomContainer = itemView.findViewById(R.id.libraryBottomContainer); libraryVersion = (TextView) itemView.findViewById(R.id.libraryVersion); libraryLicense = (TextView) itemView.findViewById(R.id.libraryLicense); } } }
/*
 * See LICENSE file in distribution for copyright and licensing
 * information.
 */
package ioke.lang;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import ioke.lang.exceptions.ControlFlow;
import ioke.lang.util.IdentitySet;

/**
 * Native data backing the Ioke "Set" kind. Wraps a {@code java.util.Set} and
 * registers the Set cell methods (hashing, comparison, mutation, set algebra
 * and iteration) on the prototype object in {@link #init}.
 *
 * @author <a href="mailto:ola.bini@gmail.com">Ola Bini</a>
 */
public class IokeSet extends IokeData {
    // Backing collection; replaced wholesale by "withIdentitySemantics!".
    private Set<Object> set;

    /** Creates an empty set backed by a HashSet. */
    public IokeSet() {
        this(new HashSet<>());
    }

    /** Wraps the given backing set without copying it. */
    public IokeSet(Set<Object> s) {
        this.set = s;
    }

    /**
     * Sets the kind to "Set", mixes in Sequenced, and registers every native
     * Set method on {@code obj}. Each registration string doubles as the
     * user-facing documentation for that method.
     */
    @Override
    public void init(IokeObject obj) throws ControlFlow {
        final Runtime runtime = obj.runtime;

        obj.setKind("Set");
        // Mix in Sequenced so generic sequence operations apply to sets.
        obj.mimics(IokeObject
                .as(runtime.mixins.getCell(null, null, "Sequenced"), null),
                runtime.nul, runtime.nul);

        // Delegates to the backing Java set's hashCode.
        obj.registerMethod(
                runtime.newNativeMethod("returns a hash for the set",
                        new NativeMethod.WithNoArguments("hash") {
                            @Override
                            public Object activate(IokeObject method,
                                    IokeObject context, IokeObject message,
                                    Object on) throws ControlFlow {
                                getArguments().getEvaluatedArguments(
                                        context, message, on,
                                        new ArrayList<>(),
                                        new HashMap<String, Object>());
                                return context.runtime.newNumber(
                                        ((IokeSet) IokeObject.data(on)).set
                                                .hashCode());
                            }
                        }));

        obj.registerMethod(runtime.newNativeMethod(
                "returns true if the left hand side set is equal to the right hand side set.",
                new TypeCheckingNativeMethod("==") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("other")
                            .getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject self, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        getArguments().getEvaluatedArguments(context,
                                message, on, args,
                                new HashMap<String, Object>());
                        Object other = args.get(0);
                        // Equal only when the other object is also a native
                        // Set and the backing Java sets compare equal.
                        return ((other instanceof IokeObject)
                                && (IokeObject
                                        .data(other) instanceof IokeSet)
                                && ((IokeSet) IokeObject.data(on)).set
                                        .equals(((IokeSet) IokeObject
                                                .data(other)).set))
                                                        ? context.runtime._true
                                                        : context.runtime._false;
                    }
                }));

        obj.registerMethod(obj.runtime.newNativeMethod(
                "Returns a text inspection of the object",
                new TypeCheckingNativeMethod.WithNoArguments("inspect",
                        runtime.set) {
                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        return method.runtime
                                .newText(IokeSet.getInspect(on));
                    }
                }));

        // Swaps the backing set for an IdentitySet (reference equality).
        obj.registerMethod(obj.runtime.newNativeMethod(
                "Converts this set to use identity semantics, and then returns it.",
                new TypeCheckingNativeMethod.WithNoArguments(
                        "withIdentitySemantics!", runtime.set) {
                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        IokeSet set = (IokeSet) IokeObject.data(on);
                        set.set = new IdentitySet<>(set.set);
                        return on;
                    }
                }));

        obj.registerMethod(obj.runtime.newNativeMethod(
                "Returns a brief text inspection of the object",
                new TypeCheckingNativeMethod.WithNoArguments("notice",
                        runtime.set) {
                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        return method.runtime
                                .newText(IokeSet.getNotice(on));
                    }
                }));

        obj.registerMethod(obj.runtime.newNativeMethod(
                "returns true if this set is empty, false otherwise",
                new TypeCheckingNativeMethod.WithNoArguments("empty?",
                        runtime.set) {
                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        return ((IokeSet) IokeObject.data(on)).getSet()
                                .isEmpty() ? context.runtime._true
                                        : context.runtime._false;
                    }
                }));

        // Mutating append; returns the receiver for chaining.
        obj.registerMethod(obj.runtime.newNativeMethod(
                "Adds the argument to this set, if it's not already in the set. Returns the set after adding the object.",
                new TypeCheckingNativeMethod("<<") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("value")
                            .getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        ((IokeSet) IokeObject.data(on)).set
                                .add(args.get(0));
                        return on;
                    }
                }));

        obj.registerMethod(obj.runtime.newNativeMethod(
                "Removes the argument from the set, if it's in the set. Returns the set after removing the object.",
                new TypeCheckingNativeMethod("remove!") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("value")
                            .getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        ((IokeSet) IokeObject.data(on)).set
                                .remove(args.get(0));
                        return on;
                    }
                }));

        // Union: builds a fresh set; neither operand is mutated.
        obj.registerMethod(runtime.newNativeMethod(
                "returns a new set that contains the receivers elements and the elements of the set sent in as the argument.",
                new TypeCheckingNativeMethod("+") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        Set<Object> newSet = new HashSet<>();
                        newSet.addAll(
                                ((IokeSet) IokeObject.data(on)).getSet());
                        newSet.addAll(
                                ((IokeSet) IokeObject.data(args.get(0)))
                                        .getSet());
                        return context.runtime.newSet(newSet);
                    }
                }));

        // Intersection (U+2229), also non-mutating.
        obj.registerMethod(runtime.newNativeMethod(
                "returns a new set that is the intersection of the receiver and the argument.",
                new TypeCheckingNativeMethod("\u2229") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        Set<Object> newSet = new HashSet<>();
                        newSet.addAll(
                                ((IokeSet) IokeObject.data(on)).getSet());
                        newSet.retainAll(
                                ((IokeSet) IokeObject.data(args.get(0)))
                                        .getSet());
                        return context.runtime.newSet(newSet);
                    }
                }));

        // Subset test (U+2286): receiver contained in argument.
        obj.registerMethod(runtime.newNativeMethod(
                "returns true if this set is a subset of the argument set",
                new TypeCheckingNativeMethod("\u2286") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        boolean result = ((IokeSet) IokeObject
                                .data(args.get(0))).getSet().containsAll(
                                        ((IokeSet) IokeObject.data(on))
                                                .getSet());
                        return result ? context.runtime._true
                                : context.runtime._false;
                    }
                }));

        // Proper subset (U+2282): contained and strictly smaller.
        obj.registerMethod(runtime.newNativeMethod(
                "returns true if this set is a proper subset of the argument set",
                new TypeCheckingNativeMethod("\u2282") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        Set<Object> one = ((IokeSet) IokeObject
                                .data(args.get(0))).getSet();
                        Set<Object> two = ((IokeSet) IokeObject.data(on))
                                .getSet();
                        boolean result = one.containsAll(two);
                        return (result && two.size() < one.size())
                                ? context.runtime._true
                                : context.runtime._false;
                    }
                }));

        // Superset test (U+2287): receiver contains the argument.
        obj.registerMethod(runtime.newNativeMethod(
                "returns true if this set is a superset of the argument set",
                new TypeCheckingNativeMethod("\u2287") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        boolean result = ((IokeSet) IokeObject.data(on))
                                .getSet().containsAll(((IokeSet) IokeObject
                                        .data(args.get(0))).getSet());
                        return result ? context.runtime._true
                                : context.runtime._false;
                    }
                }));

        // Proper superset (U+2283): contains and strictly larger.
        obj.registerMethod(runtime.newNativeMethod(
                "returns true if this set is a proper superset of the argument set",
                new TypeCheckingNativeMethod("\u2283") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("otherSet")
                            .whichMustMimic(runtime.set).getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        Set<Object> one = ((IokeSet) IokeObject
                                .data(args.get(0))).getSet();
                        Set<Object> two = ((IokeSet) IokeObject.data(on))
                                .getSet();
                        boolean result = two.containsAll(one);
                        return (result && two.size() > one.size())
                                ? context.runtime._true
                                : context.runtime._false;
                    }
                }));

        obj.registerMethod(obj.runtime.newNativeMethod(
                "returns true if the receiver includes the evaluated argument, otherwise false",
                new TypeCheckingNativeMethod("include?") {
                    private final TypeCheckingArgumentsDefinition ARGUMENTS = TypeCheckingArgumentsDefinition
                            .builder().receiverMustMimic(runtime.set)
                            .withRequiredPositional("object")
                            .getArguments();

                    @Override
                    public TypeCheckingArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        return ((IokeSet) IokeObject.data(on)).getSet()
                                .contains(args.get(0))
                                        ? context.runtime._true
                                        : context.runtime._false;
                    }
                }));

        // Wraps the backing set's iterator in an iterator-sequence object.
        obj.registerMethod(obj.runtime.newNativeMethod(
                "returns a new sequence to iterate over this set",
                new TypeCheckingNativeMethod.WithNoArguments("seq",
                        runtime.set) {
                    @Override
                    public Object activate(IokeObject method, Object on,
                            List<Object> args,
                            Map<String, Object> keywords,
                            IokeObject context, IokeObject message)
                            throws ControlFlow {
                        // NOTE: local `obj` deliberately shadows the init
                        // parameter; it is the freshly allocated sequence.
                        IokeObject obj = method.runtime.iteratorSequence
                                .allocateCopy(null, null);
                        obj.singleMimicsWithoutCheck(
                                method.runtime.iteratorSequence);
                        obj.setData(new Sequence.IteratorSequence(
                                ((IokeSet) IokeObject.data(on)).set
                                        .iterator()));
                        return obj;
                    }
                }));

        // `each` dispatches on the raw (unevaluated) argument count:
        //   0 -> delegate to `seq`; 1 -> send code to every element;
        //   2 -> bind (value, code); 3 -> bind (index, value, code).
        obj.registerMethod(runtime.newNativeMethod(
                "takes either one, two or three arguments. if one argument is given, it should be a message chain that will be sent to each object in the set. the result will be thrown away. if two arguments are given, the first is an unevaluated name that will be set to each of the values in the set in succession, and then the second argument will be evaluated in a scope with that argument in it. if three arguments is given, the first one is an unevaluated name that will be set to the index of each element, and the other two arguments are the name of the argument for the value, and the actual code. the code will evaluate in a lexical context, and if the argument name is available outside the context, it will be shadowed. the method will return the set. the iteration order is not defined.",
                new NativeMethod("each") {
                    private final DefaultArgumentsDefinition ARGUMENTS = DefaultArgumentsDefinition
                            .builder()
                            .withOptionalPositionalUnevaluated(
                                    "indexOrArgOrCode")
                            .withOptionalPositionalUnevaluated("argOrCode")
                            .withOptionalPositionalUnevaluated("code")
                            .getArguments();

                    @Override
                    public DefaultArgumentsDefinition getArguments() {
                        return ARGUMENTS;
                    }

                    @Override
                    public Object activate(IokeObject method,
                            IokeObject context, IokeObject message,
                            Object on) throws ControlFlow {
                        getArguments().checkArgumentCount(context, message,
                                on);

                        // Coerce the receiver into a Set before iterating.
                        Object onAsSet = context.runtime.set
                                .convertToThis(on, message, context);
                        Set<Object> set = ((IokeSet) IokeObject
                                .data(onAsSet)).set;

                        switch (message.getArgumentCount()) {
                        case 0: {
                            // No arguments: just return a sequence.
                            return Interpreter.send(runtime.seqMessage,
                                    context, on);
                        }
                        case 1: {
                            // Message chain: send it to each element,
                            // discarding the result.
                            IokeObject code = IokeObject.as(
                                    message.getArguments().get(0), context);

                            for (Object o : set) {
                                context.runtime.interpreter.evaluate(
                                        code, context,
                                        context.getRealContext(), o);
                            }
                            break;
                        }
                        case 2: {
                            // Bind each value to the named cell in a fresh
                            // lexical context, then evaluate the code there.
                            IokeObject c = context.runtime
                                    .newLexicalContext(context,
                                            "Lexical activation context for Set#each",
                                            context);
                            String name = IokeObject
                                    .as(message.getArguments().get(0),
                                            context)
                                    .getName();
                            IokeObject code = IokeObject.as(
                                    message.getArguments().get(1), context);

                            for (Object o : set) {
                                c.setCell(name, o);
                                context.runtime.interpreter.evaluate(
                                        code, c, c.getRealContext(), c);
                            }
                            break;
                        }
                        case 3: {
                            // Same as case 2, plus a running index cell.
                            IokeObject c = context.runtime
                                    .newLexicalContext(context,
                                            "Lexical activation context for Set#each",
                                            context);
                            String iname = IokeObject
                                    .as(message.getArguments().get(0),
                                            context)
                                    .getName();
                            String name = IokeObject
                                    .as(message.getArguments().get(1),
                                            context)
                                    .getName();
                            IokeObject code = IokeObject.as(
                                    message.getArguments().get(2), context);

                            int index = 0;
                            for (Object o : set) {
                                c.setCell(name, o);
                                c.setCell(iname,
                                        runtime.newNumber(index++));
                                context.runtime.interpreter.evaluate(
                                        code, c, c.getRealContext(), c);
                            }
                            break;
                        }
                        }
                        return onAsSet;
                    }
                }));
    }

    /** Returns the backing Java set (not a copy). */
    public Set<Object> getSet() {
        return set;
    }

    /** Cloning copies the elements into a fresh HashSet. */
    @Override
    public IokeData cloneData(IokeObject obj, IokeObject m,
            IokeObject context) {
        return new IokeSet(new HashSet<>(set));
    }

    @Override
    public String toString() {
        return set.toString();
    }

    @Override
    public String toString(IokeObject obj) {
        return set.toString();
    }

    /** Static helper used by the "inspect" native method. */
    public static String getInspect(Object on) throws ControlFlow {
        return ((IokeSet) (IokeObject.data(on))).inspect(on);
    }

    /** Static helper used by the "notice" native method. */
    public static String getNotice(Object on) throws ControlFlow {
        return ((IokeSet) (IokeObject.data(on))).notice(on);
    }

    /** Builds "set(e1, e2, ...)" using the full inspect of each element. */
    public String inspect(Object obj) throws ControlFlow {
        StringBuilder sb = new StringBuilder();
        sb.append("set(");
        String sep = "";
        for (Object o : set) {
            sb.append(sep).append(IokeObject.inspect(o));
            sep = ", ";
        }
        sb.append(")");
        return sb.toString();
    }

    /** Builds "set(e1, e2, ...)" using the brief notice of each element. */
    public String notice(Object obj) throws ControlFlow {
        StringBuilder sb = new StringBuilder();
        sb.append("set(");
        String sep = "";
        for (Object o : set) {
            sb.append(sep).append(IokeObject.notice(o));
            sep = ", ";
        }
        sb.append(")");
        return sb.toString();
    }
}// IokeSet
/** * Copyright 2017 Goldman Sachs. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.gs.obevo.impl.reader; import com.gs.obevo.api.appdata.ChangeInput; import com.gs.obevo.api.platform.ChangeType; import com.gs.obevo.impl.DeployMetricsCollectorImpl; import com.gs.obevo.util.hash.DbChangeHashStrategy; import com.gs.obevo.util.vfs.FileObject; import org.apache.commons.vfs2.FileName; import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.list.ImmutableList; import org.eclipse.collections.impl.block.factory.Predicates; import org.eclipse.collections.impl.set.mutable.UnifiedSet; import org.junit.Rule; import org.junit.Test; import org.junit.internal.matchers.ThrowableMessageMatcher; import org.junit.rules.ExpectedException; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class TableChangeParserTest { @Rule public final ExpectedException thrown = ExpectedException.none(); private final String objectName = "MyObj"; private final ChangeType tableChangeType = mock(ChangeType.class); private final GetChangeType getChangeType = TableChangeParser.DEFAULT_IMPL; private static class EmptyContentHashStrategy implements DbChangeHashStrategy { @Override public String 
hashContent(String content) { return content.length() > 6 ? content.substring(0, 6).toLowerCase() : content.toLowerCase(); } }

    /**
     * Verifies templateParams metadata expansion: one physical file with two CHANGE sections and two
     * template-parameter permutations (suffix=1, suffix=2) yields 4 ChangeInputs — 2 per expanded
     * object name ("MyTemplate1", "MyTemplate2").
     */
    @Test
    public void testTemplate() throws Exception {
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "//// METADATA templateParams=\"suffix=1;suffix=2\"\n"
                + "//// CHANGE name=chng1\ncreate1\n"
                + "//// CHANGE name=chng2\ncreate2\n"
                + "";
        ImmutableList<ChangeInput> changes = parser.value(tableChangeType, null, fileContent, "MyTemplate${suffix}", "schema", null);
        assertEquals(4, changes.size());
        assertEquals(2, changes.count(Predicates.attributeEqual(new Function<ChangeInput, Object>() {
            @Override
            public Object valueOf(ChangeInput it) {
                return it.getObjectName();
            }
        }, "MyTemplate1")));
        assertEquals(2, changes.count(Predicates.attributeEqual(new Function<ChangeInput, Object>() {
            @Override
            public Object valueOf(ChangeInput it) {
                return it.getObjectName();
            }
        }, "MyTemplate2")));
    }

    /** Content before the first //// section must be rejected, even when a METADATA body follows. */
    @Test
    public void invalidNoContentAllowedInMetadata() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("First content of the file must be the")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "contentNotAllowedHere\n"
                + "//// METADATA\n"
                + "invalid content\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Prologue text before //// METADATA (with an empty METADATA body) must be rejected. */
    @Test
    public void invalidNoContentAllowedInPrologue1() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("First content of the file must be the")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "contentNotAllowedHere\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Prologue text directly before the first //// CHANGE (no METADATA) must be rejected. */
    @Test
    public void invalidNoContentAllowedInPrologue2() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("First content of the file must be the")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "contentNotAllowedHere\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** A file with content but no //// CHANGE section at all fails with the "no sections" error. */
    @Test
    public void invalidNoContentAllowedInPrologue3() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("No //// CHANGE sections found")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "contentNotAllowedHere\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** A second //// METADATA appearing after CHANGE sections must be rejected in strict mode. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine1() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("Instead, found this section in between")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Same input as above, but the backwards-compatible parser (2nd ctor arg = true) accepts it. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine1_FineInBackwardsCompatibleMode() throws Exception {
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), true, new DeployMetricsCollectorImpl(), new TextMarkupDocumentReader(false), getChangeType);
        String fileContent = "\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** A trailing //// METADATA after the CHANGE body must be rejected in strict mode. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine2() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("Instead, found this section in between")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Same trailing-METADATA input accepted by the backwards-compatible parser. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine2_FineInBackwardsCompatibleMode() throws Exception {
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), true, new DeployMetricsCollectorImpl(), new TextMarkupDocumentReader(false), getChangeType);
        String fileContent = "\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Duplicate //// METADATA sections (before and after CHANGE) must be rejected in strict mode. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine3() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("Instead, found this section in between")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "\n"
                + "//// METADATA\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** Same duplicate-METADATA input accepted by the backwards-compatible parser. */
    @Test
    public void noMetadataContentAllowedAfterFirstLine3_FineInBackwardsCompatibleMode() throws Exception {
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), true, new DeployMetricsCollectorImpl(), new TextMarkupDocumentReader(false), getChangeType);
        String fileContent = "\n"
                + "//// METADATA\n"
                + "//// METADATA\n"
                + "//// CHANGE name=chng1\n"
                + "CREATE TABLE;\n"
                + "//// METADATA\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** An entirely empty file must fail: at least one //// CHANGE section is required. */
    @Test
    public void noContentAtAll1() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("No //// " + TextMarkupDocumentReader.TAG_CHANGE + " sections found; at least one is required")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /** A file with only a METADATA section (no CHANGE) must fail the same way. */
    @Test
    public void noContentAtAll2() throws Exception {
        thrown.expect(IllegalArgumentException.class);
        thrown.expect(new ThrowableMessageMatcher<Throwable>(containsString("No //// " + TextMarkupDocumentReader.TAG_CHANGE + " sections found; at least one is required")));
        TableChangeParser parser = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType);
        String fileContent = "\n"
                + "//// METADATA\n"
                + "";
        parser.value(tableChangeType, null, fileContent, objectName, "schema", null);
    }

    /**
     * Full attribute parse of a single CHANGE section: name, applyGrants, INACTIVE toggle,
     * baselinedChanges list, content body, content hash (EmptyContentHashStrategy = first 6 chars,
     * lowercased), and the ROLLBACK-IF-ALREADY-DEPLOYED sub-section.
     */
    @Test
    public void testDbChange() {
        ChangeInput change = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType)
                .value(tableChangeType, null, "//// CHANGE name=chng5Rollback applyGrants=true INACTIVE baselinedChanges=\"a,b,c\" \nmychange\n\n// ROLLBACK-IF-ALREADY-DEPLOYED\nmyrollbackcommand\n", objectName, "schem", null).get(0);
        assertEquals("schem", change.getObjectKey().getSchema());
        assertEquals("chng5Rollback", change.getChangeKey().getChangeName());
        assertEquals("mychange\n", change.getContent());
        assertEquals("mychan", change.getContentHash());
        assertEquals("myrollbackcommand", change.getRollbackIfAlreadyDeployedContent());
        assertEquals(UnifiedSet.newSetWith("a", "b", "c"), change.getBaselinedChanges().toSet());
        assertFalse(change.isActive());
        assertTrue(change.getApplyGrants());
    }

    /** Same as testDbChange but without applyGrants=true: getApplyGrants() must then be null. */
    @Test
    public void testDbChange2DiffValues() {
        ChangeInput change = new TableChangeParser(new EmptyContentHashStrategy(), getChangeType)
                .value(tableChangeType, null, "//// CHANGE name=chng5Rollback INACTIVE baselinedChanges=\"a,b,c\" \nmychange\n\n// ROLLBACK-IF-ALREADY-DEPLOYED\nmyrollbackcommand\n", objectName, "schem", null).get(0);
        assertEquals("schem", change.getObjectKey().getSchema());
        assertEquals("chng5Rollback", change.getChangeKey().getChangeName());
        assertEquals("mychange\n", change.getContent());
        assertEquals("mychan", change.getContentHash());
        assertEquals("myrollbackcommand", change.getRollbackIfAlreadyDeployedContent());
        assertEquals(UnifiedSet.newSetWith("a", "b", "c"), change.getBaselinedChanges().toSet());
        assertFalse(change.isActive());
        assertNull(change.getApplyGrants());
    }

    /**
     * Builds a mock VFS FileObject whose name and string content are stubbed.
     *
     * @param fileName    base name returned by {@code getName().getBaseName()}
     * @param fileContent payload returned by {@code getStringContent()}
     * @return the stubbed mock
     */
    private FileObject file(String fileName, String fileContent) {
        FileName fileNameObj = mock(FileName.class);
        when(fileNameObj.getBaseName()).thenReturn(fileName);
        FileObject file = mock(FileObject.class);
        when(file.getName()).thenReturn(fileNameObj);
        when(file.getStringContent()).thenReturn(fileContent);
        return file;
    }
}
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.object.db; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javassist.util.proxy.Proxy; import javassist.util.proxy.ProxyObject; import com.orientechnologies.orient.core.command.OCommandRequest; import com.orientechnologies.orient.core.db.ODatabaseComplex; import com.orientechnologies.orient.core.db.ODatabaseSchemaAware; import com.orientechnologies.orient.core.db.ODatabaseWrapperAbstract; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.db.object.OLazyObjectMultivalueElement; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ORecordElement; import com.orientechnologies.orient.core.exception.OConfigurationException; import com.orientechnologies.orient.core.hook.ORecordHook; import com.orientechnologies.orient.core.hook.ORecordHook.RESULT; import com.orientechnologies.orient.core.hook.ORecordHook.TYPE; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.metadata.OMetadata; import 
com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.query.OQuery;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.core.version.ORecordVersion;
import com.orientechnologies.orient.object.enhancement.OObjectProxyMethodHandler;
import com.orientechnologies.orient.object.serialization.OObjectSerializerHelper;

/**
 * Base class for POJO-oriented database wrappers around an {@link ODatabaseDocumentTx}.
 * Maintains three lookup maps that track the pojo&lt;-&gt;record association: pojo-&gt;document
 * (identity-based), document-&gt;pojo (identity-based) and RID-&gt;document. Subclasses supply the
 * actual (de)serialization via {@link #pojo2Stream} and {@link #stream2pojo}.
 */
@SuppressWarnings("unchecked")
public abstract class ODatabasePojoAbstract<T extends Object> extends ODatabaseWrapperAbstract<ODatabaseDocumentTx> implements
    ODatabaseSchemaAware<T> {
  // Identity semantics (==, not equals) are used for the pojo/document caches.
  protected IdentityHashMap<Object, ODocument> objects2Records = new IdentityHashMap<Object, ODocument>();
  protected IdentityHashMap<ODocument, T>      records2Objects = new IdentityHashMap<ODocument, T>();
  protected HashMap<ORID, ODocument>           rid2Records     = new HashMap<ORID, ODocument>();
  // When true (default), handled objects are retained in the maps above.
  protected boolean                            retainObjects   = true;

  public ODatabasePojoAbstract(final ODatabaseDocumentTx iDatabase) {
    super(iDatabase);
    // Make this wrapper the owner of the underlying document database.
    iDatabase.setDatabaseOwner(this);
  }

  /** Serializes the POJO into the given document. Implemented by subclasses. */
  public abstract ODocument pojo2Stream(final T iPojo, final ODocument record);

  /** Deserializes the document into the given POJO using the fetch plan. Implemented by subclasses. */
  public abstract Object stream2pojo(final ODocument record, final Object iPojo, final String iFetchPlan);

  @Override
  public void close() {
    // Drop all cached associations before closing the underlying database.
    objects2Records.clear();
    records2Objects.clear();
    rid2Records.clear();
    super.close();
  }

  public OTransaction getTransaction() {
    return underlying.getTransaction();
  }

  public ODatabaseComplex<T> begin() {
    return (ODatabaseComplex<T>) underlying.begin();
  }

  public ODatabaseComplex<T> begin(final TXTYPE iType) {
    return (ODatabaseComplex<T>) underlying.begin(iType);
  }

  public ODatabaseComplex<T> begin(final OTransaction iTx) {
    return (ODatabaseComplex<T>) underlying.begin(iTx);
  }

  public ODatabaseComplex<T> commit() {
    // Evict not-yet-persisted entries from the caches before delegating the commit.
    clearNewEntriesFromCache();
    underlying.commit();
    return this;
  }

  public ODatabaseComplex<T> rollback() {
    clearNewEntriesFromCache();
    underlying.rollback();

    // Copy the key set because entries are removed from rid2Records while iterating.
    final Set<ORID> rids = new HashSet<ORID>(rid2Records.keySet());

    ORecord<?> record;
    Object object;
    for (ORID rid : rids) {
      // Temporary RIDs belong to records created inside the rolled-back tx: purge all three maps.
      if (rid.isTemporary()) {
        record = rid2Records.remove(rid);
        if (record != null) {
          object = records2Objects.remove(record);
          if (object != null) {
            objects2Records.remove(object);
          }
        }
      }
    }

    return this;
  }

  /**
   * Sets as dirty a POJO. This is useful when you change the object and need to tell to the engine to treat as dirty.
   * 
   * @param iPojo
   *          User object
   */
  public void setDirty(final Object iPojo) {
    if (iPojo == null)
      return;

    final ODocument record = getRecordByUserObject(iPojo, false);
    if (record == null)
      throw new OObjectNotManagedException("The object " + iPojo + " is not managed by current database");

    record.setDirty();
  }

  /**
   * Sets as not dirty a POJO. This is useful when you change some other object and need to tell to the engine to treat this one as
   * not dirty.
   * 
   * @param iPojo
   *          User object
   */
  public void unsetDirty(final Object iPojo) {
    if (iPojo == null)
      return;

    final ODocument record = getRecordByUserObject(iPojo, false);
    if (record == null)
      return; // NOTE: unlike setDirty(), an unmanaged object is silently ignored here.

    record.unsetDirty();
  }

  public void setInternal(final ATTRIBUTES attribute, final Object iValue) {
    underlying.setInternal(attribute, iValue);
  }

  /**
   * Returns the version number of the object. Version starts from 0 assigned on creation.
   * 
   * @param iPojo
   *          User object
   */
  public int getVersion(final Object iPojo) {
    final ODocument record = getRecordByUserObject(iPojo, false);
    if (record == null)
      throw new OObjectNotManagedException("The object " + iPojo + " is not managed by current database");

    return record.getVersion();
  }

  /**
   * Returns the object unique identity.
   * 
   * @param iPojo
   *          User object
   */
  public ORID getIdentity(final Object iPojo) {
    final ODocument record = getRecordByUserObject(iPojo, false);
    if (record == null)
      throw new OObjectNotManagedException("The object " + iPojo + " is not managed by current database");
    return record.getIdentity();
  }

  public OUser getUser() {
    return underlying.getUser();
  }

  public OMetadata getMetadata() {
    return underlying.getMetadata();
  }

  /**
   * Returns a wrapped OCommandRequest instance to catch the result-set by converting it before to return to the user application.
   */
  public <RET extends OCommandRequest> RET command(final OCommandRequest iCommand) {
    return (RET) new OCommandSQLPojoWrapper(this, underlying.command(iCommand));
  }

  public <RET extends List<?>> RET query(final OQuery<?> iCommand, final Object... iArgs) {
    checkOpeness();
    // Replace POJO arguments with their RIDs (or simple values) before delegating.
    convertParameters(iArgs);

    final List<ODocument> result = underlying.query(iCommand, iArgs);

    if (result == null)
      return null;

    final List<Object> resultPojo = new ArrayList<Object>();
    Object obj;
    for (OIdentifiable doc : result) {
      if (doc instanceof ODocument) {
        // GET THE ASSOCIATED DOCUMENT
        if (((ODocument) doc).getClassName() == null)
          obj = doc; // schemaless document: return it as-is
        else
          obj = getUserObjectByRecord(((ODocument) doc), iCommand.getFetchPlan(), true);

        resultPojo.add(obj);
      } else {
        resultPojo.add(doc);
      }
    }

    return (RET) resultPojo;
  }

  public ODatabaseComplex<T> delete(final ORecordInternal<?> iRecord) {
    underlying.delete((ODocument) iRecord);
    return this;
  }

  public ODatabaseComplex<T> delete(final ORID iRID) {
    underlying.delete(iRID);
    return this;
  }

  public ODatabaseComplex<T> delete(final ORID iRID, final ORecordVersion iVersion) {
    underlying.delete(iRID, iVersion);
    return this;
  }

  public ODatabaseComplex<T> cleanOutRecord(final ORID iRID, final ORecordVersion iVersion) {
    underlying.cleanOutRecord(iRID, iVersion);
    return this;
  }

  public <DBTYPE extends ODatabaseComplex<?>> DBTYPE registerHook(final ORecordHook iHookImpl) {
    underlying.registerHook(iHookImpl);
    return (DBTYPE) this;
  }

  public <DBTYPE extends ODatabaseComplex<?>> DBTYPE registerHook(final ORecordHook iHookImpl, ORecordHook.HOOK_POSITION iPosition) {
    underlying.registerHook(iHookImpl, iPosition);
    return (DBTYPE) this;
  }

  public RESULT callbackHooks(final TYPE iType, final OIdentifiable iObject) {
    return underlying.callbackHooks(iType, iObject);
  }

  public Set<ORecordHook> getHooks() {
    return underlying.getHooks();
  }

  public <DBTYPE extends ODatabaseComplex<?>> DBTYPE unregisterHook(final ORecordHook iHookImpl) {
    underlying.unregisterHook(iHookImpl);
    return (DBTYPE) this;
  }

  public boolean isMVCC() {
    return underlying.isMVCC();
  }

  public <DBTYPE extends ODatabaseComplex<?>> DBTYPE setMVCC(final boolean iMvcc) {
    underlying.setMVCC(iMvcc);
    return (DBTYPE) this;
  }

  /**
   * Specifies if retain handled objects in memory or not. Setting it to false can improve performance on large inserts. Default is
   * enabled.
   * 
   * @param iValue
   *          True to enable, false to disable it.
   * @see #isRetainObjects()
   */
  public ODatabasePojoAbstract<T> setRetainObjects(final boolean iValue) {
    retainObjects = iValue;
    return this;
  }

  /**
   * Returns true if current configuration retains objects, otherwise false
   * 
   * @see #setRetainObjects(boolean)
   */
  public boolean isRetainObjects() {
    return retainObjects;
  }

  /**
   * Resolves the document backing a user object: direct for ODocuments and javassist proxies,
   * otherwise via the identity cache, then by RID lookup/load, optionally creating a new record.
   */
  public ODocument getRecordByUserObject(final Object iPojo, final boolean iCreateIfNotAvailable) {
    if (iPojo instanceof ODocument)
      return (ODocument) iPojo;
    else if (iPojo instanceof Proxy)
      // Javassist proxy: the backing document lives in the proxy's method handler.
      return ((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).getDoc();

    ODocument record = objects2Records.get(iPojo);
    if (record == null) {
      // SEARCH BY RID
      final ORID rid = OObjectSerializerHelper.getObjectID(this, iPojo);
      if (rid != null && rid.isValid()) {
        record = rid2Records.get(rid);
        if (record == null)
          // LOAD IT
          record = underlying.load(rid);
      } else if (iCreateIfNotAvailable) {
        record = underlying.newInstance(iPojo.getClass().getSimpleName());
      } else {
        return null;
      }

      registerUserObject(iPojo, record);
    }

    return record;
  }

  public boolean existsUserObjectByRID(ORID iRID) {
    return rid2Records.containsKey(iRID);
  }

  public ODocument getRecordById(final ORID iRecordId) {
    return iRecordId.isValid() ? rid2Records.get(iRecordId) : null;
  }

  public boolean isManaged(final Object iEntity) {
    return objects2Records.containsKey(iEntity);
  }

  public T getUserObjectByRecord(final OIdentifiable iRecord, final String iFetchPlan) {
    return getUserObjectByRecord(iRecord, iFetchPlan, true);
  }

  public T getUserObjectByRecord(final OIdentifiable iRecord, final String iFetchPlan, final boolean iCreate) {
    if (!(iRecord instanceof ODocument))
      return null;

    // PASS FOR rid2Records MAP BECAUSE IDENTITY COULD BE CHANGED IF WAS NEW AND IN TX
    ODocument record = rid2Records.get(iRecord.getIdentity());

    if (record == null)
      record = (ODocument) iRecord;

    Object pojo = records2Objects.get(record);

    if (pojo == null && iCreate) {
      checkOpeness();

      try {
        if (iRecord.getRecord().getInternalStatus() == ORecordElement.STATUS.NOT_LOADED)
          record = (ODocument) record.load();

        pojo = newInstance(record.getClassName());
        registerUserObject(pojo, record);

        stream2pojo(record, pojo, iFetchPlan);

      } catch (Exception e) {
        throw new OConfigurationException("Cannot retrieve pojo from record " + record, e);
      }
    }

    return (T) pojo;
  }

  public void attach(final Object iPojo) {
    checkOpeness();

    final ODocument record = objects2Records.get(iPojo);
    if (record != null)
      return; // already attached: nothing to do

    if (OObjectSerializerHelper.hasObjectID(iPojo)) {
      // Has a stored identity: intentionally a no-op here.
    } else {
      throw new OObjectNotDetachedException("Cannot attach a non-detached object");
    }
  }

  public <RET> RET detach(final Object iPojo) {
    checkOpeness();

    // Detach every lazy multi-value field so it no longer references this database.
    for (Field field : iPojo.getClass().getDeclaredFields()) {
      final Object value = OObjectSerializerHelper.getFieldValue(iPojo, field.getName());
      if (value instanceof OLazyObjectMultivalueElement)
        ((OLazyObjectMultivalueElement<?>) value).detach(false);
    }

    return (RET) iPojo;
  }

  /**
   * Register a new POJO
   */
  public void registerUserObject(final Object iObject, final ORecordInternal<?> iRecord) {
    if (!(iRecord instanceof ODocument))
      return;

    final ODocument doc = (ODocument) iRecord;

    if (retainObjects) {
      if (iObject != null) {
        objects2Records.put(iObject, doc);
        records2Objects.put(doc, (T) iObject);

        // Propagate identity and version back into the POJO's bound fields.
        OObjectSerializerHelper.setObjectID(iRecord.getIdentity(), iObject);
        OObjectSerializerHelper.setObjectVersion(iRecord.getVersion(), iObject);
      }

      final ORID rid = iRecord.getIdentity();
      if (rid.isValid())
        rid2Records.put(rid, doc);
    }
  }

  public void unregisterPojo(final T iObject, final ODocument iRecord) {
    if (iObject != null)
      objects2Records.remove(iObject);

    if (iRecord != null) {
      records2Objects.remove(iRecord);

      final ORID rid = iRecord.getIdentity();
      if (rid.isValid())
        rid2Records.remove(rid);
    }
  }

  protected void clearNewEntriesFromCache() {
    // Remove every cache entry whose identity is still "new" (not yet persisted),
    // checking each of the three maps through its own key/value view.
    for (Iterator<Entry<ORID, ODocument>> it = rid2Records.entrySet().iterator(); it.hasNext();) {
      Entry<ORID, ODocument> entry = it.next();
      if (entry.getKey().isNew()) {
        it.remove();
      }
    }

    for (Iterator<Entry<Object, ODocument>> it = objects2Records.entrySet().iterator(); it.hasNext();) {
      Entry<Object, ODocument> entry = it.next();
      if (entry.getValue().getIdentity().isNew()) {
        it.remove();
      }
    }

    for (Iterator<Entry<ODocument, T>> it = records2Objects.entrySet().iterator(); it.hasNext();) {
      Entry<ODocument, T> entry = it.next();
      if (entry.getKey().getIdentity().isNew()) {
        it.remove();
      }
    }
  }

  /**
   * Converts an array of parameters: if a POJO is used, then replace it with its record id.
   * 
   * @param iArgs
   *          Array of parameters as Object
   * @see #convertParameter(Object)
   */
  protected void convertParameters(final Object... iArgs) {
    if (iArgs == null)
      return;

    // FILTER PARAMETERS
    for (int i = 0; i < iArgs.length; ++i)
      iArgs[i] = convertParameter(iArgs[i]);
  }

  /**
   * Convert a parameter: if a POJO is used, then replace it with its record id.
   * 
   * @param iParameter
   *          Parameter to convert, if applicable
   * @see #convertParameters(Object...)
   */
  protected Object convertParameter(final Object iParameter) {
    if (iParameter != null)
      // FILTER PARAMETERS
      if (iParameter instanceof Map<?, ?>) {
        // Convert map values in place (recursively).
        Map<String, Object> map = (Map<String, Object>) iParameter;

        for (Entry<String, Object> e : map.entrySet()) {
          map.put(e.getKey(), convertParameter(e.getValue()));
        }

      } else if (iParameter instanceof Collection<?>) {
        // Collections are rebuilt as a new list of converted elements.
        List<Object> result = new ArrayList<Object>();
        for (Object object : (Collection<Object>) iParameter) {
          result.add(convertParameter(object));
        }
        return result;
      } else if (iParameter != null && iParameter.getClass().isEnum()) {
        return ((Enum<?>) iParameter).name();
      } else if (iParameter != null && !OType.isSimpleType(iParameter)) {
        final ORID rid = getIdentity(iParameter);
        if (rid != null && rid.isValid())
          // REPLACE OBJECT INSTANCE WITH ITS RECORD ID
          return rid;
      }

    return iParameter;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn; import org.apache.flink.api.common.time.Deadline; import org.apache.flink.api.common.time.Time; import org.apache.flink.client.cli.CliFrontend; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.runtime.rest.RestClient; import org.apache.flink.runtime.rest.RestClientConfiguration; import org.apache.flink.runtime.rest.handler.legacy.messages.ClusterOverviewWithVersion; import org.apache.flink.runtime.rest.messages.ClusterConfigurationInfo; import org.apache.flink.runtime.rest.messages.ClusterConfigurationInfoEntry; import org.apache.flink.runtime.rest.messages.ClusterConfigurationInfoHeaders; import org.apache.flink.runtime.rest.messages.ClusterOverviewHeaders; import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerInfo; import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagersHeaders; import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagersInfo; import org.apache.flink.runtime.testutils.CommonTestUtils; import org.apache.flink.test.testdata.WordCountData; import 
org.apache.flink.testutils.logging.TestLoggerResource; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.yarn.cli.FlinkYarnSessionCli; import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.flink.yarn.util.TestUtils; import org.apache.flink.shaded.guava18.com.google.common.net.HostAndPort; import org.apache.commons.io.FileUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.time.Duration; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import static junit.framework.TestCase.assertTrue; import static org.apache.flink.util.Preconditions.checkState; import static org.apache.flink.yarn.util.TestUtils.getTestJarPath; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasEntry; import 
static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * This test starts a MiniYARNCluster with a CapacityScheduler. It has, by default, a queue called
 * "default". The configuration here adds another queue: "qa-team".
 */
public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
    private static final Logger LOG =
            LoggerFactory.getLogger(YARNSessionCapacitySchedulerITCase.class);

    /** RestClient to query Flink cluster. */
    private static RestClient restClient;

    /**
     * ExecutorService for {@link RestClient}.
     *
     * @see #restClient
     */
    private static ExecutorService restClientExecutor;

    /** Toggles checking for prohibited strings in logs after the test has run. */
    private boolean checkForProhibitedLogContents = true;

    @Rule
    public final TestLoggerResource cliTestLoggerResource =
            new TestLoggerResource(CliFrontend.class, Level.INFO);

    @Rule
    public final TestLoggerResource yarTestLoggerResource =
            new TestLoggerResource(YarnClusterDescriptor.class, Level.WARN);

    /** Configures the capacity scheduler with a 40/60 default/qa-team split and starts YARN + REST client. */
    @BeforeClass
    public static void setup() throws Exception {
        YARN_CONFIGURATION.setClass(
                YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class);
        YARN_CONFIGURATION.set("yarn.scheduler.capacity.root.queues", "default,qa-team");
        YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.default.capacity", 40);
        YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.qa-team.capacity", 60);
        YARN_CONFIGURATION.set(
                YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-capacityscheduler");
        startYARNWithConfig(YARN_CONFIGURATION);

        restClientExecutor = Executors.newSingleThreadExecutor();
        restClient =
                new RestClient(
                        RestClientConfiguration.fromConfiguration(new Configuration()),
                        restClientExecutor);
    }

    /** Tears down YARN first, then shuts down the REST client and its executor even if teardown fails. */
    @AfterClass
    public static void tearDown() throws Exception {
        try {
            YarnTestBase.teardown();
        } finally {
            if (restClient != null) {
                restClient.shutdown(Time.seconds(5));
            }

            if (restClientExecutor != null) {
                restClientExecutor.shutdownNow();
            }
        }
    }

    /**
     * Tests that a session cluster, that uses the resources from the <i>qa-team</i> queue, can be
     * started from the command line.
     */
    @Test
    public void testStartYarnSessionClusterInQaTeamQueue() throws Exception {
        runTest(
                () ->
                        runWithArgs(
                                new String[] {
                                    "-j", flinkUberjar.getAbsolutePath(),
                                    "-t", flinkLibFolder.getAbsolutePath(),
                                    "-jm", "768m",
                                    "-tm", "1024m",
                                    "-qu", "qa-team"
                                },
                                "JobManager Web Interface:",
                                null,
                                RunTypes.YARN_SESSION,
                                0));
    }

    /**
     * Test per-job yarn cluster.
     *
     * <p>This also tests the prefixed CliFrontend options for the YARN case. We also test if the
     * requested parallelism of 2 is passed through. The parallelism is requested at the YARN client
     * (-ys).
     */
    @Test
    public void perJobYarnCluster() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting perJobYarnCluster()");
                    File exampleJarLocation = getTestJarPath("BatchWordCount.jar");
                    runWithArgs(
                            new String[] {
                                "run",
                                "-m", "yarn-cluster",
                                "-yj", flinkUberjar.getAbsolutePath(),
                                "-yt", flinkLibFolder.getAbsolutePath(),
                                "-ys", "2", // test that the job is executed with a DOP of 2
                                "-yjm", "768m",
                                "-ytm", "1024m",
                                exampleJarLocation.getAbsolutePath()
                            },
                            /* test succeeded after this string */
                            "Program execution finished",
                            /* prohibited strings: (to verify the parallelism) */
                            // (we should see "DataSink (...) (1/2)" and "DataSink (...) (2/2)"
                            // instead)
                            new String[] {"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
                            RunTypes.CLI_FRONTEND,
                            0,
                            cliTestLoggerResource::getMessages);
                    LOG.info("Finished perJobYarnCluster()");
                });
    }

    /**
     * Test per-job yarn cluster and memory calculations for off-heap use (see FLINK-7400) with the
     * same job as {@link #perJobYarnCluster()}.
     *
     * <p>This ensures that with (any) pre-allocated off-heap memory by us, there is some off-heap
     * memory remaining for Flink's libraries. Creating task managers will thus fail if no off-heap
     * memory remains.
     */
    @Test
    public void perJobYarnClusterOffHeap() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting perJobYarnCluster()");
                    File exampleJarLocation = getTestJarPath("BatchWordCount.jar");

                    // set memory constraints (otherwise this is the same test as
                    // perJobYarnCluster() above)
                    final long taskManagerMemoryMB = 1024;

                    runWithArgs(
                            new String[] {
                                "run",
                                "-m", "yarn-cluster",
                                "-yj", flinkUberjar.getAbsolutePath(),
                                "-yt", flinkLibFolder.getAbsolutePath(),
                                "-ys", "2", // test that the job is executed with a DOP of 2
                                "-yjm", "768m",
                                "-ytm", taskManagerMemoryMB + "m",
                                exampleJarLocation.getAbsolutePath()
                            },
                            /* test succeeded after this string */
                            "Program execution finished",
                            /* prohibited strings: (to verify the parallelism) */
                            // (we should see "DataSink (...) (1/2)" and "DataSink (...) (2/2)"
                            // instead)
                            new String[] {"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
                            RunTypes.CLI_FRONTEND,
                            0,
                            cliTestLoggerResource::getMessages);
                    LOG.info("Finished perJobYarnCluster()");
                });
    }

    /**
     * Starts a session cluster on YARN, and submits a streaming job.
     *
     * <p>Tests
     *
     * <ul>
     *   <li>if a custom YARN application name can be set from the command line,
     *   <li>if the number of TaskManager slots can be set from the command line,
     *   <li>if dynamic properties from the command line are set,
     *   <li>if the vcores are set correctly (FLINK-2213),
     *   <li>if jobmanager hostname/port are shown in web interface (FLINK-1902)
     * </ul>
     *
     * <p><b>Hint: </b> If you think it is a good idea to add more assertions to this test, think
     * again!
     */
    @Test
    public void testVCoresAreSetCorrectlyAndJobManagerHostnameAreShownInWebInterfaceAndDynamicPropertiesAndYarnApplicationNameAndTaskManagerSlots()
            throws Exception {
        runTest(
                () -> {
                    checkForProhibitedLogContents = false;
                    final Runner yarnSessionClusterRunner =
                            startWithArgs(
                                    new String[] {
                                        "-j", flinkUberjar.getAbsolutePath(),
                                        "-t", flinkLibFolder.getAbsolutePath(),
                                        "-jm", "768m",
                                        "-tm", "1024m",
                                        "-s", "3", // set the slots 3 to check if the vCores are set
                                        // properly!
                                        "-nm", "customName",
                                        "-Dfancy-configuration-value=veryFancy",
                                        "-D" + YarnConfigOptions.VCORES.key() + "=2"
                                    },
                                    "JobManager Web Interface:",
                                    RunTypes.YARN_SESSION);

                    try {
                        final String logs = outContent.toString();
                        final HostAndPort hostAndPort = parseJobManagerHostname(logs);
                        final String host = hostAndPort.getHostText();
                        final int port = hostAndPort.getPort();
                        LOG.info("Extracted hostname:port: {}:{}", host, port);

                        submitJob("WindowJoin.jar");

                        //
                        // Assert that custom YARN application name "customName" is set
                        //
                        final ApplicationReport applicationReport = getOnlyApplicationReport();
                        assertEquals("customName", applicationReport.getName());

                        //
                        // Assert the number of TaskManager slots are set
                        //
                        waitForTaskManagerRegistration(host, port, Duration.ofMillis(30_000));
                        assertNumberOfSlotsPerTask(host, port, 3);

                        final Map<String, String> flinkConfig = getFlinkConfig(host, port);

                        //
                        // Assert dynamic properties
                        //
                        assertThat(flinkConfig, hasEntry("fancy-configuration-value", "veryFancy"));

                        //
                        // FLINK-2213: assert that vcores are set
                        //
                        assertThat(flinkConfig, hasEntry(YarnConfigOptions.VCORES.key(), "2"));

                        //
                        // FLINK-1902: check if jobmanager hostname is shown in web interface
                        //
                        assertThat(flinkConfig, hasEntry(JobManagerOptions.ADDRESS.key(), host));
                    } finally {
                        yarnSessionClusterRunner.sendStop();
                        yarnSessionClusterRunner.join();
                    }
                });
    }

    /** Extracts the last "JobManager Web Interface: http://host:port" occurrence from the session logs. */
    private static HostAndPort parseJobManagerHostname(final String logs) {
        final Pattern p =
                Pattern.compile("JobManager Web Interface: http://([a-zA-Z0-9.-]+):([0-9]+)");
        final Matcher matches = p.matcher(logs);
        String hostname = null;
        String port = null;

        // Keep the last match: later log lines supersede earlier ones.
        while (matches.find()) {
            hostname = matches.group(1).toLowerCase();
            port = matches.group(2);
        }

        checkState(hostname != null, "hostname not found in log");
        checkState(port != null, "port not found in log");

        return HostAndPort.fromParts(hostname, Integer.parseInt(port));
    }

    /** Submits the given example jar detached via the CLI and waits for the submission to finish. */
    private void submitJob(final String jobFileName) throws IOException, InterruptedException {
        Runner jobRunner =
                startWithArgs(
                        new String[] {
                            "run", "--detached", getTestJarPath(jobFileName).getAbsolutePath()
                        },
                        "Job has been submitted with JobID",
                        RunTypes.CLI_FRONTEND);
        jobRunner.join();
    }

    private static void waitForTaskManagerRegistration(
            final String host, final int port, final Duration waitDuration) throws Exception {
        CommonTestUtils.waitUntilCondition(
                () -> getNumberOfTaskManagers(host, port) > 0, Deadline.fromNow(waitDuration));
    }

    /** Polls until a TM reports the expected slot count, failing with a descriptive message on timeout. */
    private static void assertNumberOfSlotsPerTask(
            final String host, final int port, final int slotsNumber) throws Exception {
        try {
            CommonTestUtils.waitUntilCondition(
                    () -> getNumberOfSlotsPerTaskManager(host, port) == slotsNumber,
                    Deadline.fromNow(Duration.ofSeconds(30)));
        } catch (final TimeoutException e) {
            final int currentNumberOfSlots = getNumberOfSlotsPerTaskManager(host, port);
            fail(
                    String.format(
                            "Expected slots per TM to be %d, was: %d",
                            slotsNumber, currentNumberOfSlots));
        }
    }

    private static int getNumberOfTaskManagers(final String host, final int port) throws Exception {
        final ClusterOverviewWithVersion clusterOverviewWithVersion =
                restClient
                        .sendRequest(host, port, ClusterOverviewHeaders.getInstance())
                        .get(30_000, TimeUnit.MILLISECONDS);
        return clusterOverviewWithVersion.getNumTaskManagersConnected();
    }

    /** Returns the slot count of the first reported TaskManager, or 0 when none are registered. */
    private static int getNumberOfSlotsPerTaskManager(final String host, final int port)
            throws Exception {
        final TaskManagersInfo taskManagersInfo =
                restClient.sendRequest(host, port, TaskManagersHeaders.getInstance()).get();
        return taskManagersInfo.getTaskManagerInfos().stream()
                .map(TaskManagerInfo::getNumberSlots)
                .findFirst()
                .orElse(0);
    }

    /** Fetches the effective Flink configuration via the REST API as a key/value map. */
    private static Map<String, String> getFlinkConfig(final String host, final int port)
            throws Exception {
        final ClusterConfigurationInfo clusterConfigurationInfoEntries =
                restClient
                        .sendRequest(host, port, ClusterConfigurationInfoHeaders.getInstance())
                        .get();

        return clusterConfigurationInfoEntries.stream()
                .collect(
                        Collectors.toMap(
                                ClusterConfigurationInfoEntry::getKey,
                                ClusterConfigurationInfoEntry::getValue));
    }

    /**
     * Test deployment to non-existing queue & ensure that the system logs a WARN message for the
     * user. (Users had unexpected behavior of Flink on YARN because they mistyped the target queue.
     * With an error message, we can help users identifying the issue)
     */
    @Test
    public void testNonexistingQueueWARNmessage() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testNonexistingQueueWARNmessage()");
                    try {
                        runWithArgs(
                                new String[] {
                                    "-j", flinkUberjar.getAbsolutePath(),
                                    "-t", flinkLibFolder.getAbsolutePath(),
                                    "-jm", "768m",
                                    "-tm", "1024m",
                                    "-qu", "doesntExist"
                                },
                                "to unknown queue: doesntExist",
                                null,
                                RunTypes.YARN_SESSION,
                                1);
                    } catch (Exception e) {
                        // The deployment may also fail outright; accept it as long as the
                        // unknown-queue message is present in the exception chain.
                        assertTrue(
                                ExceptionUtils.findThrowableWithMessage(
                                                e, "to unknown queue: doesntExist")
                                        .isPresent());
                    }

                    assertThat(
                            yarTestLoggerResource.getMessages(),
                            hasItem(
                                    containsString(
                                            "The specified queue 'doesntExist' does not exist. Available queues")));
                    LOG.info("Finished testNonexistingQueueWARNmessage()");
                });
    }

    /**
     * Test per-job yarn cluster with the parallelism set at the CliFrontend instead of the YARN
     * client.
     */
    @Test
    public void perJobYarnClusterWithParallelism() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting perJobYarnClusterWithParallelism()");
                    File exampleJarLocation = getTestJarPath("BatchWordCount.jar");
                    runWithArgs(
                            new String[] {
                                "run",
                                "-p", "2", // test that the job is executed with a DOP of 2
                                "-m", "yarn-cluster",
                                "-yj", flinkUberjar.getAbsolutePath(),
                                "-yt", flinkLibFolder.getAbsolutePath(),
                                "-ys", "2",
                                "-yjm", "768m",
                                "-ytm", "1024m",
                                exampleJarLocation.getAbsolutePath()
                            },
                            /* test succeeded after this string */
                            "Program execution finished",
                            /* prohibited strings: (we want to see "DataSink (...) (2/2) switched to FINISHED") */
                            new String[] {"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
                            RunTypes.CLI_FRONTEND,
                            0,
                            cliTestLoggerResource::getMessages);
                    LOG.info("Finished perJobYarnClusterWithParallelism()");
                });
    }

    /** Test a fire-and-forget job submission to a YARN cluster. */
    @Test(timeout = 60000)
    public void testDetachedPerJobYarnCluster() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testDetachedPerJobYarnCluster()");
                    File exampleJarLocation = getTestJarPath("BatchWordCount.jar");
                    testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());
                    LOG.info("Finished testDetachedPerJobYarnCluster()");
                });
    }

    /** Test a fire-and-forget job submission to a YARN cluster.
*/ @Test(timeout = 60000) public void testDetachedPerJobYarnClusterWithStreamingJob() throws Exception { runTest( () -> { LOG.info("Starting testDetachedPerJobYarnClusterWithStreamingJob()"); File exampleJarLocation = getTestJarPath("StreamingWordCount.jar"); testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath()); LOG.info("Finished testDetachedPerJobYarnClusterWithStreamingJob()"); }); } private void testDetachedPerJobYarnClusterInternal(String job) throws Exception { YarnClient yc = YarnClient.createYarnClient(); yc.init(YARN_CONFIGURATION); yc.start(); // get temporary folder for writing output of wordcount example File tmpOutFolder = null; try { tmpOutFolder = tmp.newFolder(); } catch (IOException e) { throw new RuntimeException(e); } // get temporary file for reading input data for wordcount example File tmpInFile; try { tmpInFile = tmp.newFile(); FileUtils.writeStringToFile(tmpInFile, WordCountData.TEXT); } catch (IOException e) { throw new RuntimeException(e); } Runner runner = startWithArgs( new String[] { "run", "-m", "yarn-cluster", "-yj", flinkUberjar.getAbsolutePath(), "-yt", flinkLibFolder.getAbsolutePath(), "-yjm", "768m", "-yD", YarnConfigOptions.APPLICATION_TAGS.key() + "=test-tag", "-ytm", "1024m", "-ys", "2", // test requesting slots from YARN. 
"-p", "2", "--detached", job, "--input", tmpInFile.getAbsoluteFile().toString(), "--output", tmpOutFolder.getAbsoluteFile().toString() }, "Job has been submitted with JobID", RunTypes.CLI_FRONTEND); // it should usually be 2, but on slow machines, the number varies Assert.assertTrue( "There should be at most 2 containers running", getRunningContainers() <= 2); // give the runner some time to detach for (int attempt = 0; runner.isAlive() && attempt < 5; attempt++) { try { Thread.sleep(500); } catch (InterruptedException e) { } } Assert.assertFalse("The runner should detach.", runner.isAlive()); LOG.info("CLI Frontend has returned, so the job is running"); // find out the application id and wait until it has finished. try { List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)); ApplicationId tmpAppId; if (apps.size() == 1) { // Better method to find the right appId. But sometimes the app is shutting down // very fast // Only one running tmpAppId = apps.get(0).getApplicationId(); LOG.info("waiting for the job with appId {} to finish", tmpAppId); // wait until the app has finished while (yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)).size() > 0) { sleep(500); } } else { // get appId by finding the latest finished appid apps = yc.getApplications(); Collections.sort( apps, new Comparator<ApplicationReport>() { @Override public int compare(ApplicationReport o1, ApplicationReport o2) { return o1.getApplicationId().compareTo(o2.getApplicationId()) * -1; } }); tmpAppId = apps.get(0).getApplicationId(); LOG.info( "Selected {} as the last appId from {}", tmpAppId, Arrays.toString(apps.toArray())); } final ApplicationId id = tmpAppId; // now it has finished. // check the output files. File[] listOfOutputFiles = tmpOutFolder.listFiles(); Assert.assertNotNull("Taskmanager output not found", listOfOutputFiles); LOG.info("The job has finished. 
TaskManager output files found in {}", tmpOutFolder); // read all output files in output folder to one output string String content = ""; for (File f : listOfOutputFiles) { if (f.isFile()) { content += FileUtils.readFileToString(f) + "\n"; } } // String content = FileUtils.readFileToString(taskmanagerOut); // check for some of the wordcount outputs. Assert.assertTrue( "Expected string 'da 5' or '(all,2)' not found in string '" + content + "'", content.contains("da 5") || content.contains("(da,5)") || content.contains("(all,2)")); Assert.assertTrue( "Expected string 'der 29' or '(mind,1)' not found in string'" + content + "'", content.contains("der 29") || content.contains("(der,29)") || content.contains("(mind,1)")); // check if the heap size for the TaskManager was set correctly File jobmanagerLog = TestUtils.findFile( "..", new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.contains("jobmanager.log") && dir.getAbsolutePath().contains(id.toString()); } }); Assert.assertNotNull("Unable to locate JobManager log", jobmanagerLog); content = FileUtils.readFileToString(jobmanagerLog); String expected = "Starting TaskManagers"; Assert.assertTrue( "Expected string '" + expected + "' not found in JobManager log: '" + jobmanagerLog + "'", content.contains(expected)); expected = " (2/2) (attempt #0) with attempt id "; Assert.assertTrue( "Expected string '" + expected + "' not found in JobManager log." + "This string checks that the job has been started with a parallelism of 2. Log contents: '" + jobmanagerLog + "'", content.contains(expected)); // make sure the detached app is really finished. 
LOG.info("Checking again that app has finished"); ApplicationReport rep; do { sleep(500); rep = yc.getApplicationReport(id); LOG.info("Got report {}", rep); } while (rep.getYarnApplicationState() == YarnApplicationState.RUNNING); verifyApplicationTags(rep); } finally { // cleanup the yarn-properties file String confDirPath = System.getenv("FLINK_CONF_DIR"); File configDirectory = new File(confDirPath); LOG.info( "testDetachedPerJobYarnClusterInternal: Using configuration directory " + configDirectory.getAbsolutePath()); // load the configuration LOG.info("testDetachedPerJobYarnClusterInternal: Trying to load configuration file"); Configuration configuration = GlobalConfiguration.loadConfiguration(configDirectory.getAbsolutePath()); try { File yarnPropertiesFile = FlinkYarnSessionCli.getYarnPropertiesLocation( configuration.getValue(YarnConfigOptions.PROPERTIES_FILE_LOCATION)); if (yarnPropertiesFile.exists()) { LOG.info( "testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}", yarnPropertiesFile.getAbsolutePath()); yarnPropertiesFile.delete(); } } catch (Exception e) { LOG.warn( "testDetachedPerJobYarnClusterInternal: Exception while deleting the JobManager address file", e); } try { LOG.info("testDetachedPerJobYarnClusterInternal: Closing the yarn client"); yc.stop(); } catch (Exception e) { LOG.warn( "testDetachedPerJobYarnClusterInternal: Exception while close the yarn client", e); } } } /** * Ensures that the YARN application tags were set properly. * * <p>Since YARN application tags were only added in Hadoop 2.4, but Flink still supports Hadoop * 2.3, reflection is required to invoke the methods. If the method does not exist, this test * passes. 
*/ private void verifyApplicationTags(final ApplicationReport report) throws InvocationTargetException, IllegalAccessException { final Method applicationTagsMethod; Class<ApplicationReport> clazz = ApplicationReport.class; try { // this method is only supported by Hadoop 2.4.0 onwards applicationTagsMethod = clazz.getMethod("getApplicationTags"); } catch (NoSuchMethodException e) { // only verify the tags if the method exists return; } @SuppressWarnings("unchecked") Set<String> applicationTags = (Set<String>) applicationTagsMethod.invoke(report); assertEquals(Collections.singleton("test-tag"), applicationTags); } @After public void checkForProhibitedLogContents() { if (checkForProhibitedLogContents) { ensureNoProhibitedStringInLogFiles(PROHIBITED_STRINGS, WHITELISTED_STRINGS); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.test.api.query;

import javax.jcr.RepositoryException;
import javax.jcr.Node;
import javax.jcr.ItemExistsException;
import javax.jcr.PathNotFoundException;
import javax.jcr.Repository;
import javax.jcr.Session;
import javax.jcr.lock.LockException;
import javax.jcr.version.VersionException;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.NoSuchNodeTypeException;
import javax.jcr.query.Query;

import org.apache.jackrabbit.test.AbstractJCRTest;
import org.apache.jackrabbit.test.NotExecutableException;

/**
 * Tests the method {@link javax.jcr.query.Query#storeAsNode(String)}.
 * <p>
 * Every test first verifies (via {@link #checkNtQuery()}) that the repository
 * supports the nt:query node type; otherwise the test is marked not executable.
 */
public class SaveTest extends AbstractJCRTest {

    /** Simple XPath statement for test cases */
    private String statement;

    protected void setUp() throws Exception {
        super.setUp();
        // Matches every node whose primary type is nt:base (i.e. effectively all nodes).
        statement = "//*[@jcr:primaryType='" + ntBase + "']";
    }

    /**
     * Stores a {@link javax.jcr.query.Query#XPATH} query at:
     * <code>testRoot + "/" + nodeName1</code>.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testSave() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        query.storeAsNode(testRoot + "/" + nodeName1);

        // the stored node must exist, be of type nt:query, and round-trip the statement
        assertTrue("Node has not been stored", testRootNode.hasNode(nodeName1));
        Node queryNode = testRootNode.getNode(nodeName1);
        assertTrue("Query node is not of type nt:query", queryNode.isNodeType(ntQuery));
        Query query2 = superuser.getWorkspace().getQueryManager().getQuery(queryNode);
        assertEquals("Persisted query does not match initial query.",
                query.getStatement(), query2.getStatement());
    }

    /**
     * Tests if an {@link javax.jcr.ItemExistsException} is thrown when a query
     * is stored on an existing node and same name siblings are not allowed.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testItemExistsException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        Node qNode = query.storeAsNode(testRoot + "/" + nodeName1);

        // create another one
        query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        try {
            query.storeAsNode(testRoot + "/" + nodeName1);
            if (!qNode.getDefinition().allowsSameNameSiblings()) {
                // must throw if same name siblings are not allowed
                fail("Query.storeAsNode() did not throw ItemExistsException");
            }
        } catch (ItemExistsException e) {
            if (qNode.getDefinition().allowsSameNameSiblings()) {
                fail("Query.storeAsNode() must not throw ItemExistsException " +
                        "when same name siblings are allowed");
            } else {
                // expected behaviour
            }
        }
    }

    /**
     * Tests if a {@link javax.jcr.PathNotFoundException} is thrown when a query
     * is stored to a non existent path.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testPathNotFoundException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        try {
            // nodeName1 does not exist below testRoot, so the parent path is invalid
            query.storeAsNode(testRoot + "/" + nodeName1 + "/" + nodeName1);
            fail("Query.storeAsNode() must throw PathNotFoundException on invalid path");
        } catch (PathNotFoundException e) {
            // expected behaviour
        }
    }

    /**
     * Tests if a {@link javax.jcr.version.VersionException} is thrown when a
     * query is stored under a checked in node.
     * <p>
     * The tests creates a node under <code>testRoot</code> with name
     * <code>nodeName1</code> and adds a mix:versionable mixin if the node is
     * not already versionable.
     * Then the test tries to store a query as <code>nodeName2</code> under node
     * <code>nodeName1</code>.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testVersionException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        // check if repository supports versioning
        if (!isSupported(Repository.OPTION_VERSIONING_SUPPORTED)) {
            throw new NotExecutableException();
        }

        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        // create a node that is versionable
        Node versionable = testRootNode.addNode(nodeName1, testNodeType);
        // or try to make it versionable if it is not
        ensureMixinType(versionable, mixVersionable);
        testRootNode.getSession().save();
        versionable.checkin();

        try {
            query.storeAsNode(testRoot + "/" + nodeName1 + "/" + nodeName2);
            fail("Query.storeAsNode() must throw VersionException, parent node is checked in.");
        } catch (VersionException e) {
            // expected behaviour
        }
    }

    /**
     * Tests if a {@link javax.jcr.nodetype.ConstraintViolationException} is
     * thrown if a query is stored under a node which does not allow child nodes.
     * <p>
     * The test creates a node <code>nodeName1</code> of type <code>testNodeType</code>
     * under <code>testRoot</code>. Then the test tries to store a query as
     * <code>nodeName2</code> under <code>nodeName1</code>.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testConstraintViolationException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        testRootNode.addNode(nodeName1, testNodeTypeNoChildren);
        try {
            query.storeAsNode(testRoot + "/" + nodeName1 + "/" + nodeName2);
            // the violation may only be detected on save
            superuser.save();
            fail("Query.storeAsNode() must throw ConstraintViolationException, parent node does not allow child nodes.");
        } catch (ConstraintViolationException e) {
            // expected behaviour
        }
    }

    /**
     * Tests if a {@link javax.jcr.lock.LockException} is thrown if a query is
     * stored under a node locked by another <code>Session</code>.
     * <p>
     * The test creates a node <code>nodeName1</code> of type <code>testNodeType</code>
     * under <code>testRoot</code> and locks the node with the superuser session.
     * Then the test tries to store a query as <code>nodeName2</code> under
     * <code>nodeName1</code> with the readWrite <code>Session</code>.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testLockException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        // check if repository supports locking
        if (!isSupported(Repository.OPTION_LOCKING_SUPPORTED)) {
            throw new NotExecutableException();
        }
        // create a node that is lockable
        Node lockable = testRootNode.addNode(nodeName1, testNodeType);
        // or try to make it lockable if it is not
        ensureMixinType(lockable, mixLockable);
        testRootNode.getSession().save();
        lockable.lock(false, true);
        Session readWrite = getHelper().getReadWriteSession();
        try {
            Query query = readWrite.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
            query.storeAsNode(testRoot + "/" + nodeName1 + "/" + nodeName2);
            fail("Query.storeAsNode() must throw LockException, parent node is locked.");
        } catch (LockException e) {
            // expected behaviour
        } finally {
            // always release the second session and the lock, even on failure
            readWrite.logout();
            lockable.unlock();
        }
    }

    /**
     * Tests if a {@link javax.jcr.RepositoryException} is thrown when
     * a malformed path is passed in {@link javax.jcr.query.Query#storeAsNode(String)}.
     * @throws NotExecutableException if nt:query is not supported.
     */
    public void testRepositoryException() throws RepositoryException, NotExecutableException {
        checkNtQuery();
        Query query = superuser.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);
        try {
            // "/invalid[42]" addresses a non-existing same-name-sibling index
            query.storeAsNode(testRoot + "/invalid[42]");
            fail("Query.storeAsNode() must throw RepositoryException on malformed path.");
        } catch (RepositoryException e) {
            // expected behaviour
        }
    }

    //-------------------------------< internal >-------------------------------

    /**
     * Checks if the repository supports the nt:query node type otherwise throws
     * a <code>NotExecutableException</code>.
     *
     * @throws NotExecutableException if nt:query is not supported.
     */
    private void checkNtQuery() throws RepositoryException, NotExecutableException {
        try {
            superuser.getWorkspace().getNodeTypeManager().getNodeType(ntQuery);
        } catch (NoSuchNodeTypeException e) {
            // not supported
            throw new NotExecutableException("repository does not support nt:query");
        }
    }
}
/* * [The "BSD license"] * Copyright (c) 2011, abego Software GmbH, Germany (http://www.abego.org) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of the abego Software GmbH nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/
package org.abego.treelayout.util;

import java.util.Iterator;
import java.util.List;

import org.abego.treelayout.util.DefaultTreeForTreeLayout;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Unit tests for {@link DefaultTreeForTreeLayout}, exercising a small fixture
 * tree ("root" with two leaf children "n1" and "n2") plus ad-hoc trees for the
 * mutation tests.
 *
 * @author Udo Borkowski (ub@abego.org)
 */
public class DefaultTreeForTreeLayoutTest {
	String root = "root";
	String n1 = "n1";
	String n2 = "n2";
	DefaultTreeForTreeLayout<String> tree;

	@Before
	public void setUp() {
		// fixture: root with the two leaves n1 and n2, in that order
		tree = new DefaultTreeForTreeLayout<String>(root);
		tree.addChild(root, n1);
		tree.addChild(root, n2);
	}

	@Test
	public void testGetRoot() throws Exception {
		assertEquals(root, tree.getRoot());
	}

	@Test
	public void testIsLeaf() throws Exception {
		// the root has children; both children have none
		assertFalse(tree.isLeaf(root));
		assertTrue(tree.isLeaf(n1));
		assertTrue(tree.isLeaf(n2));
	}

	@Test
	public void testGetChildrenList() throws Exception {
		List<String> kids = tree.getChildrenList(root);
		// insertion order must be preserved
		assertEquals(2, kids.size());
		assertEquals(n1, kids.get(0));
		assertEquals(n2, kids.get(1));
	}

	@Test
	public void testGetChildrenList_leaf() throws Exception {
		// a leaf yields an empty (not null) list
		assertEquals(0, tree.getChildrenList(n1).size());
	}

	@Test
	public void testIsChildOfParent() throws Exception {
		// the relation is directional: child first, parent second
		assertFalse(tree.isChildOfParent(root, n1));
		assertTrue(tree.isChildOfParent(n1, root));
		assertTrue(tree.isChildOfParent(n2, root));
		assertFalse(tree.isChildOfParent(n1, n2));
	}

	@Test
	public void testGetChildren() throws Exception {
		Iterator<String> it = tree.getChildren(root).iterator();
		// children are reported in insertion order
		assertEquals(n1, it.next());
		assertEquals(n2, it.next());
		assertFalse(it.hasNext());
	}

	@Test
	public void testGetChildren_leaf() throws Exception {
		// getChildren is not required to handle leaf nodes, so it may throw an
		// exception here. If it does not throw an exception it must return an
		// empty iterator.
		try {
			Iterable<String> result = tree.getChildren(n1);
			assertFalse(result.iterator().hasNext());
		} catch (Exception ex) {
			// OK to fail (see above)
		}
	}

	@Test
	public void testGetChildrenReverse() throws Exception {
		Iterator<String> it = tree.getChildrenReverse(root).iterator();
		// reverse of insertion order
		assertEquals(n2, it.next());
		assertEquals(n1, it.next());
		assertFalse(it.hasNext());
	}

	@Test
	public void testGetChildrenReverse_leaf() throws Exception {
		// getChildrenReverse is not required to handle leaf nodes, so it may
		// throw an exception here. If it does not throw an exception it must
		// return an empty iterator.
		try {
			Iterable<String> result = tree.getChildrenReverse(n1);
			assertFalse(result.iterator().hasNext());
		} catch (Exception ex) {
			// OK to fail (see above)
		}
	}

	@Test
	public void testGetFirstChild() throws Exception {
		assertEquals(n1, tree.getFirstChild(root));
	}

	@Test
	public void testGetFirstChild_leaf() throws Exception {
		// a leaf has no first child; some exception is expected
		try {
			tree.getFirstChild(n1);
			fail("exception expected");
		} catch (Exception ex) {
			// any exception will do
		}
	}

	@Test
	public void testGetLastChild() throws Exception {
		assertEquals(n2, tree.getLastChild(root));
	}

	@Test
	public void testGetLastChild_leaf() throws Exception {
		// a leaf has no last child; some exception is expected
		try {
			tree.getLastChild(n1);
			fail("exception expected");
		} catch (Exception ex) {
			// any exception will do
		}
	}

	@Test
	public void testHasNode() throws Exception {
		assertTrue(tree.hasNode(root));
		assertTrue(tree.hasNode(n1));
		assertTrue(tree.hasNode(n2));
		assertFalse(tree.hasNode("wrong node"));
	}

	@Test
	public void testAddChild() throws Exception {
		DefaultTreeForTreeLayout<String> t = new DefaultTreeForTreeLayout<String>(
				"ROOT");
		t.addChild("ROOT", "N1");
		t.addChild("ROOT", "N2");
		t.addChild("N1", "N1.1");

		// children appear under their parent, in insertion order
		assertEquals("N1", t.getChildrenList("ROOT").get(0));
		assertEquals("N1.1", t.getChildrenList("N1").get(0));
		assertEquals("N2", t.getChildrenList("ROOT").get(1));
	}

	@Test
	public void testAddChildren() throws Exception {
		DefaultTreeForTreeLayout<String> t = new DefaultTreeForTreeLayout<String>(
				"ROOT");
		t.addChildren("ROOT", "N1", "N2");
		t.addChildren("N1", "N1.1");

		// varargs form must behave like repeated addChild calls
		assertEquals("N1", t.getChildrenList("ROOT").get(0));
		assertEquals("N1.1", t.getChildrenList("N1").get(0));
		assertEquals("N2", t.getChildrenList("ROOT").get(1));
	}

	@Test
	public void testAddChild_alreadyInTree() throws Exception {
		DefaultTreeForTreeLayout<String> t = new DefaultTreeForTreeLayout<String>(
				"ROOT");
		// adding the root as its own child must be rejected
		try {
			t.addChild("ROOT", "ROOT");
			fail("exception expected");
		} catch (Exception ex) {
			assertEquals("node is already in the tree", ex.getMessage());
		}
		// adding the same node twice must be rejected, too
		t.addChild("ROOT", "N1");
		try {
			t.addChild("ROOT", "N1");
			fail("exception expected");
		} catch (Exception ex) {
			assertEquals("node is already in the tree", ex.getMessage());
		}
	}

	@Test
	public void testAddChild_parentNotInTree() throws Exception {
		DefaultTreeForTreeLayout<String> t = new DefaultTreeForTreeLayout<String>(
				"ROOT");
		// "N1" was never added, so it cannot serve as a parent
		try {
			t.addChild("N1", "N1.1");
			fail("exception expected");
		} catch (Exception ex) {
			assertEquals("parentNode is not in the tree", ex.getMessage());
		}
	}
}
package com.mobgen.halo.android.content.edition;

import android.os.Bundle;

import com.mobgen.halo.android.content.edition.batch.BatchBundleizeHelper;
import com.mobgen.halo.android.content.models.BatchOperationResults;
import com.mobgen.halo.android.content.models.BatchOperations;
import com.mobgen.halo.android.content.models.HaloContentInstance;
import com.mobgen.halo.android.framework.common.exceptions.HaloParsingException;
import com.mobgen.halo.android.framework.common.helpers.subscription.ISubscription;
import com.mobgen.halo.android.framework.toolbox.bus.Event;
import com.mobgen.halo.android.framework.toolbox.bus.EventId;
import com.mobgen.halo.android.framework.toolbox.data.CallbackV2;
import com.mobgen.halo.android.sdk.api.Halo;
import com.mobgen.halo.android.testing.CallbackFlag;
import com.mobgen.halo.android.testing.HaloRobolectricTest;
import com.mobgen.halo.android.testing.MockServer;

import org.junit.Test;

import java.io.IOException;

import static com.mobgen.halo.android.content.edition.HaloContentEditApi.BATCH_FINISHED_EVENT;
import static com.mobgen.halo.android.content.mock.fixtures.ServerFixtures.CONTENT_BATCH_API;
import static com.mobgen.halo.android.content.mock.fixtures.ServerFixtures.CONTENT_EDIT_API;
import static com.mobgen.halo.android.content.mock.fixtures.ServerFixtures.enqueueServerError;
import static com.mobgen.halo.android.content.mock.fixtures.ServerFixtures.enqueueServerFile;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenABatchContentSuccessCallback;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenABatchContentSuccessScheduledCallback;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenABatchOperationsEventSubscription;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenABatchOperationsEventSubscriptionScheduled;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenANewHaloContentEditOptions;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenAUpdateHaloContentEditOptions;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloMock.givenADefaultHalo;
import static com.mobgen.halo.android.testing.CallbackFlag.newCallbackFlag;
import static org.assertj.core.api.Java6Assertions.assertThat;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenAContentSuccessCallback;
import static com.mobgen.halo.android.content.mock.instrumentation.HaloEditContentInstruments.givenAContentAuthenticationErrorCallback;

/**
 * Tests for {@code HaloContentEditApi}: add/update/delete of content instances
 * and batch operations, against a local {@link MockServer}. The mock server
 * replays responses in enqueue order, so the order of the
 * {@code enqueueServer*} calls inside each test is significant.
 */
public class HaloContentEditApiTest extends HaloRobolectricTest {

    // Local HTTP mock that stands in for the HALO backend.
    private MockServer mMockServer;
    // SDK facade under test, configured against the mock server.
    private static Halo mHalo;
    // Flag flipped by the instrumented callbacks so tests can assert delivery.
    private CallbackFlag mCallbackFlag;

    @Override
    public void onStart() throws IOException, HaloParsingException {
        mMockServer = MockServer.create();
        mHalo = givenADefaultHalo(mMockServer.start());
        mCallbackFlag = newCallbackFlag();
    }

    @Override
    public void onDestroy() throws IOException {
        // Tear down in reverse order of setup.
        mHalo.uninstall();
        mMockServer.shutdown();
    }

    @Test
    public void thatCanAddGeneralContentInstance() throws IOException {
        enqueueServerFile(mMockServer, CONTENT_EDIT_API);
        CallbackV2<HaloContentInstance> callback = givenAContentSuccessCallback(mCallbackFlag, "5874c5f06a3a0d1e00c8039d");
        HaloContentEditApi.with(mHalo)
                .addContent(givenANewHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanHandleAutenticationExceptionAfterAddingContent() throws IOException {
        // 403 from the server must surface as an authentication error in the callback.
        enqueueServerError(mMockServer, 403);
        CallbackV2<HaloContentInstance> callback = givenAContentAuthenticationErrorCallback(mCallbackFlag);
        HaloContentEditApi.with(mHalo)
                .addContent(givenANewHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanUpdateGeneralContentInstance() throws IOException {
        enqueueServerFile(mMockServer, CONTENT_EDIT_API);
        CallbackV2<HaloContentInstance> callback = givenAContentSuccessCallback(mCallbackFlag, "5874c5f06a3a0d1e00c8039d");
        HaloContentEditApi.with(mHalo)
                .updateContent(givenAUpdateHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanHandleAutenticationExceptionAfterUpdatingContent() throws IOException {
        enqueueServerError(mMockServer, 403);
        CallbackV2<HaloContentInstance> callback = givenAContentAuthenticationErrorCallback(mCallbackFlag);
        // NOTE(review): this passes the "new" fixture to updateContent, unlike the
        // success test above which uses the "update" fixture — presumably fine here
        // because only the error path is exercised, but confirm this is intentional.
        HaloContentEditApi.with(mHalo)
                .updateContent(givenANewHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanDeleteGeneralContentInstance() throws IOException {
        enqueueServerFile(mMockServer, CONTENT_EDIT_API);
        CallbackV2<HaloContentInstance> callback = givenAContentSuccessCallback(mCallbackFlag, "5874c5f06a3a0d1e00c8039d");
        HaloContentEditApi.with(mHalo)
                .deleteContent(givenAUpdateHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanHandleAutenticationExceptionAfterDeletingContent() throws IOException {
        enqueueServerError(mMockServer, 403);
        CallbackV2<HaloContentInstance> callback = givenAContentAuthenticationErrorCallback(mCallbackFlag);
        HaloContentEditApi.with(mHalo)
                .deleteContent(givenANewHaloContentEditOptions())
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanPerfomABatchOperation() throws IOException {
        enqueueServerFile(mMockServer, CONTENT_BATCH_API);
        CallbackV2<BatchOperationResults> callback = givenABatchContentSuccessCallback(mCallbackFlag, false);
        HaloContentInstance instanceDelete = givenANewHaloContentEditOptions();
        HaloContentInstance instanceUpdate = givenANewHaloContentEditOptions();
        HaloContentInstance instanceCreate = givenAUpdateHaloContentEditOptions();
        BatchOperations batchOperations = BatchOperations.builder()
                .delete(instanceDelete)
                .create(instanceCreate)
                .update(instanceUpdate)
                .build();
        HaloContentEditApi.with(mHalo)
                .batch(batchOperations, true)
                .execute(callback);
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanReceiveResultOperationByEventOnSubscription() throws IOException {
        enqueueServerFile(mMockServer, CONTENT_BATCH_API);
        CallbackV2<BatchOperationResults> callback = givenABatchContentSuccessCallback(mCallbackFlag, true);
        HaloContentInstance instanceDelete = givenANewHaloContentEditOptions();
        HaloContentInstance instanceUpdate = givenANewHaloContentEditOptions();
        HaloContentInstance instanceCreate = givenAUpdateHaloContentEditOptions();
        BatchOperations batchOperations = BatchOperations.builder()
                .delete(instanceDelete)
                .create(instanceCreate)
                .update(instanceUpdate)
                .build();
        // Subscribe before executing, so the batch-finished event is not missed.
        ISubscription eventSubscription = givenABatchOperationsEventSubscription(mHalo, true);
        HaloContentEditApi.with(mHalo)
                .batch(batchOperations, true)
                .execute(callback);
        assertThat(eventSubscription).isNotNull();
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanReceiveConflictsOperationByEventOnSubscription() throws IOException {
        HaloContentInstance instanceDelete = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceUpdate = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceCreate = givenANewHaloContentEditOptions();
        BatchOperations conflictOperations = BatchOperations.builder()
                .delete(instanceDelete)
                .create(instanceCreate)
                .update(instanceUpdate)
                .build();
        ISubscription eventSubscription = givenABatchOperationsEventSubscription(mHalo, false);
        // Emit the batch-finished event directly on the framework bus — no server call
        // is made, so the callback flag must stay unset.
        Bundle batchConflict = BatchBundleizeHelper.bundleizeBatchOperations(conflictOperations);
        Halo.instance().framework().emit(new Event(EventId.create(BATCH_FINISHED_EVENT), batchConflict));
        assertThat(eventSubscription).isNotNull();
        assertThat(mCallbackFlag.isFlagged()).isFalse();
    }

    @Test
    public void thatCanScheduleABatchOperation() throws IOException {
        // First response fails (500) to force scheduling; the retry succeeds.
        enqueueServerError(mMockServer, 500);
        enqueueServerFile(mMockServer, CONTENT_BATCH_API);
        CallbackV2<BatchOperationResults> callback = givenABatchContentSuccessScheduledCallback(mCallbackFlag);
        HaloContentInstance instanceDelete = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceUpdate = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceCreate = givenANewHaloContentEditOptions();
        BatchOperations batchOperations = BatchOperations.builder()
                .delete(instanceDelete)
                .create(instanceCreate)
                .update(instanceUpdate)
                .build();
        ISubscription eventSubscription = givenABatchOperationsEventSubscriptionScheduled(mHalo);
        HaloContentEditApi.with(mHalo)
                .batch(batchOperations, true)
                .execute(callback);
        assertThat(eventSubscription).isNotNull();
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }

    @Test
    public void thatCanPerfomAllBatchOperationsAfterSchedule() throws IOException {
        // Same failure-then-success sequence, with every batch operation type present.
        enqueueServerError(mMockServer, 500);
        enqueueServerFile(mMockServer, CONTENT_BATCH_API);
        CallbackV2<BatchOperationResults> callback = givenABatchContentSuccessScheduledCallback(mCallbackFlag);
        HaloContentInstance instanceDelete = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceUpdate = givenAUpdateHaloContentEditOptions();
        HaloContentInstance instanceCreate = givenANewHaloContentEditOptions();
        BatchOperations batchOperations = BatchOperations.builder()
                .delete(instanceDelete)
                .create(instanceCreate)
                .update(instanceUpdate)
                .truncate(instanceCreate)
                .createOrUpdate(instanceCreate)
                .build();
        ISubscription eventSubscription = givenABatchOperationsEventSubscriptionScheduled(mHalo);
        HaloContentEditApi.with(mHalo)
                .batch(batchOperations, true)
                .execute(callback);
        assertThat(eventSubscription).isNotNull();
        assertThat(mCallbackFlag.isFlagged()).isTrue();
    }
}
/*
 * Copyright 2010 Chad Retz
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.statmantis.model;

import java.io.Serializable;
import java.util.List;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.Table;

import org.hibernate.annotations.CollectionOfElements;

import org.statmantis.annotation.XmlInternalProperty;

/**
 * Info for a team in a {@link Game}: the box-score line (batting, pitching and
 * fielding totals), per-inning scores, and references to the manager and the
 * notable pitchers/batters for the game.
 *
 * Mutable JPA entity; all stat fields map to non-nullable columns.
 *
 * @author Chad Retz
 */
@Entity
@Table(name = "TeamGameInfo")
@SuppressWarnings("serial")
public class TeamGameInfo implements Serializable {

    // Surrogate primary key, sequence-generated.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE)
    @Column(name = "TeamGameInfoId", unique = true, nullable = false)
    private long teamGameInfoId;

    // Game number (e.g. to distinguish games of a double-header).
    @Column(name = "GameNumber", nullable = false)
    private int gameNumber;

    // Final score for this team.
    @Column(name = "Score", nullable = false)
    private int score;

    // Runs scored per inning (the line score), stored in a collection table.
    // NOTE(review): the join column is "TeamId" while this entity's PK column
    // is "TeamGameInfoId" — looks inconsistent with the TeamGamePlayer join
    // table below; confirm against the actual schema.
    @CollectionOfElements
    @JoinTable(name = "TeamGameInfoLineScore",
            joinColumns = @JoinColumn(name = "TeamId"))
    @Column(name = "InningScore", nullable = false)
    private List<Integer> inningScores;

    // --- Batting totals ---

    @Column(name = "AtBats", nullable = false)
    private int atBats;

    @Column(name = "Hits", nullable = false)
    private int hits;

    @Column(name = "Doubles", nullable = false)
    private int doubles;

    @Column(name = "Triples", nullable = false)
    private int triples;

    @Column(name = "Homeruns", nullable = false)
    private int homeruns;

    @Column(name = "Rbi", nullable = false)
    private int rbi;

    @Column(name = "SacrificeHits", nullable = false)
    private int sacrificeHits;

    @Column(name = "SacrificeFlies", nullable = false)
    private int sacrificeFlies;

    @Column(name = "HitByPitch", nullable = false)
    private int hitByPitch;

    @Column(name = "Walks", nullable = false)
    private int walks;

    @Column(name = "IntentionalWalks", nullable = false)
    private int intentionalWalks;

    @Column(name = "Strikeouts", nullable = false)
    private int strikeouts;

    // --- Baserunning ---

    @Column(name = "StolenBases", nullable = false)
    private int stolenBases;

    @Column(name = "CaughtStealing", nullable = false)
    private int caughtStealing;

    @Column(name = "GroundedIntoDoublePlays", nullable = false)
    private int groundedIntoDoublePlays;

    @Column(name = "OpponentCatcherInterferences", nullable = false)
    private int opponentCatcherInterferences;

    @Column(name = "LeftOnBase", nullable = false)
    private int leftOnBase;

    // --- Pitching totals ---

    @Column(name = "PitchersUsed", nullable = false)
    private int pitchersUsed;

    @Column(name = "IndividualEarnedRuns", nullable = false)
    private int individualEarnedRuns;

    @Column(name = "TeamEarnedRuns", nullable = false)
    private int teamEarnedRuns;

    @Column(name = "WildPitches", nullable = false)
    private int wildPitches;

    @Column(name = "Balks", nullable = false)
    private int balks;

    // --- Fielding totals ---

    @Column(name = "Putouts", nullable = false)
    private int putouts;

    @Column(name = "Assists", nullable = false)
    private int assists;

    @Column(name = "Errors", nullable = false)
    private int errors;

    @Column(name = "PassedBalls", nullable = false)
    private int passedBalls;

    @Column(name = "DoublePlays", nullable = false)
    private int doublePlays;

    @Column(name = "TriplePlays", nullable = false)
    private int triplePlays;

    // --- People ---

    // Team's manager for this game.
    @XmlInternalProperty("personId")
    @ManyToOne
    @JoinColumn(name = "ManagerId", nullable = false)
    private Person manager;

    // Pitcher credited with the decision (win or loss per `winner`).
    @XmlInternalProperty("gamePlayerId")
    @ManyToOne
    @JoinColumn(name = "DecidingPitcherId", nullable = false)
    private GamePlayer decidingPitcher;

    // Whether this team won the game.
    @Column(name = "Winner", nullable = false)
    private boolean winner;

    // Pitcher credited with a save, if any (nullable).
    @XmlInternalProperty("gamePlayerId")
    @ManyToOne
    @JoinColumn(name = "SavingPitcherId")
    private GamePlayer savingPitcher;

    // Batter credited with the game-winning RBI, if any (nullable).
    @XmlInternalProperty("gamePlayerId")
    @ManyToOne
    @JoinColumn(name = "WinningBatterId")
    private GamePlayer winningBatter;

    // All players that appeared for this team in the game.
    @ManyToMany
    @JoinTable(name = "TeamGamePlayer",
            joinColumns = @JoinColumn(name = "TeamGameInfoId"),
            inverseJoinColumns = @JoinColumn(name = "GamePlayerId"))
    private List<GamePlayer> gamePlayers;

    // --- Plain accessors (no logic) ---

    public long getTeamGameInfoId() { return teamGameInfoId; }
    public void setTeamGameInfoId(long teamGameInfoId) { this.teamGameInfoId = teamGameInfoId; }

    public int getGameNumber() { return gameNumber; }
    public void setGameNumber(int gameNumber) { this.gameNumber = gameNumber; }

    public int getScore() { return score; }
    public void setScore(int score) { this.score = score; }

    public List<Integer> getInningScores() { return inningScores; }
    public void setInningScores(List<Integer> inningScores) { this.inningScores = inningScores; }

    public int getAtBats() { return atBats; }
    public void setAtBats(int atBats) { this.atBats = atBats; }

    public int getHits() { return hits; }
    public void setHits(int hits) { this.hits = hits; }

    public int getDoubles() { return doubles; }
    public void setDoubles(int doubles) { this.doubles = doubles; }

    public int getTriples() { return triples; }
    public void setTriples(int triples) { this.triples = triples; }

    public int getHomeruns() { return homeruns; }
    public void setHomeruns(int homeruns) { this.homeruns = homeruns; }

    public int getRbi() { return rbi; }
    public void setRbi(int rbi) { this.rbi = rbi; }

    public int getSacrificeHits() { return sacrificeHits; }
    public void setSacrificeHits(int sacrificeHits) { this.sacrificeHits = sacrificeHits; }

    public int getSacrificeFlies() { return sacrificeFlies; }
    public void setSacrificeFlies(int sacrificeFlies) { this.sacrificeFlies = sacrificeFlies; }

    public int getHitByPitch() { return hitByPitch; }
    public void setHitByPitch(int hitByPitch) { this.hitByPitch = hitByPitch; }

    public int getWalks() { return walks; }
    public void setWalks(int walks) { this.walks = walks; }

    public int getIntentionalWalks() { return intentionalWalks; }
    public void setIntentionalWalks(int intentionalWalks) { this.intentionalWalks = intentionalWalks; }

    public int getStrikeouts() { return strikeouts; }
    public void setStrikeouts(int strikeouts) { this.strikeouts = strikeouts; }

    public int getStolenBases() { return stolenBases; }
    public void setStolenBases(int stolenBases) { this.stolenBases = stolenBases; }

    public int getCaughtStealing() { return caughtStealing; }
    public void setCaughtStealing(int caughtStealing) { this.caughtStealing = caughtStealing; }

    public int getGroundedIntoDoublePlays() { return groundedIntoDoublePlays; }
    public void setGroundedIntoDoublePlays(int groundedIntoDoublePlays) { this.groundedIntoDoublePlays = groundedIntoDoublePlays; }

    public int getOpponentCatcherInterferences() { return opponentCatcherInterferences; }
    public void setOpponentCatcherInterferences(int opponentCatcherInterferences) { this.opponentCatcherInterferences = opponentCatcherInterferences; }

    public int getLeftOnBase() { return leftOnBase; }
    public void setLeftOnBase(int leftOnBase) { this.leftOnBase = leftOnBase; }

    public int getPitchersUsed() { return pitchersUsed; }
    public void setPitchersUsed(int pitchersUsed) { this.pitchersUsed = pitchersUsed; }

    public int getIndividualEarnedRuns() { return individualEarnedRuns; }
    public void setIndividualEarnedRuns(int individualEarnedRuns) { this.individualEarnedRuns = individualEarnedRuns; }

    public int getTeamEarnedRuns() { return teamEarnedRuns; }
    public void setTeamEarnedRuns(int teamEarnedRuns) { this.teamEarnedRuns = teamEarnedRuns; }

    public int getWildPitches() { return wildPitches; }
    public void setWildPitches(int wildPitches) { this.wildPitches = wildPitches; }

    public int getBalks() { return balks; }
    public void setBalks(int balks) { this.balks = balks; }

    public int getPutouts() { return putouts; }
    public void setPutouts(int putouts) { this.putouts = putouts; }

    public int getAssists() { return assists; }
    public void setAssists(int assists) { this.assists = assists; }

    public int getErrors() { return errors; }
    public void setErrors(int errors) { this.errors = errors; }

    public int getPassedBalls() { return passedBalls; }
    public void setPassedBalls(int passedBalls) { this.passedBalls = passedBalls; }

    public int getDoublePlays() { return doublePlays; }
    public void setDoublePlays(int doublePlays) { this.doublePlays = doublePlays; }

    public int getTriplePlays() { return triplePlays; }
    public void setTriplePlays(int triplePlays) { this.triplePlays = triplePlays; }

    public Person getManager() { return manager; }
    public void setManager(Person manager) { this.manager = manager; }

    public GamePlayer getDecidingPitcher() { return decidingPitcher; }
    public void setDecidingPitcher(GamePlayer decidingPitcher) { this.decidingPitcher = decidingPitcher; }

    public boolean isWinner() { return winner; }
    public void setWinner(boolean winner) { this.winner = winner; }

    public GamePlayer getSavingPitcher() { return savingPitcher; }
    public void setSavingPitcher(GamePlayer savingPitcher) { this.savingPitcher = savingPitcher; }

    public GamePlayer getWinningBatter() { return winningBatter; }
    public void setWinningBatter(GamePlayer winningBatter) { this.winningBatter = winningBatter; }

    public List<GamePlayer> getGamePlayers() { return gamePlayers; }
    public void setGamePlayers(List<GamePlayer> gamePlayers) { this.gamePlayers = gamePlayers; }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.util;

import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import consulo.annotation.access.RequiredReadAction;
import consulo.util.collection.Maps;
import org.jetbrains.annotations.NonNls;

import javax.annotation.Nonnull;
import java.util.*;

/**
 * Static helpers for navigating the super-method/super-constructor
 * relationships of PSI elements. Stateless utility class.
 */
public class PsiSuperMethodUtil {
  private PsiSuperMethodUtil() {
  }

  /**
   * Finds the constructor (explicitly or implicitly) invoked by
   * {@code constructor} in its superclass. Entry point that seeds the
   * cycle-guard set used when following {@code this(...)} chains.
   */
  @RequiredReadAction
  public static PsiMethod findConstructorInSuper(PsiMethod constructor) {
    return findConstructorInSuper(constructor, new HashSet<PsiMethod>());
  }

  /**
   * Finds the super constructor invoked by {@code constructor}.
   * <ul>
   *   <li>explicit {@code super(...)} as the first statement → the resolved
   *       super constructor;</li>
   *   <li>explicit {@code this(...)} → recurse into the delegated
   *       constructor ({@code visited} guards against delegation cycles);</li>
   *   <li>otherwise → look up the superclass's default (no-arg)
   *       constructor by signature.</li>
   * </ul>
   * Returns {@code null} when nothing can be resolved.
   */
  @RequiredReadAction
  public static PsiMethod findConstructorInSuper(PsiMethod constructor, Set<PsiMethod> visited) {
    if(visited.contains(constructor)) {
      return null; // delegation cycle — bail out
    }
    visited.add(constructor);
    final PsiCodeBlock body = constructor.getBody();
    if(body != null) {
      PsiStatement[] statements = body.getStatements();
      if(statements.length > 0) {
        // Only the first statement may be a this()/super() call in Java.
        PsiElement firstChild = statements[0].getFirstChild();
        if(firstChild instanceof PsiMethodCallExpression) {
          PsiReferenceExpression methodExpr = ((PsiMethodCallExpression) firstChild).getMethodExpression();
          @NonNls final String text = methodExpr.getText();
          if(text.equals("super")) {
            PsiElement superConstructor = methodExpr.resolve();
            if(superConstructor instanceof PsiMethod) {
              return (PsiMethod) superConstructor;
            }
          }
          else if(text.equals("this")) {
            final PsiElement resolved = methodExpr.resolve();
            if(resolved instanceof PsiMethod) {
              return findConstructorInSuper((PsiMethod) resolved, visited);
            }
            return null;
          }
        }
      }
    }
    // No explicit call: the implicit super() targets the superclass's
    // default constructor, located by a synthetic no-arg signature.
    PsiClass containingClass = constructor.getContainingClass();
    if(containingClass != null) {
      PsiClass superClass = containingClass.getSuperClass();
      if(superClass != null) {
        MethodSignature defConstructor = MethodSignatureUtil.createMethodSignature(superClass.getName(), PsiType.EMPTY_ARRAY, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY, true);
        return MethodSignatureUtil.findMethodBySignature(superClass, defConstructor, false);
      }
    }
    return null;
  }

  /**
   * Returns {@code true} when {@code superMethod} is a (transitive) super
   * method of {@code method}, walking the hierarchical signature graph
   * recursively.
   */
  public static boolean isSuperMethod(@Nonnull PsiMethod method, @Nonnull PsiMethod superMethod) {
    HierarchicalMethodSignature signature = method.getHierarchicalMethodSignature();
    List<HierarchicalMethodSignature> superSignatures = signature.getSuperSignatures();
    for(int i = 0, superSignaturesSize = superSignatures.size(); i < superSignaturesSize; i++) {
      HierarchicalMethodSignature supsig = superSignatures.get(i);
      PsiMethod supsigme = supsig.getMethod();
      if(superMethod.equals(supsigme) || isSuperMethod(supsigme, superMethod)) {
        return true;
      }
    }

    return false;
  }

  /**
   * Composes {@code superSubstitutor} (mapping {@code superClass}'s type
   * parameters) with {@code derivedSubstitutor}. When {@code inRawContext},
   * the super substitutor is first rawified. Returns
   * {@link PsiSubstitutor#EMPTY} when the class has no type parameters.
   */
  @Nonnull
  public static PsiSubstitutor obtainFinalSubstitutor(@Nonnull PsiClass superClass,
                          @Nonnull PsiSubstitutor superSubstitutor,
                          @Nonnull PsiSubstitutor derivedSubstitutor,
                          boolean inRawContext) {
    if(inRawContext) {
      Set<PsiTypeParameter> typeParams = superSubstitutor.getSubstitutionMap().keySet();
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(superClass.getProject());
      superSubstitutor = factory.createRawSubstitutor(derivedSubstitutor, typeParams.toArray(new PsiTypeParameter[typeParams.size()]));
    }
    Map<PsiTypeParameter, PsiType> map = null;
    for(PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable(superClass)) {
      PsiType type = superSubstitutor.substitute(typeParameter);
      final PsiType t = derivedSubstitutor.substitute(type);
      if(map == null) {
        map = new HashMap<PsiTypeParameter, PsiType>();
      }
      map.put(typeParameter, t);
    }

    return map == null ? PsiSubstitutor.EMPTY : JavaPsiFacade.getInstance(superClass.getProject()).getElementFactory().createSubstitutor(map);
  }

  /**
   * Collects, per erasure-equal signature, the inherited methods of
   * {@code aClass} that are override-equivalent but not overridden in
   * {@code aClass} itself. Supers that are subtypes of other listed supers
   * are skipped so each method is attributed to its most general origin.
   */
  @Nonnull
  public static Map<MethodSignature, Set<PsiMethod>> collectOverrideEquivalents(@Nonnull PsiClass aClass) {
    final Map<MethodSignature, Set<PsiMethod>> overrideEquivalent = Maps.newHashMap(MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY);
    final GlobalSearchScope resolveScope = aClass.getResolveScope();
    PsiClass[] supers = aClass.getSupers();
    for(int i = 0; i < supers.length; i++) {
      PsiClass superClass = supers[i];
      boolean subType = false;
      // Skip supers[i] if any other super inherits from it.
      for(int j = 0; j < supers.length; j++) {
        if(j == i) {
          continue;
        }
        subType |= supers[j].isInheritor(supers[i], true);
      }
      if(subType) {
        continue;
      }
      final PsiSubstitutor superClassSubstitutor = TypeConversionUtil.getSuperClassSubstitutor(superClass, aClass, PsiSubstitutor.EMPTY);
      for(HierarchicalMethodSignature hms : superClass.getVisibleSignatures()) {
        PsiMethod method = hms.getMethod();
        // Already overridden in aClass — not an inherited equivalent.
        if(MethodSignatureUtil.findMethodBySignature(aClass, method.getSignature(superClassSubstitutor), false) != null) {
          continue;
        }
        // Re-resolve the containing class in aClass's resolve scope so
        // library/source duplicates are normalized.
        final PsiClass containingClass = correctClassByScope(method.getContainingClass(), resolveScope);
        if(containingClass == null) {
          continue;
        }
        method = containingClass.findMethodBySignature(method, false);
        if(method == null) {
          continue;
        }
        final PsiSubstitutor containingClassSubstitutor = TypeConversionUtil.getClassSubstitutor(containingClass, aClass, PsiSubstitutor.EMPTY);
        if(containingClassSubstitutor == null) {
          continue;
        }
        final PsiSubstitutor finalSubstitutor = obtainFinalSubstitutor(containingClass, containingClassSubstitutor, hms.getSubstitutor(), false);
        final MethodSignatureBackedByPsiMethod signature = MethodSignatureBackedByPsiMethod.create(method, finalSubstitutor, false);
        Set<PsiMethod> methods = overrideEquivalent.get(signature);
        if(methods == null) {
          methods = new LinkedHashSet<PsiMethod>();
          overrideEquivalent.put(signature, methods);
        }
        methods.add(method);
      }
    }
    return overrideEquivalent;
  }

  /**
   * Re-resolves {@code psiClass} by qualified name inside
   * {@code resolveScope}. Returns the original class unchanged when it is
   * anonymous/local (no qualified name), non-physical, or not part of the
   * project's sources or libraries; may return {@code null} when the name
   * does not resolve in the given scope.
   */
  @javax.annotation.Nullable
  public static PsiClass correctClassByScope(PsiClass psiClass, final GlobalSearchScope resolveScope) {
    if(psiClass == null) {
      return null;
    }
    String qualifiedName = psiClass.getQualifiedName();
    if(qualifiedName == null) {
      return psiClass;
    }

    PsiFile file = psiClass.getContainingFile();
    if(file == null || !file.getViewProvider().isPhysical()) {
      return psiClass;
    }

    final VirtualFile vFile = file.getVirtualFile();
    if(vFile == null) {
      return psiClass;
    }

    final FileIndexFacade index = FileIndexFacade.getInstance(file.getProject());
    if(!index.isInSource(vFile) && !index.isInLibrarySource(vFile) && !index.isInLibraryClasses(vFile)) {
      return psiClass;
    }

    return JavaPsiFacade.getInstance(psiClass.getProject()).findClass(qualifiedName, resolveScope);
  }
}
/*
 * Copyright 2013 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package feign.ribbon;

import static com.netflix.config.ConfigurationManager.getConfigInstance;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.net.URI;
import java.net.URL;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

import com.netflix.client.config.CommonClientConfigKey;
import com.netflix.client.config.IClientConfig;

import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.SocketPolicy;
import okhttp3.mockwebserver.MockWebServer;

import feign.Client;
import feign.Feign;
import feign.Param;
import feign.Request;
import feign.RequestLine;
import feign.RetryableException;
import feign.Retryer;
import feign.client.TrustingSSLSocketFactory;

/**
 * Integration tests for {@link RibbonClient}: load balancing, retry
 * behavior, URL handling and configuration mapping. Each test registers its
 * own server list under a ribbon client named after the test method so the
 * tests cannot interfere with each other's archaius configuration.
 */
public class RibbonClientTest {

  @Rule
  public final TestName testName = new TestName();
  @Rule
  public final MockWebServer server1 = new MockWebServer();
  @Rule
  public final MockWebServer server2 = new MockWebServer();

  private static String oldRetryConfig = null;

  private static final String SUN_RETRY_PROPERTY = "sun.net.http.retryPost";

  @BeforeClass
  public static void disableSunRetry() throws Exception {
    // The Sun HTTP Client retries all requests once on an IOException, which makes testing retry code harder than would
    // be ideal. We can only disable it for post, so lets at least do that.
    oldRetryConfig = System.setProperty(SUN_RETRY_PROPERTY, "false");
  }

  @AfterClass
  public static void resetSunRetry() throws Exception {
    // Restore whatever value existed before the class ran.
    if (oldRetryConfig == null) {
      System.clearProperty(SUN_RETRY_PROPERTY);
    } else {
      System.setProperty(SUN_RETRY_PROPERTY, oldRetryConfig);
    }
  }

  static String hostAndPort(URL url) {
    // our build slaves have underscores in their hostnames which aren't permitted by ribbon
    return "localhost:" + url.getPort();
  }

  /** Two servers, two calls: default round-robin should hit each once. */
  @Test
  public void loadBalancingDefaultPolicyRoundRobin() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setBody("success!"));
    server2.enqueue(new MockResponse().setBody("success!"));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()) + "," + hostAndPort(
        server2.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.create())
        .target(TestInterface.class, "http://" + client());

    api.post();
    api.post();

    assertEquals(1, server1.getRequestCount());
    assertEquals(1, server2.getRequestCount());
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /** A connection failure on the first attempt is retried on the same server. */
  @Test
  public void ioExceptionRetry() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server1.enqueue(new MockResponse().setBody("success!"));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.create())
        .target(TestInterface.class, "http://" + client());

    api.post();

    assertEquals(2, server1.getRequestCount());
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /** With retries disabled at the feign level, repeated failures surface as RetryableException. */
  @Test
  public void ioExceptionFailsAfterTooManyFailures() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.create()).retryer(Retryer.NEVER_RETRY)
        .target(TestInterface.class, "http://" + client());

    try {
      api.post();
      fail("No exception thrown");
    } catch (RetryableException ignored) {
      // expected: server disconnects on every attempt
    }
    //TODO: why are these retrying?
    assertThat(server1.getRequestCount()).isGreaterThanOrEqualTo(1);
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /** MaxAutoRetries=1 should produce at least one same-server retry before giving up. */
  @Test
  public void ribbonRetryConfigurationOnSameServer() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server2.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server2.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));

    getConfigInstance().setProperty(serverListKey(),
        hostAndPort(server1.url("").url()) + "," + hostAndPort(server2.url("").url()));
    getConfigInstance().setProperty(client() + ".ribbon.MaxAutoRetries", 1);

    TestInterface api = Feign.builder().client(RibbonClient.create()).retryer(Retryer.NEVER_RETRY)
        .target(TestInterface.class, "http://" + client());

    try {
      api.post();
      fail("No exception thrown");
    } catch (RetryableException ignored) {
      // expected: both servers always disconnect
    }
    // One of the servers must have been tried twice (same-server retry).
    assertTrue(server1.getRequestCount() >= 2 || server2.getRequestCount() >= 2);
    assertThat(server1.getRequestCount() + server2.getRequestCount()).isGreaterThanOrEqualTo(2);
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /** MaxAutoRetriesNextServer=1 should make ribbon fail over to the second server. */
  @Test
  public void ribbonRetryConfigurationOnMultipleServers() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server2.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server2.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));

    getConfigInstance().setProperty(serverListKey(),
        hostAndPort(server1.url("").url()) + "," + hostAndPort(server2.url("").url()));
    getConfigInstance().setProperty(client() + ".ribbon.MaxAutoRetriesNextServer", 1);

    TestInterface api = Feign.builder().client(RibbonClient.create()).retryer(Retryer.NEVER_RETRY)
        .target(TestInterface.class, "http://" + client());

    try {
      api.post();
      fail("No exception thrown");
    } catch (RetryableException ignored) {
      // expected: both servers always disconnect
    }
    // FIX: the second assertion previously re-checked server1, so the
    // next-server failover was never actually verified.
    assertThat(server1.getRequestCount()).isGreaterThanOrEqualTo(1);
    assertThat(server2.getRequestCount()).isGreaterThanOrEqualTo(1);
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /*
    This test-case replicates a bug that occurs when using RibbonRequest with a query string.

    The querystrings would not be URL-encoded, leading to invalid HTTP-requests if the query string
    contained invalid characters (ex. space).
   */
  @Test
  public void urlEncodeQueryStringParameters() throws IOException, InterruptedException {
    String queryStringValue = "some string with space";
    String expectedQueryStringValue = "some+string+with+space";
    String expectedRequestLine = String.format("GET /?a=%s HTTP/1.1", expectedQueryStringValue);

    server1.enqueue(new MockResponse().setBody("success!"));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.create())
        .target(TestInterface.class, "http://" + client());

    api.getWithQueryParameters(queryStringValue);

    final String recordedRequestLine = server1.takeRequest().getRequestLine();

    // FIX: JUnit's contract is assertEquals(expected, actual); the arguments
    // were swapped, which inverts the failure message.
    assertEquals(expectedRequestLine, recordedRequestLine);
  }

  /** HTTPS targets work when the delegate client trusts the server's certificate. */
  @Test
  public void testHTTPSViaRibbon() {

    Client trustSSLSockets = new Client.Default(TrustingSSLSocketFactory.get(), null);

    server1.useHttps(TrustingSSLSocketFactory.get("localhost"), false);
    server1.enqueue(new MockResponse().setBody("success!"));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.builder().delegate(trustSSLSockets).build())
        .target(TestInterface.class, "https://" + client());
    api.post();
    assertEquals(1, server1.getRequestCount());
  }

  /** Same as {@link #ioExceptionRetry()} but constructing the client via the builder path. */
  @Test
  public void ioExceptionRetryWithBuilder() throws IOException, InterruptedException {
    server1.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START));
    server1.enqueue(new MockResponse().setBody("success!"));

    getConfigInstance().setProperty(serverListKey(), hostAndPort(server1.url("").url()));

    TestInterface api = Feign.builder().client(RibbonClient.create())
        .target(TestInterface.class, "http://" + client());

    api.post();

    // FIX: assertEquals(expected, actual) — arguments were swapped.
    assertEquals(2, server1.getRequestCount());
    // TODO: verify ribbon stats match
    // assertEquals(target.lb().getLoadBalancerStats().getSingleServerStat())
  }

  /** Feign Request.Options must map onto ribbon's connect/read timeout keys. */
  @Test
  public void testFeignOptionsClientConfig() {
    Request.Options options = new Request.Options(1111, 22222);
    IClientConfig config = new RibbonClient.FeignOptionsClientConfig(options);
    assertThat(config.get(CommonClientConfigKey.ConnectTimeout),
        equalTo(options.connectTimeoutMillis()));
    assertThat(config.get(CommonClientConfigKey.ReadTimeout), equalTo(options.readTimeoutMillis()));
    assertEquals(2, config.getProperties().size());
  }

  /** cleanUrl strips only the authority, even when the service name recurs in the path. */
  @Test
  public void testCleanUrlWithMatchingHostAndPart() throws IOException {
    URI uri = RibbonClient.cleanUrl("http://questions/questions/answer/123", "questions");
    assertEquals("http:///questions/answer/123", uri.toString());
  }

  /** cleanUrl removes the service-name authority from the URL. */
  @Test
  public void testCleanUrl() throws IOException {
    URI uri = RibbonClient.cleanUrl("http://myservice/questions/answer/123", "myservice");
    assertEquals("http:///questions/answer/123", uri.toString());
  }

  // Ribbon client name is derived from the running test's method name so
  // each test gets an isolated configuration namespace.
  private String client() {
    return testName.getMethodName();
  }

  private String serverListKey() {
    return client() + ".ribbon.listOfServers";
  }

  @After
  public void clearServerList() {
    getConfigInstance().clearProperty(serverListKey());
  }

  interface TestInterface {

    @RequestLine("POST /")
    void post();

    @RequestLine("GET /?a={a}")
    void getWithQueryParameters(@Param("a") String a);
  }
}
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.domain.materials.git;

import com.thoughtworks.go.config.materials.git.GitMaterialConfig;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.ModifiedAction;
import com.thoughtworks.go.domain.materials.ModifiedFile;
import com.thoughtworks.go.domain.materials.TestSubprocessExecutionContext;
import com.thoughtworks.go.domain.materials.mercurial.StringRevision;
import com.thoughtworks.go.helper.GitSubmoduleRepos;
import com.thoughtworks.go.helper.TestRepo;
import com.thoughtworks.go.mail.SysOutStreamConsumer;
import com.thoughtworks.go.matchers.RegexMatcher;
import com.thoughtworks.go.util.DateUtils;
import com.thoughtworks.go.util.FileUtil;
import com.thoughtworks.go.util.ReflectionUtil;
import com.thoughtworks.go.util.TestFileUtil;
import com.thoughtworks.go.util.command.*;
import org.apache.commons.io.FileUtils;
import org.hamcrest.Description;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.hamcrest.core.Is;
import org.junit.*;
import org.junit.rules.ExpectedException;
import org.mockito.Mock;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.thoughtworks.go.domain.materials.git.GitTestRepo.*;
import static com.thoughtworks.go.util.DateUtils.parseRFC822;
import static com.thoughtworks.go.util.FileUtil.readLines;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.apache.commons.io.filefilter.FileFilterUtils.*;
import static org.apache.commons.lang.time.DateUtils.addDays;
import static org.apache.commons.lang.time.DateUtils.setMilliseconds;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.core.StringEndsWith.endsWith;
import static org.junit.Assert.*;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Tests for {@link GitCommand}: cloning (shallow and full), branch handling
 * and working-copy checkout behavior against throwaway test repositories.
 * NOTE(review): this class continues past the end of this chunk; the last
 * method below is intentionally left exactly as it appears here.
 */
public class GitCommandTest {
    private static final String BRANCH = "foo";
    private static final String SUBMODULE = "submodule-1";

    private GitCommand git;
    private String repoUrl;
    private File repoLocation;
    private static final Date THREE_DAYS_FROM_NOW = setMilliseconds(addDays(new Date(), 3), 0);
    private GitTestRepo gitRepo;
    private File gitLocalRepoDir;
    private GitTestRepo gitFooBranchBundle;

    @Mock
    private TestSubprocessExecutionContext testSubprocessExecutionContext;

    @Rule
    public final ExpectedException expectedException = ExpectedException.none();

    // Creates a fresh test repo, clones it without checkout into a temp
    // working dir, and prepares a bundle repo for branch tests.
    @Before
    public void setup() throws Exception {
        gitRepo = new GitTestRepo();
        gitLocalRepoDir = createTempWorkingDirectory();
        git = new GitCommand(null, gitLocalRepoDir, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>());
        repoLocation = gitRepo.gitRepository();
        repoUrl = gitRepo.projectRepositoryUrl();
        InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer();
        int returnCode = git.cloneWithNoCheckout(outputStreamConsumer, repoUrl);
        if (returnCode > 0) {
            // Surface the git output so a failed clone is diagnosable.
            fail(outputStreamConsumer.getAllOutput());
        }
        gitFooBranchBundle = GitTestRepo.testRepoAtBranch(GIT_FOO_BRANCH_BUNDLE, BRANCH);
        initMocks(this);
    }

    @After
    public void teardown() throws Exception {
        TestRepo.internalTearDown();
    }

    // Null, blank and "master" all normalize to the master branch.
    @Test
    public void shouldDefaultToMasterIfNoBranchIsSpecified(){
        assertThat(ReflectionUtil.getField(new GitCommand(null, gitLocalRepoDir, null, false, new HashMap<>()), "branch"), Is.is("master"));
        assertThat(ReflectionUtil.getField(new GitCommand(null, gitLocalRepoDir, " ", false, new HashMap<>()), "branch"), Is.is("master"));
        assertThat(ReflectionUtil.getField(new GitCommand(null, gitLocalRepoDir, "master", false, new HashMap<>()), "branch"), Is.is("master"));
        assertThat(ReflectionUtil.getField(new GitCommand(null, gitLocalRepoDir, "branch", false, new HashMap<>()), "branch"), Is.is("branch"));
    }

    // A default clone must leave the checkout on master (verified via `git branch`).
    @Test
    public void shouldCloneFromMasterWhenNoBranchIsSpecified(){
        InMemoryStreamConsumer output = inMemoryConsumer();
        git.clone(output, repoUrl);
        CommandLine commandLine = CommandLine.createCommandLine("git").withEncoding("UTF-8").withArg("branch").withWorkingDir(gitLocalRepoDir);
        commandLine.run(output, "");
        assertThat(output.getStdOut(), Is.is("* master"));
    }

    @Test
    public void freshCloneDoesNotHaveWorkingCopy() {
        assertWorkingCopyNotCheckedOut();
    }

    @Test
    public void freshCloneOnAgentSideShouldHaveWorkingCopyCheckedOut() {
        InMemoryStreamConsumer output = inMemoryConsumer();
        File workingDir = createTempWorkingDirectory();
        GitCommand git = new GitCommand(null, workingDir, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>());
        git.clone(output, repoUrl);
        assertWorkingCopyCheckedOut(workingDir);
    }

    @Test
    public void fullCloneIsNotShallow() {
        assertThat(git.isShallow(), is(false));
    }

    // Shallow clone with depth 2 keeps the newest revisions only.
    @Test
    public void shouldOnlyCloneLimitedRevisionsIfDepthSpecified() throws Exception {
        FileUtil.deleteFolder(this.gitLocalRepoDir);
        git.clone(inMemoryConsumer(), repoUrl, 2);
        assertThat(git.isShallow(), is(true));
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_4), is(true));
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_3), is(true));
        // can not assert on revision_2, because on old version of git (1.7)
        // depth '2' actually clone 3 revisions
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_1), is(false));
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_0), is(false));
    }

    // Unshallowing by an intermediate depth keeps the repo shallow until
    // Integer.MAX_VALUE unshallows completely.
    @Test
    public void unshallowALocalRepoWithArbitraryDepth() throws Exception {
        FileUtil.deleteFolder(this.gitLocalRepoDir);
        git.clone(inMemoryConsumer(), repoUrl, 2);
        git.unshallow(inMemoryConsumer(), 3);
        assertThat(git.isShallow(), is(true));
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_2), is(true));
        // can not assert on revision_1, because on old version of git (1.7)
        // depth '3' actually clone 4 revisions
        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_0), is(false));

        git.unshallow(inMemoryConsumer(), Integer.MAX_VALUE);
        assertThat(git.isShallow(), is(false));

        assertThat(git.containsRevisionInBranch(GitTestRepo.REVISION_0), is(true));
    }

    @Test
    public void unshallowShouldNotResultInWorkingCopyCheckout() {
        FileUtil.deleteFolder(this.gitLocalRepoDir);
        git.cloneWithNoCheckout(inMemoryConsumer(), repoUrl);
        git.unshallow(inMemoryConsumer(), 3);
        assertWorkingCopyNotCheckedOut();
    }

    // Cloning a branch-bearing bundle must check out that branch.
    @Test
    public void shouldCloneFromBranchWhenMaterialPointsToABranch() throws IOException {
        gitLocalRepoDir = createTempWorkingDirectory();
        git = new GitCommand(null, gitLocalRepoDir, BRANCH, false, new HashMap<>());
        GitCommand branchedGit = new GitCommand(null, gitLocalRepoDir, BRANCH, false, new HashMap<>());
        branchedGit.clone(inMemoryConsumer(), gitFooBranchBundle.projectRepositoryUrl());
        InMemoryStreamConsumer output = inMemoryConsumer();
        CommandLine.createCommandLine("git").withEncoding("UTF-8").withArg("branch").withWorkingDir(gitLocalRepoDir).run(output, "");
        assertThat(output.getStdOut(), Is.is("* foo"));
    }

    // NOTE(review): method continues in the next chunk — body is cut here.
    @Test
    public void shouldGetTheCurrentBranchForTheCheckedOutRepo(){
        gitLocalRepoDir = createTempWorkingDirectory();
        CommandLine gitCloneCommand = CommandLine.createCommandLine("git").withEncoding("UTF-8").withArg("clone");
        gitCloneCommand.withArg("--branch=" + BRANCH).withArg(new UrlArgument(gitFooBranchBundle.projectRepositoryUrl())).withArg(gitLocalRepoDir.getAbsolutePath());
gitCloneCommand.run(inMemoryConsumer(), ""); git = new GitCommand(null, gitLocalRepoDir, BRANCH, false, new HashMap<>()); assertThat(git.getCurrentBranch(), Is.is(BRANCH)); } @Test public void shouldBombForFetchFailure() throws IOException { executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", "git://user:secret@foo.bar/baz"); try { InMemoryStreamConsumer output = new InMemoryStreamConsumer(); git.fetch(output); fail("should have failed for non 0 return code. Git output was:\n " + output.getAllOutput()); } catch (Exception e) { assertThat(e.getMessage(), is(String.format("git fetch failed for [git://user:******@foo.bar/baz]"))); } } @Test public void shouldBombForResettingFailure() throws IOException { try { git.resetWorkingDir(new SysOutStreamConsumer(), new StringRevision("abcdef")); fail("should have failed for non 0 return code"); } catch (Exception e) { assertThat(e.getMessage(), is(String.format("git reset failed for [%s]", gitLocalRepoDir))); } } @Test public void shouldOutputSubmoduleRevisionsAfterUpdate() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); submoduleRepos.addSubmodule(SUBMODULE, "sub1"); GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); gitWithSubmodule.clone(inMemoryConsumer(), submoduleRepos.mainRepo().getUrl()); InMemoryStreamConsumer outConsumer = new InMemoryStreamConsumer(); gitWithSubmodule.resetWorkingDir(outConsumer, new StringRevision("HEAD")); Matcher matcher = Pattern.compile(".*^\\s[a-f0-9A-F]{40} sub1 \\(heads/master\\)$.*", Pattern.MULTILINE | Pattern.DOTALL).matcher(outConsumer.getAllOutput()); assertThat(matcher.matches(), is(true)); } @Test public void shouldBombForResetWorkingDirWhenSubmoduleUpdateFails() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); File submoduleFolder = submoduleRepos.addSubmodule(SUBMODULE, "sub1"); 
GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); gitWithSubmodule.clone(inMemoryConsumer(), submoduleRepos.mainRepo().getUrl()); FileUtils.deleteDirectory(submoduleFolder); assertThat(submoduleFolder.exists(), is(false)); try { gitWithSubmodule.resetWorkingDir(new SysOutStreamConsumer(), new StringRevision("HEAD")); fail("should have failed for non 0 return code"); } catch (Exception e) { assertThat(e.getMessage(), new RegexMatcher(String.format("[Cc]lone of '%s' into submodule path '((.*)[\\/])?sub1' failed", Pattern.quote(submoduleFolder.getAbsolutePath())))); } } @Test public void shouldRetrieveLatestModification() throws Exception { Modification mod = git.latestModification().get(0); assertThat(mod.getUserName(), is("Chris Turner <cturner@thoughtworks.com>")); assertThat(mod.getComment(), is("Added 'run-till-file-exists' ant target")); assertThat(mod.getModifiedTime(), is(parseRFC822("Fri, 12 Feb 2010 16:12:04 -0800"))); assertThat(mod.getRevision(), is("5def073a425dfe239aabd4bf8039ffe3b0e8856b")); List<ModifiedFile> files = mod.getModifiedFiles(); assertThat(files.size(), is(1)); assertThat(files.get(0).getFileName(), is("build.xml")); assertThat(files.get(0).getAction(), Matchers.is(ModifiedAction.modified)); } @Test public void retrieveLatestModificationShouldNotResultInWorkingCopyCheckOut() throws Exception{ git.latestModification(); assertWorkingCopyNotCheckedOut(); } @Test public void getModificationsSinceShouldNotResultInWorkingCopyCheckOut() throws Exception{ git.modificationsSince(GitTestRepo.REVISION_2); assertWorkingCopyNotCheckedOut(); } @Test public void shouldReturnNothingForModificationsSinceIfARebasedCommitSHAIsPassed() throws IOException { GitTestRepo remoteRepo = new GitTestRepo(); executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", remoteRepo.projectRepositoryUrl()); GitCommand command = new 
GitCommand(remoteRepo.createMaterial().getFingerprint(), gitLocalRepoDir, "master", false, new HashMap<>()); Modification modification = remoteRepo.addFileAndAmend("foo", "amendedCommit").get(0); assertThat(command.modificationsSince(new StringRevision(modification.getRevision())).isEmpty(), is(true)); } @Test public void shouldReturnTheRebasedCommitForModificationsSinceTheRevisionBeforeRebase() throws IOException { GitTestRepo remoteRepo = new GitTestRepo(); executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", remoteRepo.projectRepositoryUrl()); GitCommand command = new GitCommand(remoteRepo.createMaterial().getFingerprint(), gitLocalRepoDir, "master", false, new HashMap<>()); Modification modification = remoteRepo.addFileAndAmend("foo", "amendedCommit").get(0); assertThat(command.modificationsSince(REVISION_4).get(0), is(modification)); } @Test(expected = CommandLineException.class) public void shouldBombIfCheckedForModificationsSinceWithASHAThatNoLongerExists() throws IOException { GitTestRepo remoteRepo = new GitTestRepo(); executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", remoteRepo.projectRepositoryUrl()); GitCommand command = new GitCommand(remoteRepo.createMaterial().getFingerprint(), gitLocalRepoDir, "master", false, new HashMap<>()); Modification modification = remoteRepo.checkInOneFile("foo", "Adding a commit").get(0); remoteRepo.addFileAndAmend("bar", "amendedCommit"); command.modificationsSince(new StringRevision(modification.getRevision())); } @Test(expected = CommandLineException.class) public void shouldBombIfCheckedForModificationsSinceWithANonExistentRef() throws IOException { GitTestRepo remoteRepo = new GitTestRepo(); executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", remoteRepo.projectRepositoryUrl()); GitCommand command = new GitCommand(remoteRepo.createMaterial().getFingerprint(), 
gitLocalRepoDir, "non-existent-branch", false, new HashMap<>()); Modification modification = remoteRepo.checkInOneFile("foo", "Adding a commit").get(0); command.modificationsSince(new StringRevision(modification.getRevision())); } @Test public void shouldBombWhileRetrievingLatestModificationFromANonExistentRef() throws IOException { expectedException.expect(CommandLineException.class); expectedException.expectMessage("ambiguous argument 'origin/non-existent-branch': unknown revision or path not in the working tree."); GitTestRepo remoteRepo = new GitTestRepo(); executeOnGitRepo("git", "remote", "rm", "origin"); executeOnGitRepo("git", "remote", "add", "origin", remoteRepo.projectRepositoryUrl()); GitCommand command = new GitCommand(remoteRepo.createMaterial().getFingerprint(), gitLocalRepoDir, "non-existent-branch", false, new HashMap<>()); command.latestModification(); } @Test public void shouldReturnTrueIfTheGivenBranchContainsTheRevision() { assertThat(git.containsRevisionInBranch(REVISION_4), is(true)); } @Test public void shouldReturnFalseIfTheGivenBranchDoesNotContainTheRevision() { assertThat(git.containsRevisionInBranch(NON_EXISTENT_REVISION), is(false)); } @Test public void shouldRetrieveFilenameForInitialRevision() throws IOException { GitTestRepo testRepo = new GitTestRepo(GitTestRepo.GIT_SUBMODULE_REF_BUNDLE); GitCommand gitCommand = new GitCommand(null, testRepo.gitRepository(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); Modification modification = gitCommand.latestModification().get(0); assertThat(modification.getModifiedFiles().size(), is(1)); assertThat(modification.getModifiedFiles().get(0).getFileName(), is("remote.txt")); } @Test public void shouldRetrieveLatestModificationFromBranch() throws Exception { GitTestRepo branchedRepo = GitTestRepo.testRepoAtBranch(GIT_FOO_BRANCH_BUNDLE, BRANCH); GitCommand branchedGit = new GitCommand(null, createTempWorkingDirectory(), BRANCH, false, new HashMap<>()); 
branchedGit.clone(inMemoryConsumer(), branchedRepo.projectRepositoryUrl()); Modification mod = branchedGit.latestModification().get(0); assertThat(mod.getUserName(), is("Chris Turner <cturner@thoughtworks.com>")); assertThat(mod.getComment(), is("Started foo branch")); assertThat(mod.getModifiedTime(), is(parseRFC822("Tue, 05 Feb 2009 14:28:08 -0800"))); assertThat(mod.getRevision(), is("b4fa7271c3cef91822f7fa502b999b2eab2a380d")); List<ModifiedFile> files = mod.getModifiedFiles(); assertThat(files.size(), is(1)); assertThat(files.get(0).getFileName(), is("first.txt")); assertThat(files.get(0).getAction(), is(ModifiedAction.modified)); } @Test public void shouldRetrieveListOfSubmoduleFolders() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); submoduleRepos.addSubmodule(SUBMODULE, "sub1"); GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); gitWithSubmodule.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); gitWithSubmodule.fetchAndResetToHead(outputStreamConsumer); gitWithSubmodule.updateSubmoduleWithInit(outputStreamConsumer); List<String> folders = gitWithSubmodule.submoduleFolders(); assertThat(folders.size(), is(1)); assertThat(folders.get(0), is("sub1")); } @Test public void shouldNotThrowErrorWhenConfigRemoveSectionFails() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); submoduleRepos.addSubmodule(SUBMODULE, "sub1"); GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()) { //hack to reproduce synchronization issue @Override public Map<String, String> submoduleUrls() { return Collections.singletonMap("submodule", "submodule"); } }; InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); gitWithSubmodule.clone(outputStreamConsumer, 
submoduleRepos.mainRepo().getUrl()); gitWithSubmodule.updateSubmoduleWithInit(outputStreamConsumer); } @Test public void shouldNotFailIfUnableToRemoveSubmoduleEntryFromConfig() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); submoduleRepos.addSubmodule(SUBMODULE, "sub1"); GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); gitWithSubmodule.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); gitWithSubmodule.fetchAndResetToHead(outputStreamConsumer); gitWithSubmodule.updateSubmoduleWithInit(outputStreamConsumer); List<String> folders = gitWithSubmodule.submoduleFolders(); assertThat(folders.size(), is(1)); assertThat(folders.get(0), is("sub1")); } @Test public void shouldRetrieveSubmoduleUrls() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); submoduleRepos.addSubmodule(SUBMODULE, "sub1"); GitCommand gitWithSubmodule = new GitCommand(null, createTempWorkingDirectory(), GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); gitWithSubmodule.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); gitWithSubmodule.fetchAndResetToHead(outputStreamConsumer); gitWithSubmodule.updateSubmoduleWithInit(outputStreamConsumer); Map<String, String> urls = gitWithSubmodule.submoduleUrls(); assertThat(urls.size(), is(1)); assertThat(urls.containsKey("sub1"), is(true)); assertThat(urls.get("sub1"), endsWith(SUBMODULE)); } @Test public void shouldRetrieveZeroSubmoduleUrlsIfTheyAreNotConfigured() throws Exception { Map<String, String> submoduleUrls = git.submoduleUrls(); assertThat(submoduleUrls.size(), is(0)); } @Test public void shouldRetrieveRemoteRepoValue() throws Exception { assertThat(git.workingRepositoryUrl().forCommandline(), startsWith(repoUrl)); } @Test public void 
shouldCheckIfRemoteRepoExists() throws Exception { final TestSubprocessExecutionContext executionContext = new TestSubprocessExecutionContext(); GitCommand.checkConnection(git.workingRepositoryUrl(), "master", executionContext.getDefaultEnvironmentVariables()); } @Test(expected = Exception.class) public void shouldThrowExceptionWhenRepoNotExist() throws Exception { final TestSubprocessExecutionContext executionContext = new TestSubprocessExecutionContext(); GitCommand.checkConnection(new UrlArgument("git://somewhere.is.not.exist"), "master", executionContext.getDefaultEnvironmentVariables()); } @Test(expected = Exception.class) public void shouldThrowExceptionWhenRemoteBranchDoesNotExist() throws Exception { GitCommand.checkConnection(new UrlArgument(gitRepo.projectRepositoryUrl()), "Invalid_Branch", testSubprocessExecutionContext.getDefaultEnvironmentVariables()); } @Test public void shouldExecuteGitLsWhenCheckingToSeeIfWeCanConnectToTheRepo() throws Exception { UrlArgument url = new UrlArgument("git://github.com/xli/dtr.git"); final TestSubprocessExecutionContext executionContext = new TestSubprocessExecutionContext(); CommandLine commandLine = GitCommand.commandToCheckConnection(url, executionContext.getDefaultEnvironmentVariables()); assertThat(commandLine.getExecutable(), is("git")); List<CommandArgument> arguments = commandLine.getArguments(); assertThat(arguments.get(0), is(new StringArgument("ls-remote"))); assertThat(arguments.get(1), is(url)); } @Test public void shouldIncludeNewChangesInModificationCheck() throws Exception { String originalNode = git.latestModification().get(0).getRevision(); File testingFile = checkInNewRemoteFile(); Modification modification = git.latestModification().get(0); assertThat(modification.getRevision(), is(not(originalNode))); assertThat(modification.getComment(), is("New checkin of " + testingFile.getName())); assertThat(modification.getModifiedFiles().size(), is(1)); 
assertThat(modification.getModifiedFiles().get(0).getFileName(), is(testingFile.getName())); } @Test public void shouldIncludeChangesFromTheFutureInModificationCheck() throws Exception { String originalNode = git.latestModification().get(0).getRevision(); File testingFile = checkInNewRemoteFileInFuture(THREE_DAYS_FROM_NOW); Modification modification = git.latestModification().get(0); assertThat(modification.getRevision(), is(not(originalNode))); assertThat(modification.getComment(), is("New checkin of " + testingFile.getName())); assertThat(modification.getModifiedTime(), is(THREE_DAYS_FROM_NOW)); } @Test public void shouldThrowExceptionIfRepoCanNotConnectWhenModificationCheck() throws Exception { FileUtil.deleteFolder(repoLocation); try { git.latestModification(); fail("Should throw exception when repo cannot connected"); } catch (Exception e) { assertThat(e.getMessage(), anyOf(containsString("The remote end hung up unexpectedly"), containsString("Could not read from remote repository"))); } } @Test public void shouldParseGitOutputCorrectly() throws IOException { List<String> stringList = readLines(getClass().getResourceAsStream("git_sample_output.text")); GitModificationParser parser = new GitModificationParser(); List<Modification> mods = parser.parse(stringList); assertThat(mods.size(), is(3)); Modification mod = mods.get(2); assertThat(mod.getRevision(), is("46cceff864c830bbeab0a7aaa31707ae2302762f")); assertThat(mod.getModifiedTime(), is(DateUtils.parseISO8601("2009-08-11 12:37:09 -0700"))); assertThat(mod.getUserDisplayName(), is("Cruise Developer <cruise@cruise-sf3.(none)>")); assertThat(mod.getComment(), is("author:cruise <cceuser@CceDev01.(none)>\n" + "node:ecfab84dd4953105e3301c5992528c2d381c1b8a\n" + "date:2008-12-31 14:32:40 +0800\n" + "description:Moving rakefile to build subdirectory for #2266\n" + "\n" + "author:CceUser <cceuser@CceDev01.(none)>\n" + "node:fd16efeb70fcdbe63338c49995ce9ff7659e6e77\n" + "date:2008-12-31 14:17:06 +0800\n" + 
"description:Adding rakefile")); } @Test public void shouldCleanUnversionedFilesInsideSubmodulesBeforeUpdating() throws Exception { GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); String submoduleDirectoryName = "local-submodule"; submoduleRepos.addSubmodule(SUBMODULE, submoduleDirectoryName); File cloneDirectory = createTempWorkingDirectory(); GitCommand clonedCopy = new GitCommand(null, cloneDirectory, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); clonedCopy.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); // Clone repository without submodules clonedCopy.resetWorkingDir(outputStreamConsumer, new StringRevision("HEAD")); // Pull submodules to working copy - Pipeline counter 1 File unversionedFile = new File(new File(cloneDirectory, submoduleDirectoryName), "unversioned_file.txt"); FileUtils.writeStringToFile(unversionedFile, "this is an unversioned file. lets see you deleting me.. come on.. I dare you!!!!"); clonedCopy.resetWorkingDir(outputStreamConsumer, new StringRevision("HEAD")); // Should clean unversioned file on next fetch - Pipeline counter 2 assertThat(unversionedFile.exists(), is(false)); } @Test public void shouldRemoveChangesToModifiedFilesInsideSubmodulesBeforeUpdating() throws Exception { InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); String submoduleDirectoryName = "local-submodule"; File cloneDirectory = createTempWorkingDirectory(); File remoteSubmoduleLocation = submoduleRepos.addSubmodule(SUBMODULE, submoduleDirectoryName); /* Simulate an agent checkout of code. 
*/ GitCommand clonedCopy = new GitCommand(null, cloneDirectory, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); clonedCopy.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); clonedCopy.resetWorkingDir(outputStreamConsumer, new StringRevision("HEAD")); /* Simulate a local modification of file inside submodule, on agent side. */ File fileInSubmodule = allFilesIn(new File(cloneDirectory, submoduleDirectoryName), "file-").get(0); FileUtils.writeStringToFile(fileInSubmodule, "Some other new content."); /* Commit a change to the file on the repo. */ List<Modification> modifications = submoduleRepos.modifyOneFileInSubmoduleAndUpdateMainRepo( remoteSubmoduleLocation, submoduleDirectoryName, fileInSubmodule.getName(), "NEW CONTENT OF FILE"); /* Simulate start of a new build on agent. */ clonedCopy.fetch(outputStreamConsumer); clonedCopy.resetWorkingDir(outputStreamConsumer, new StringRevision(modifications.get(0).getRevision())); assertThat(FileUtils.readFileToString(fileInSubmodule), is("NEW CONTENT OF FILE")); } @Test public void shouldAllowSubmoduleUrlstoChange() throws Exception { InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer(); GitSubmoduleRepos submoduleRepos = new GitSubmoduleRepos(); String submoduleDirectoryName = "local-submodule"; File cloneDirectory = createTempWorkingDirectory(); File remoteSubmoduleLocation = submoduleRepos.addSubmodule(SUBMODULE, submoduleDirectoryName); GitCommand clonedCopy = new GitCommand(null, cloneDirectory, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); clonedCopy.clone(outputStreamConsumer, submoduleRepos.mainRepo().getUrl()); clonedCopy.fetchAndResetToHead(outputStreamConsumer); submoduleRepos.changeSubmoduleUrl(submoduleDirectoryName); clonedCopy.fetchAndResetToHead(outputStreamConsumer); } private List<File> allFilesIn(File directory, String prefixOfFiles) { return new ArrayList<>(FileUtils.listFiles(directory, andFileFilter(fileFileFilter(), prefixFileFilter(prefixOfFiles)), 
null)); } private File createTempWorkingDirectory() { File tempFile = TestFileUtil.createTempFolder("GitCommandTest" + System.currentTimeMillis()); return new File(tempFile, "repo"); } private File checkInNewRemoteFile() throws IOException { GitCommand remoteGit = new GitCommand(null, repoLocation, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); File testingFile = new File(repoLocation, "testing-file" + System.currentTimeMillis() + ".txt"); testingFile.createNewFile(); remoteGit.add(testingFile); remoteGit.commit("New checkin of " + testingFile.getName()); return testingFile; } private File checkInNewRemoteFileInFuture(Date checkinDate) throws IOException { GitCommand remoteGit = new GitCommand(null, repoLocation, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>()); File testingFile = new File(repoLocation, "testing-file" + System.currentTimeMillis() + ".txt"); testingFile.createNewFile(); remoteGit.add(testingFile); remoteGit.commitOnDate("New checkin of " + testingFile.getName(), checkinDate); return testingFile; } private TypeSafeMatcher<String> startsWith(final String repoUrl) { return new TypeSafeMatcher<String>() { public boolean matchesSafely(String item) { return item.startsWith(repoUrl); } public void describeTo(Description description) { description.appendText("to start with \"" + repoUrl + "\""); } }; } private void executeOnGitRepo(String command, String... args) throws IOException { executeOnDir(gitLocalRepoDir, command, args); } private void executeOnDir(File dir, String command, String... 
args) { CommandLine commandLine = CommandLine.createCommandLine(command); commandLine.withArgs(args); assertThat(dir.exists(), is(true)); commandLine.setWorkingDir(dir); commandLine.runOrBomb(true, null); } private void assertWorkingCopyNotCheckedOut() { assertThat(gitLocalRepoDir.listFiles(), Is.is(new File[]{new File(gitLocalRepoDir, ".git")})); } private void assertWorkingCopyCheckedOut(File workingDir) { assertTrue(workingDir.listFiles().length > 1); } }
package org.spongycastle.math.ec.custom.sec;

import org.spongycastle.math.ec.ECConstants;
import org.spongycastle.math.ec.ECCurve;
import org.spongycastle.math.ec.ECFieldElement;
import org.spongycastle.math.ec.ECPoint;
import org.spongycastle.math.ec.ECPoint.AbstractF2m;

/**
 * Point arithmetic for the binary (characteristic-2) curve sect113r1.
 *
 * Internally the stored "y" coordinate is not the affine Y: as the inline
 * comments note, it holds Lambda = X + Y/X (with a projective Z in zs[0]),
 * so conversion happens on the fly in {@link #getYCoord()} and
 * {@link #getCompressionYTilde()}. The formulas below are statement-order
 * sensitive field arithmetic; do not reorder without re-deriving them.
 */
public class SecT113R1Point extends AbstractF2m
{
    /**
     * @deprecated Use ECCurve.createPoint to construct points
     */
    public SecT113R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y)
    {
        this(curve, x, y, false);
    }

    /**
     * @deprecated per-point compression property will be removed, refer {@link #getEncoded(boolean)}
     */
    public SecT113R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, boolean withCompression)
    {
        super(curve, x, y);

        // A valid point has either both coordinates set or both null (infinity).
        if ((x == null) != (y == null))
        {
            throw new IllegalArgumentException("Exactly one of the field elements is null");
        }

        this.withCompression = withCompression;
    }

    SecT113R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, ECFieldElement[] zs, boolean withCompression)
    {
        super(curve, x, y, zs);

        this.withCompression = withCompression;
    }

    // Returns a curve-detached copy in affine coordinates.
    protected ECPoint detach()
    {
        return new SecT113R1Point(null, getAffineXCoord(), getAffineYCoord());
    }

    /**
     * Recovers the affine Y from the stored Lambda representation,
     * normalizing by Z when the point is not already affine.
     */
    public ECFieldElement getYCoord()
    {
        ECFieldElement X = x, L = y;

        if (this.isInfinity() || X.isZero())
        {
            return L;
        }

        // Y is actually Lambda (X + Y/X) here; convert to affine value on the fly
        ECFieldElement Y = L.add(X).multiply(X);

        ECFieldElement Z = zs[0];
        if (!Z.isOne())
        {
            Y = Y.divide(Z);
        }

        return Y;
    }

    // Compression bit yTilde = bit0(Y/X); derived from Lambda without a division.
    protected boolean getCompressionYTilde()
    {
        ECFieldElement X = this.getRawXCoord();
        if (X.isZero())
        {
            return false;
        }

        ECFieldElement Y = this.getRawYCoord();

        // Y is actually Lambda (X + Y/X) here
        return Y.testBitZero() != X.testBitZero();
    }

    /**
     * Point addition in lambda-projective coordinates.
     * Handles the special cases (infinity, X == 0, doubling, inverse pair)
     * before the general-case formula.
     */
    public ECPoint add(ECPoint b)
    {
        if (this.isInfinity())
        {
            return b;
        }
        if (b.isInfinity())
        {
            return this;
        }

        ECCurve curve = this.getCurve();

        ECFieldElement X1 = this.x;
        ECFieldElement X2 = b.getRawXCoord();

        if (X1.isZero())
        {
            if (X2.isZero())
            {
                return curve.getInfinity();
            }

            // Delegate so the X1 == 0 handling below applies to the other operand.
            return b.add(this);
        }

        ECFieldElement L1 = this.y, Z1 = this.zs[0];
        ECFieldElement L2 = b.getRawYCoord(), Z2 = b.getZCoord(0);

        // Cross-scale both operands into a common denominator (skip multiplies when Z == 1).
        boolean Z1IsOne = Z1.isOne();
        ECFieldElement U2 = X2, S2 = L2;
        if (!Z1IsOne)
        {
            U2 = U2.multiply(Z1);
            S2 = S2.multiply(Z1);
        }

        boolean Z2IsOne = Z2.isOne();
        ECFieldElement U1 = X1, S1 = L1;
        if (!Z2IsOne)
        {
            U1 = U1.multiply(Z2);
            S1 = S1.multiply(Z2);
        }

        ECFieldElement A = S1.add(S2);
        ECFieldElement B = U1.add(U2);

        if (B.isZero())
        {
            // Same X: either the same point (double it) or inverses (infinity).
            if (A.isZero())
            {
                return twice();
            }

            return curve.getInfinity();
        }

        ECFieldElement X3, L3, Z3;
        if (X2.isZero())
        {
            // TODO This can probably be optimized quite a bit
            // Fall back to affine arithmetic for the X2 == 0 special case.
            ECPoint p = this.normalize();
            X1 = p.getXCoord();
            ECFieldElement Y1 = p.getYCoord();

            ECFieldElement Y2 = L2;
            ECFieldElement L = Y1.add(Y2).divide(X1);

            X3 = L.square().add(L).add(X1).add(curve.getA());
            if (X3.isZero())
            {
                return new SecT113R1Point(curve, X3, curve.getB().sqrt(), this.withCompression);
            }

            ECFieldElement Y3 = L.multiply(X1.add(X3)).add(X3).add(Y1);
            L3 = Y3.divide(X3).add(X3);
            Z3 = curve.fromBigInteger(ECConstants.ONE);
        }
        else
        {
            // General-case lambda-projective addition.
            B = B.square();

            ECFieldElement AU1 = A.multiply(U1);
            ECFieldElement AU2 = A.multiply(U2);

            X3 = AU1.multiply(AU2);
            if (X3.isZero())
            {
                return new SecT113R1Point(curve, X3, curve.getB().sqrt(), this.withCompression);
            }

            ECFieldElement ABZ2 = A.multiply(B);
            if (!Z2IsOne)
            {
                ABZ2 = ABZ2.multiply(Z2);
            }

            L3 = AU2.add(B).squarePlusProduct(ABZ2, L1.add(Z1));

            Z3 = ABZ2;
            if (!Z1IsOne)
            {
                Z3 = Z3.multiply(Z1);
            }
        }

        return new SecT113R1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }

    /**
     * Point doubling in lambda-projective coordinates.
     */
    public ECPoint twice()
    {
        if (this.isInfinity())
        {
            return this;
        }

        ECCurve curve = this.getCurve();

        ECFieldElement X1 = this.x;
        if (X1.isZero())
        {
            // A point with X == 0 is it's own additive inverse
            return curve.getInfinity();
        }

        ECFieldElement L1 = this.y, Z1 = this.zs[0];

        boolean Z1IsOne = Z1.isOne();
        ECFieldElement L1Z1 = Z1IsOne ? L1 : L1.multiply(Z1);
        ECFieldElement Z1Sq = Z1IsOne ? Z1 : Z1.square();
        ECFieldElement a = curve.getA();
        ECFieldElement aZ1Sq = Z1IsOne ? a : a.multiply(Z1Sq);
        ECFieldElement T = L1.square().add(L1Z1).add(aZ1Sq);
        if (T.isZero())
        {
            // Resulting X is zero; Lambda is undefined there, so the stored
            // y becomes sqrt(b) (the convention used throughout this class).
            return new SecT113R1Point(curve, T, curve.getB().sqrt(), withCompression);
        }

        ECFieldElement X3 = T.square();
        ECFieldElement Z3 = Z1IsOne ? T : T.multiply(Z1Sq);

        ECFieldElement X1Z1 = Z1IsOne ? X1 : X1.multiply(Z1);
        ECFieldElement L3 = X1Z1.squarePlusProduct(T, L1Z1).add(X3).add(Z3);

        return new SecT113R1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }

    /**
     * Computes 2 * this + b in one fused operation; falls back to
     * twice().add(b) when b is not in a form the fused formula handles.
     */
    public ECPoint twicePlus(ECPoint b)
    {
        if (this.isInfinity())
        {
            return b;
        }
        if (b.isInfinity())
        {
            return twice();
        }

        ECCurve curve = this.getCurve();

        ECFieldElement X1 = this.x;
        if (X1.isZero())
        {
            // A point with X == 0 is it's own additive inverse
            return b;
        }

        // The fused formula below assumes b is affine (Z2 == 1) with X2 != 0.
        ECFieldElement X2 = b.getRawXCoord(), Z2 = b.getZCoord(0);
        if (X2.isZero() || !Z2.isOne())
        {
            return twice().add(b);
        }

        ECFieldElement L1 = this.y, Z1 = this.zs[0];
        ECFieldElement L2 = b.getRawYCoord();

        ECFieldElement X1Sq = X1.square();
        ECFieldElement L1Sq = L1.square();
        ECFieldElement Z1Sq = Z1.square();
        ECFieldElement L1Z1 = L1.multiply(Z1);

        ECFieldElement T = curve.getA().multiply(Z1Sq).add(L1Sq).add(L1Z1);
        ECFieldElement L2plus1 = L2.addOne();
        ECFieldElement A = curve.getA().add(L2plus1).multiply(Z1Sq).add(L1Sq).multiplyPlusProduct(T, X1Sq, Z1Sq);
        ECFieldElement X2Z1Sq = X2.multiply(Z1Sq);
        ECFieldElement B = X2Z1Sq.add(T).square();

        if (B.isZero())
        {
            // 2*this and b share an X coordinate: equal (double b) or inverses (infinity).
            if (A.isZero())
            {
                return b.twice();
            }

            return curve.getInfinity();
        }

        if (A.isZero())
        {
            // Result has X == 0; use the sqrt(b) convention for the stored y.
            return new SecT113R1Point(curve, A, curve.getB().sqrt(), withCompression);
        }

        ECFieldElement X3 = A.square().multiply(X2Z1Sq);
        ECFieldElement Z3 = A.multiply(B).multiply(Z1Sq);
        ECFieldElement L3 = A.add(B).square().multiplyPlusProduct(T, L2plus1, Z3);

        return new SecT113R1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }

    /**
     * Negation: in the Lambda representation, -P has the same X and
     * Lambda + 1, i.e. L + Z in projective form.
     */
    public ECPoint negate()
    {
        if (this.isInfinity())
        {
            return this;
        }

        ECFieldElement X = this.x;
        if (X.isZero())
        {
            return this;
        }

        // L is actually Lambda (X + Y/X) here
        ECFieldElement L = this.y, Z = this.zs[0];
        return new SecT113R1Point(curve, X, L.add(Z), new ECFieldElement[]{ Z }, this.withCompression);
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.source.tree.java;

import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.Constants;
import com.intellij.psi.impl.source.PsiClassReferenceType;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.resolve.reference.impl.PsiPolyVariantCachingReference;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.tree.ChildRoleBase;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * PSI tree element backing a Java {@code new} expression ({@link PsiNewExpression}):
 * object creation, anonymous class creation, and array creation (with dimensions
 * and/or an initializer). Children are addressed via {@code ChildRole} constants;
 * {@link #findChildByRole(int)} and {@link #getChildRole(ASTNode)} define the mapping
 * between AST child nodes and roles.
 */
public class PsiNewExpressionImpl extends ExpressionPsiElement implements PsiNewExpression {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.tree.java.PsiNewExpressionImpl");

  public PsiNewExpressionImpl() {
    super(JavaElementType.NEW_EXPRESSION);
  }

  /**
   * Computes the static type of the expression by walking the direct children once:
   * a class reference or primitive-type keyword establishes the base type, each
   * {@code [} token wraps it into one more array level, and an anonymous class body
   * produces the (raw-substituted) anonymous base type. Type annotations collected
   * on the way are attached to the created type. Returns {@code null} for an
   * incomplete expression with no type element.
   */
  public PsiType getType(){
    PsiType type = null;
    List<PsiAnnotation> annotations = new SmartList<PsiAnnotation>();
    for(ASTNode child = getFirstChildNode(); child != null; child = child.getTreeNext()){
      IElementType elementType = child.getElementType();
      if (elementType == JavaElementType.ANNOTATION) {
        // Remember type annotations seen so far; they are applied to the next
        // type construction below.
        annotations.add((PsiAnnotation)child.getPsi());
        continue;
      }
      if (elementType == JavaElementType.JAVA_CODE_REFERENCE){
        // At most one base type per new-expression (assertion guards tree shape).
        LOG.assertTrue(type == null);
        type = new PsiClassReferenceType((PsiJavaCodeReferenceElement)SourceTreeToPsiMap.treeElementToPsi(child), null);
      }
      else if (ElementType.PRIMITIVE_TYPE_BIT_SET.contains(elementType)){
        LOG.assertTrue(type == null);
        PsiAnnotation[] annos = annotations.toArray(new PsiAnnotation[annotations.size()]);
        type = JavaPsiFacade.getInstance(getProject()).getElementFactory().createPrimitiveType(child.getText(), annos);
      }
      else if (elementType == JavaTokenType.LBRACKET){
        // Each '[' adds one array dimension to the already-established base type.
        LOG.assertTrue(type != null);
        PsiAnnotation[] annos = annotations.toArray(new PsiAnnotation[annotations.size()]);
        type = type.createArrayType(annos);
      }
      else if (elementType == JavaElementType.ANONYMOUS_CLASS){
        PsiAnnotation[] annos = annotations.toArray(new PsiAnnotation[annotations.size()]);
        PsiElementFactory factory = JavaPsiFacade.getInstance(getProject()).getElementFactory();
        PsiClass aClass = (PsiClass)SourceTreeToPsiMap.treeElementToPsi(child);
        // Raw substitutor: the anonymous class type is used raw unless it is a
        // type parameter (which has no type arguments to erase).
        PsiSubstitutor substitutor = aClass instanceof PsiTypeParameter ? PsiSubstitutor.EMPTY : factory.createRawSubstitutor(aClass);
        type = factory.createType(aClass, substitutor, PsiUtil.getLanguageLevel(aClass), annos);
      }
    }
    return type;
  }

  /**
   * Returns the constructor argument list. For an anonymous class creation the
   * argument list is a child of the anonymous class node, so look there when no
   * direct child fills the role.
   */
  public PsiExpressionList getArgumentList() {
    PsiExpressionList list = (PsiExpressionList)findChildByRoleAsPsiElement(ChildRole.ARGUMENT_LIST);
    if (list != null) return list;

    CompositeElement anonymousClass = (CompositeElement)SourceTreeToPsiMap.psiElementToTree(findChildByRoleAsPsiElement(ChildRole.ANONYMOUS_CLASS));
    if (anonymousClass != null){
      return (PsiExpressionList)anonymousClass.findChildByRoleAsPsiElement(ChildRole.ARGUMENT_LIST);
    }
    return null;
  }

  /**
   * Returns the array dimension expressions. When the expression is qualified
   * (outer.new ...), the qualifier is collected by the same expression bit set
   * as element 0 and is stripped from the result.
   */
  @NotNull
  public PsiExpression[] getArrayDimensions() {
    PsiExpression[] expressions = getChildrenAsPsiElements(ElementType.ARRAY_DIMENSION_BIT_SET, Constants.PSI_EXPRESSION_ARRAY_CONSTRUCTOR);
    PsiExpression qualifier = getQualifier();
    if (qualifier == null){
      return expressions;
    }
    else{
      // The qualifier expression is always the first child matched; drop it.
      LOG.assertTrue(expressions[0] == qualifier);
      PsiExpression[] expressions1 = new PsiExpression[expressions.length - 1];
      System.arraycopy(expressions, 1, expressions1, 0, expressions1.length);
      return expressions1;
    }
  }

  /** Returns the {@code {...}} array initializer, or {@code null} if absent. */
  public PsiArrayInitializerExpression getArrayInitializer() {
    return (PsiArrayInitializerExpression)findChildByRoleAsPsiElement(ChildRole.ARRAY_INITIALIZER);
  }

  /** Resolves the invoked constructor (or {@code null}). */
  public PsiMethod resolveMethod() {
    return resolveConstructor();
  }

  /**
   * Builds a throw-away caching reference used only to resolve the constructor
   * being invoked. Resolution handles two shapes: an explicit class reference
   * followed by an argument list, or an anonymous class (whose base class type
   * and argument list live inside the anonymous class node).
   * <p>
   * NOTE(review): {@code getCanonicalText()} is annotated {@code @NotNull} but
   * returns {@code null} — a contract violation; apparently tolerated because the
   * fake reference is only used for {@code multiResolve}. Confirm before relying
   * on any other {@code PsiReference} operation on it.
   */
  private PsiPolyVariantCachingReference getConstructorFakeReference() {
    return new PsiPolyVariantCachingReference() {
      @NotNull
      public JavaResolveResult[] resolveInner(boolean incompleteCode) {
        ASTNode classRef = findChildByRole(ChildRole.TYPE_REFERENCE);
        if (classRef != null) {
          // Skip whitespace/comments between the class reference and its arguments.
          ASTNode argumentList = TreeUtil.skipElements(classRef.getTreeNext(), StdTokenSets.WHITE_SPACE_OR_COMMENT_BIT_SET);
          if (argumentList != null && argumentList.getElementType() == JavaElementType.EXPRESSION_LIST) {
            final JavaPsiFacade facade = JavaPsiFacade.getInstance(getProject());
            PsiType aClass = facade.getElementFactory().createType((PsiJavaCodeReferenceElement)SourceTreeToPsiMap.treeElementToPsi(classRef));
            return facade.getResolveHelper().multiResolveConstructor((PsiClassType)aClass,
                                                                     (PsiExpressionList)SourceTreeToPsiMap.treeElementToPsi(argumentList),
                                                                     PsiNewExpressionImpl.this);
          }
        }
        else{
          // No explicit class reference: try the anonymous-class shape.
          ASTNode anonymousClassElement = findChildByType(JavaElementType.ANONYMOUS_CLASS);
          if (anonymousClassElement != null) {
            final JavaPsiFacade facade = JavaPsiFacade.getInstance(getProject());
            final PsiAnonymousClass anonymousClass = (PsiAnonymousClass)SourceTreeToPsiMap.treeElementToPsi(anonymousClassElement);
            PsiType aClass = anonymousClass.getBaseClassType();
            ASTNode argumentList = anonymousClassElement.findChildByType(JavaElementType.EXPRESSION_LIST);
            return facade.getResolveHelper().multiResolveConstructor((PsiClassType)aClass,
                                                                     (PsiExpressionList)SourceTreeToPsiMap.treeElementToPsi(argumentList),
                                                                     anonymousClass);
          }
        }
        return JavaResolveResult.EMPTY_ARRAY;
      }

      public PsiElement getElement() {
        return PsiNewExpressionImpl.this;
      }

      public TextRange getRangeInElement() {
        return null;
      }

      // NOTE(review): returns null despite @NotNull — see class-level note above.
      @NotNull
      public String getCanonicalText() {
        return null;
      }

      public PsiElement handleElementRename(String newElementName) {
        return null;
      }

      public PsiElement bindToElement(@NotNull PsiElement element) {
        return null;
      }

      @NotNull
      public Object[] getVariants() {
        return ArrayUtil.EMPTY_OBJECT_ARRAY;
      }

      @Override
      public int hashCode() {
        PsiJavaCodeReferenceElement ref = getClassOrAnonymousClassReference();
        return ref == null ? 0 : ref.hashCode();
      }

      @Override
      public boolean equals(Object obj) {
        // Identity of the underlying expression is the equality criterion for
        // the resolve cache.
        return obj instanceof PsiPolyVariantCachingReference && getElement() == ((PsiReference)obj).getElement();
      }
    };
  }

  /**
   * Resolves the constructor with full generics information. Ambiguous or failed
   * resolution yields {@link JavaResolveResult#EMPTY}.
   */
  @NotNull
  public JavaResolveResult resolveMethodGenerics() {
    ResolveResult[] results = getConstructorFakeReference().multiResolve(false);
    return results.length == 1 ? (JavaResolveResult)results[0] : JavaResolveResult.EMPTY;
  }

  /** Returns the qualifier of a qualified new-expression (outer.new ...), or {@code null}. */
  public PsiExpression getQualifier() {
    return (PsiExpression)findChildByRoleAsPsiElement(ChildRole.QUALIFIER);
  }

  /** Returns the explicit type-argument list of the constructor call (never {@code null}). */
  @NotNull
  public PsiReferenceParameterList getTypeArgumentList() {
    return (PsiReferenceParameterList) findChildByRoleAsPsiElement(ChildRole.REFERENCE_PARAMETER_LIST);
  }

  @NotNull
  public PsiType[] getTypeArguments() {
    return getTypeArgumentList().getTypeArguments();
  }

  /** Resolves to the invoked constructor element, or {@code null}. */
  public PsiMethod resolveConstructor(){
    return (PsiMethod)resolveMethodGenerics().getElement();
  }

  /** Returns the class reference after {@code new}, or {@code null} (e.g. primitive array). */
  public PsiJavaCodeReferenceElement getClassReference() {
    return (PsiJavaCodeReferenceElement)findChildByRoleAsPsiElement(ChildRole.TYPE_REFERENCE);
  }

  /** Returns the anonymous class body if this expression creates one, else {@code null}. */
  public PsiAnonymousClass getAnonymousClass() {
    ASTNode anonymousClass = findChildByType(JavaElementType.ANONYMOUS_CLASS);
    if (anonymousClass == null) return null;
    return (PsiAnonymousClass)SourceTreeToPsiMap.treeElementToPsi(anonymousClass);
  }

  // Element types that carry the created class: a plain reference or an anonymous class.
  private static final TokenSet CLASS_REF = TokenSet.create(JavaElementType.JAVA_CODE_REFERENCE, JavaElementType.ANONYMOUS_CLASS);

  /**
   * Returns the reference to the instantiated class, unwrapping an anonymous class
   * to its base-class reference.
   * <p>
   * NOTE(review): the {@code instanceof PsiJavaCodeReferenceElement} check on an
   * {@link ASTNode} relies on the tree element also implementing the PSI
   * interface (true for this legacy tree implementation) — confirm if porting.
   */
  @Nullable
  public PsiJavaCodeReferenceElement getClassOrAnonymousClassReference() {
    ASTNode ref = findChildByType(CLASS_REF);
    if (ref == null) return null;

    if (ref instanceof PsiJavaCodeReferenceElement) return (PsiJavaCodeReferenceElement)ref;
    PsiAnonymousClass anonymousClass = (PsiAnonymousClass)ref.getPsi();
    return anonymousClass.getBaseClassReference();
  }

  /**
   * Deleting the qualifier also deletes the following dot so the expression stays
   * well-formed; any other child is deleted normally.
   */
  public void deleteChildInternal(@NotNull ASTNode child) {
    if (getChildRole(child) == ChildRole.QUALIFIER){
      ASTNode dot = findChildByRole(ChildRole.DOT);
      super.deleteChildInternal(child);
      deleteChildInternal(dot);
    }
    else{
      super.deleteChildInternal(child);
    }
  }

  /** Maps a unique child role to the corresponding AST child node (or {@code null}). */
  public ASTNode findChildByRole(int role){
    LOG.assertTrue(ChildRole.isUnique(role));

    switch(role){
      default:
        return null;

      case ChildRole.REFERENCE_PARAMETER_LIST:
        return findChildByType(JavaElementType.REFERENCE_PARAMETER_LIST);

      case ChildRole.QUALIFIER:
        // A qualifier, if present, is the first child and precedes the 'new' keyword.
        TreeElement firstChild = getFirstChildNode();
        if (firstChild != null && firstChild.getElementType() != JavaTokenType.NEW_KEYWORD) {
          return firstChild;
        }
        else {
          return null;
        }

      case ChildRole.DOT:
        return findChildByType(JavaTokenType.DOT);

      case ChildRole.NEW_KEYWORD:
        return findChildByType(JavaTokenType.NEW_KEYWORD);

      case ChildRole.ANONYMOUS_CLASS:
        return findChildByType(JavaElementType.ANONYMOUS_CLASS);

      case ChildRole.TYPE_REFERENCE:
        return findChildByType(JavaElementType.JAVA_CODE_REFERENCE);

      case ChildRole.TYPE_KEYWORD:
        return findChildByType(ElementType.PRIMITIVE_TYPE_BIT_SET);

      case ChildRole.ARGUMENT_LIST:
        return findChildByType(JavaElementType.EXPRESSION_LIST);

      case ChildRole.LBRACKET:
        return findChildByType(JavaTokenType.LBRACKET);

      case ChildRole.RBRACKET:
        return findChildByType(JavaTokenType.RBRACKET);

      case ChildRole.ARRAY_INITIALIZER:
        // The initializer, when present, is always the last child.
        if (getLastChildNode().getElementType() == JavaElementType.ARRAY_INITIALIZER_EXPRESSION){
          return getLastChildNode();
        }
        else{
          return null;
        }
    }
  }

  /** Inverse of {@link #findChildByRole(int)}: classifies a direct child node. */
  public int getChildRole(ASTNode child) {
    LOG.assertTrue(child.getTreeParent() == this);
    IElementType i = child.getElementType();
    if (i == JavaElementType.REFERENCE_PARAMETER_LIST) {
      return ChildRole.REFERENCE_PARAMETER_LIST;
    }
    else if (i == JavaTokenType.NEW_KEYWORD) {
      return ChildRole.NEW_KEYWORD;
    }
    else if (i == JavaTokenType.DOT) {
      return ChildRole.DOT;
    }
    else if (i == JavaElementType.JAVA_CODE_REFERENCE) {
      return ChildRole.TYPE_REFERENCE;
    }
    else if (i == JavaElementType.EXPRESSION_LIST) {
      return ChildRole.ARGUMENT_LIST;
    }
    else if (i == JavaTokenType.LBRACKET) {
      return ChildRole.LBRACKET;
    }
    else if (i == JavaTokenType.RBRACKET) {
      return ChildRole.RBRACKET;
    }
    else if (i == JavaElementType.ARRAY_INITIALIZER_EXPRESSION) {
      // An initializer expression is the ARRAY_INITIALIZER only in last position;
      // in first position it is the qualifier, otherwise an array dimension.
      if (child == getLastChildNode()) {
        return ChildRole.ARRAY_INITIALIZER;
      }
      else if (child == getFirstChildNode()) {
        return ChildRole.QUALIFIER;
      }
      else {
        return ChildRole.ARRAY_DIMENSION;
      }
    }
    else if (i == JavaElementType.ANONYMOUS_CLASS) {
      return ChildRole.ANONYMOUS_CLASS;
    }
    else {
      if (ElementType.PRIMITIVE_TYPE_BIT_SET.contains(child.getElementType())) {
        return ChildRole.TYPE_KEYWORD;
      }
      else if (ElementType.EXPRESSION_BIT_SET.contains(child.getElementType())) {
        // First-position expression is the qualifier; later ones are dimensions.
        return child == getFirstChildNode() ? ChildRole.QUALIFIER : ChildRole.ARRAY_DIMENSION;
      }
      else {
        return ChildRoleBase.NONE;
      }
    }
  }

  public void accept(@NotNull PsiElementVisitor visitor){
    if (visitor instanceof JavaElementVisitor) {
      ((JavaElementVisitor)visitor).visitNewExpression(this);
    }
    else {
      visitor.visitElement(this);
    }
  }

  public String toString(){
    return "PsiNewExpression:" + getText();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.sparql.core;

import static org.apache.jena.atlas.iterator.Iter.take;

import java.util.Iterator;
import java.util.List;

import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.Triple;
import org.apache.jena.sparql.SystemARQ;
import org.apache.jena.util.iterator.ExtendedIterator;

/**
 * Connect a DatasetGraph to a {@link DatasetChanges} monitor.
 * Every add or delete performed through this wrapper is reported to the
 * monitor together with a {@link QuadAction} describing what happened.
 */
public class DatasetGraphMonitor extends DatasetGraphWrapper {
    /**
     * When true, probe the dataset before each operation (contains-check before
     * add, absence-check before delete) so no-op operations can be detected.
     */
    private boolean checkFirst = true;

    /** When true, detected no-op operations are reported as NO_ADD / NO_DELETE. */
    private boolean recordNoAction = true;

    /** Receiver of the change notifications. */
    private final DatasetChanges monitor;

    /** Number of quads materialized per batch in {@link #deleteAny}. */
    private static int BATCH = 1000;

    /**
     * Create a DatasetGraph wrapper that monitors the dataset for changes
     * (adds or deletes of quads). Use this DatasetGraph for all operations in
     * order to record changes. Notes whether additions or deletions cause an
     * actual change to the dataset or not.
     *
     * @param dsg     The DatasetGraph to monitor
     * @param monitor The handler for a change
     *
     * @see DatasetChanges
     * @see QuadAction
     */
    public DatasetGraphMonitor(DatasetGraph dsg, DatasetChanges monitor) {
        super(dsg);
        this.monitor = monitor;
    }

    /**
     * Create a DatasetGraph wrapper that monitors the dataset for changes
     * (adds or deletes of quads). Use this DatasetGraph for all operations in
     * order to record changes.
     *
     * @param dsg     The DatasetGraph to monitor
     * @param monitor The handler for a change
     * @param recordOnlyIfRealChange
     *     If true, check whether the operation would have an effect (e.g. the
     *     added quad is new). If false, log ADD/DELETE regardless of whether
     *     the dataset actually changes.
     *
     * @see DatasetChanges
     * @see QuadAction
     */
    public DatasetGraphMonitor(DatasetGraph dsg, DatasetChanges monitor, boolean recordOnlyIfRealChange) {
        super(dsg);
        this.checkFirst = recordOnlyIfRealChange;
        this.monitor = monitor;
    }

    /** Return the monitor. */
    public DatasetChanges getMonitor() {
        return monitor;
    }

    /** Return the monitored DatasetGraph. */
    public DatasetGraph monitored() {
        return getWrapped();
    }

    @Override
    public void add(Quad quad) {
        if (checkFirst && contains(quad)) {
            // Already present - not a real change.
            if (recordNoAction)
                record(QuadAction.NO_ADD, quad.getGraph(), quad.getSubject(), quad.getPredicate(), quad.getObject());
            return;
        }
        addAndNotify(quad);
    }

    @Override
    public void add(Node g, Node s, Node p, Node o) {
        if (checkFirst && contains(g, s, p, o)) {
            if (recordNoAction)
                record(QuadAction.NO_ADD, g, s, p, o);
            return;
        }
        addAndNotify(g, s, p, o);
    }

    // Unconditionally add and report ADD.
    private void addAndNotify(Node g, Node s, Node p, Node o) {
        super.add(g, s, p, o);
        record(QuadAction.ADD, g, s, p, o);
    }

    // Unconditionally add and report ADD.
    private void addAndNotify(Quad quad) {
        super.add(quad);
        record(QuadAction.ADD, quad.getGraph(), quad.getSubject(), quad.getPredicate(), quad.getObject());
    }

    @Override
    public void delete(Quad quad) {
        if (checkFirst && !contains(quad)) {
            // Absent - not a real change.
            if (recordNoAction)
                record(QuadAction.NO_DELETE, quad.getGraph(), quad.getSubject(), quad.getPredicate(), quad.getObject());
            return;
        }
        deleteAndNotify(quad);
    }

    @Override
    public void delete(Node g, Node s, Node p, Node o) {
        if (checkFirst && !contains(g, s, p, o)) {
            if (recordNoAction)
                record(QuadAction.NO_DELETE, g, s, p, o);
            return;
        }
        deleteAndNotify(g, s, p, o);
    }

    // Unconditionally delete and report DELETE.
    private void deleteAndNotify(Quad quad) {
        super.delete(quad);
        record(QuadAction.DELETE, quad.getGraph(), quad.getSubject(), quad.getPredicate(), quad.getObject());
    }

    // Unconditionally delete and report DELETE.
    private void deleteAndNotify(Node g, Node s, Node p, Node o) {
        super.delete(g, s, p, o);
        record(QuadAction.DELETE, g, s, p, o);
    }

    @Override
    public void deleteAny(Node g, Node s, Node p, Node o) {
        List<Quad> batch;
        do {
            Iterator<Quad> iter = find(g, s, p, o);
            // Materialize a slice - stops possible ConcurrentModificationExceptions.
            batch = take(iter, BATCH);
            for (Quad q : batch)
                deleteAndNotify(q);
        } while (batch.size() >= BATCH);
    }

    @Override
    public void addGraph(Node gn, Graph g) {
        // Copy in triple-by-triple as quads so every addition is monitored.
        ExtendedIterator<Triple> iter = g.find(Node.ANY, Node.ANY, Node.ANY);
        while (iter.hasNext()) {
            Triple t = iter.next();
            add(gn, t.getSubject(), t.getPredicate(), t.getObject());
        }
    }

    @Override
    public void removeGraph(Node gn) {
        // Delete quad-by-quad so every removal is monitored.
        deleteAny(gn, Node.ANY, Node.ANY, Node.ANY);
    }

    // Forward a single change to the monitor.
    private void record(QuadAction action, Node g, Node s, Node p, Node o) {
        monitor.change(action, g, s, p, o);
    }

    @Override
    public void sync() {
        SystemARQ.syncObject(monitor);
        super.sync();
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.jme3.gde.materialdefinition.editor;

import com.jme3.gde.materialdefinition.icons.Icons;
import com.jme3.shader.Shader;
import com.jme3.shader.ShaderUtils;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.event.MouseEvent;
import javax.swing.ImageIcon;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.event.MouseInputListener;

/**
 * A small (10x10) connection end-point rendered on a node in the material
 * definition diagram. A Dot acts as the source or target of a drag-to-connect
 * gesture: its icon changes color to show whether the connection being dragged
 * is valid (green), invalid (red) or pending (orange).
 *
 * @author Nehon
 */
public class Dot extends JPanel implements MouseInputListener {

    // NOTE(review): never assigned in this class - presumably set by a
    // collaborator; confirm before removing.
    public static boolean pressed = false;
    /** Icon currently painted for this dot. */
    protected ImageIcon img;
    /** Icon to restore when a drag gesture is cancelled (see reset()). */
    protected ImageIcon prevImg;
    /** GLSL-ish type string; alternatives separated by '|' (see matches()). */
    private String type;
    private ParamType paramType;
    /** Shader stage this dot belongs to; used to veto cross-stage bus connections. */
    protected Shader.ShaderType shaderType;
    private String text = "";
    /** The diagram node this dot is attached to. */
    private DraggablePanel node;
    private int index = 1;

    /** Returns the label text associated with this dot. */
    public String getText() {
        return text;
    }

    /** Sets the label text associated with this dot. */
    public void setText(String text) {
        this.text = text;
    }

    /** Direction of the parameter this dot represents. */
    public enum ParamType {

        Input,
        Output,
        Both
    }

    @SuppressWarnings("LeakingThisInConstructor")
    public Dot() {
        super();
        setMaximumSize(new Dimension(10, 10));
        setMinimumSize(new Dimension(10, 10));
        setPreferredSize(new Dimension(10, 10));
        setSize(10, 10);
        addMouseMotionListener(this);
        addMouseListener(this);
    }

    /**
     * Sets the shader stage this dot belongs to.
     *
     * @param shaderType the shader stage (e.g. vertex, fragment)
     */
    public void setShaderType(Shader.ShaderType shaderType) {
        this.shaderType = shaderType;
    }

    /**
     * Misspelled legacy name kept so existing callers keep compiling.
     *
     * @deprecated use {@link #setShaderType(com.jme3.shader.Shader.ShaderType)}.
     */
    @Deprecated
    public void setShaderTypr(Shader.ShaderType shaderType) {
        setShaderType(shaderType);
    }

    @Override
    protected void paintComponent(Graphics g) {
        // Lazy default; intentionally no super.paintComponent() - the icon
        // covers the whole 10x10 component.
        if (img == null) {
            img = Icons.imgGrey;
        }
        g.drawImage(img.getImage(), 0, 0, this);
    }

    @Override
    public void mouseClicked(MouseEvent e) {
    }

    @Override
    public void mousePressed(MouseEvent e) {
        // Start a drag-to-connect gesture from this dot.
        prevImg = img;
        img = Icons.imgOrange;
        Diagram diag = getDiagram();
        diag.draggedFrom = this;
        repaint();
        e.consume();
    }

    @Override
    public void repaint() {
        // Once attached to a node, repaint the whole diagram so connection
        // curves are redrawn too; before attachment fall back to normal repaint.
        if (getNode() != null) {
            getDiagram().repaint();
        } else {
            super.repaint();
        }
    }

    /** Returns the diagram that owns this dot's node. */
    public Diagram getDiagram() {
        return node.getDiagram();
    }

    /** Returns the node panel this dot is attached to. */
    public DraggablePanel getNode() {
        return node;
    }

    /** Attaches this dot to its owning node panel. */
    public void setNode(DraggablePanel node) {
        this.node = node;
    }

    @Override
    public void mouseReleased(MouseEvent e) {
        // End of a drag gesture: create the mapping if the drop target is
        // compatible, otherwise restore both end-points.
        Diagram diag = getDiagram();
        if (diag.draggedFrom == this && diag.draggedTo != null) {
            if (this.canConnect(diag.draggedTo)) {
                diag.notifyMappingCreation(diag.connect(this, diag.draggedTo));
            } else {
                diag.draggedTo.reset();
                this.reset();
            }
            diag.draggedFrom = null;
            diag.draggedTo = null;
        } else {
            reset();
            diag.draggedFrom = null;
        }
        e.consume();
    }

    /** Restores the icon shown before the current drag gesture started. */
    public void reset() {
        img = prevImg;
        repaint();
    }

    /** Marks this dot as unconnected (grey icon). */
    public void disconnect() {
        img = Icons.imgGrey;
        repaint();
    }

    @Override
    public void mouseEntered(MouseEvent e) {
        // Hovering during a drag: make this dot the candidate target and give
        // visual feedback on both ends.
        Diagram diag = getDiagram();
        if (diag.draggedFrom != null && diag.draggedFrom != this) {
            prevImg = img;
            canConnect(diag.draggedFrom);
            diag.draggedTo = this;
            diag.draggedFrom.canConnect(this);
        }
    }

    /**
     * Decides whether this dot can be connected to {@code pair} and updates the
     * icon accordingly (green = yes, red = type mismatch, orange = not a valid
     * target at all). Inputs cannot originate a connection, and bus connections
     * must stay within the same shader stage.
     *
     * @param pair the other end-point, may be {@code null}
     * @return {@code true} if the connection is allowed
     */
    public boolean canConnect(Dot pair) {
        if (pair == null || paramType == ParamType.Input
                || ((pair.getNode() instanceof OutBusPanel || node instanceof OutBusPanel) && shaderType != pair.shaderType)) {
            img = Icons.imgOrange;
            repaint();
            return false;
        }

        if (matches(pair.getType(), type)
                && (pair.getParamType() != paramType || pair.getParamType() == ParamType.Both || paramType == ParamType.Both)
                || ShaderUtils.isSwizzlable(pair.getType()) && ShaderUtils.isSwizzlable(type)) {
            img = Icons.imgGreen;
            repaint();
            return true;
        }
        img = Icons.imgRed;
        repaint();
        return false;
    }

    /**
     * Returns true when the two '|'-separated type alternative lists share at
     * least one common entry.
     */
    private boolean matches(String type1, String type2) {
        String[] s1 = type1.split("\\|");
        String[] s2 = type2.split("\\|");
        for (String string : s1) {
            for (String string1 : s2) {
                if (string.equals(string1)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Marks this dot as connected and tracks node movement for the connection. */
    protected void connect(Connection connection) {
        img = Icons.imgGreen;
        getNode().addComponentListener(connection);
        repaint();
    }

    @Override
    public void mouseExited(MouseEvent e) {
        // Leaving a candidate target: drop it and restore feedback.
        Diagram diag = getDiagram();
        if (diag.draggedFrom != null) {
            diag.draggedFrom.canConnect(null);
            if (diag.draggedFrom != this) {
                reset();
            }
            if (diag.draggedTo == this) {
                diag.draggedTo = null;
            }
        }
    }

    /**
     * Returns the diagram-space point where an outgoing connection curve starts
     * (right side of the dot), walking up the parent chain to accumulate offsets.
     */
    public Point getStartLocation() {
        Point p = getLocation();
        Component parent = getParent();
        while (parent != getNode()) {
            p.x += parent.getLocation().x;
            p.y += parent.getLocation().y;
            parent = parent.getParent();
        }
        p.x += 10 + getNode().getLocation().x;
        p.y += 5 + getNode().getLocation().y;
        return p;
    }

    /**
     * Returns the diagram-space point where an incoming connection curve ends
     * (left side of the dot).
     */
    public Point getEndLocation() {
        Point p = getLocation();
        Component parent = getParent();
        while (parent != getNode()) {
            p.x += parent.getLocation().x;
            p.y += parent.getLocation().y;
            parent = parent.getParent();
        }
        p.x += getNode().getLocation().x + 2;
        p.y += 5 + getNode().getLocation().y;
        return p;
    }

    @Override
    public void mouseDragged(MouseEvent e) {
        // Forward the drag to the diagram in diagram coordinates.
        MouseEvent me = SwingUtilities.convertMouseEvent(this, e, getDiagram());
        getDiagram().draggingDot(me);
    }

    @Override
    public void mouseMoved(MouseEvent e) {
    }

    /** Returns the '|'-separated type alternatives of this dot. */
    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public ParamType getParamType() {
        return paramType;
    }

    public void setParamType(ParamType paramType) {
        this.paramType = paramType;
    }

    public int getIndex() {
        return index;
    }

    public void setIndex(int index) {
        this.index = index;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.bookkeeper.common.util;

import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import lombok.Data;
import lombok.ToString;
import org.apache.bookkeeper.common.util.Backoff.Jitter.Type;

/**
 * Implements various backoff strategies.
 *
 * <p>Strategies are defined by a {@link java.util.stream.Stream} of durations
 * and are intended to determine the duration after which a task is to be
 * retried.
 */
public class Backoff {

    public static final Policy DEFAULT = Jitter.of(
        Type.EXPONENTIAL,
        200,
        2000,
        3);

    // 1L << 63 overflows to a negative value, so exponential growth via bit
    // shifting must be capped at 62.
    private static final int MaxBitShift = 62;

    /**
     * Back off policy.
     *
     * <p>It defines a stream of time durations that will be used for backing off.
     */
    public interface Policy {

        Policy NONE = () -> Stream.empty();

        /**
         * Convert the policy into a series of backoff values.
         *
         * @return a series of backoff values.
         */
        Stream<Long> toBackoffs();
    }

    /**
     * A constant backoff policy.
     */
    @Data(staticConstructor = "of")
    @ToString
    public static class Constant implements Policy {

        /**
         * Create infinite constant backoff stream.
         *
         * <p>It is the infinite version of {@link #of(long, long)}.
         *
         * @param ms constant backoff time in milliseconds.
         * @return constant backoff policy.
         */
        public static Constant of(long ms) {
            return of(ms, -1);
        }

        private final long ms;
        // Number of values in the stream; negative means unbounded.
        private final long limit;

        @Override
        public Stream<Long> toBackoffs() {
            if (limit >= 0) {
                return constant(ms).limit(limit);
            } else {
                return constant(ms);
            }
        }
    }

    /**
     * A jittered backoff policy.
     *
     * <p>It is an implementation of the strategies described at
     * http://www.awsarchitectureblog.com/2015/03/backoff.html
     */
    @Data(staticConstructor = "of")
    @ToString
    public static class Jitter implements Policy {

        /**
         * Jitter type.
         */
        public enum Type {
            DECORRELATED,
            EQUAL,
            EXPONENTIAL
        }

        /**
         * Create infinite jittered backoff stream.
         *
         * <p>It is the infinite version of {@link #of(Type, long, long, long)}.
         *
         * @param type jittered backoff type
         * @param startMs the start backoff time in milliseconds.
         * @param maxMs the max backoff time in milliseconds.
         * @return jittered backoff policy.
         */
        public static Jitter of(Type type, long startMs, long maxMs) {
            return of(type, startMs, maxMs, -1);
        }

        private final Type type;
        private final long startMs;
        private final long maxMs;
        // Number of values in the stream; negative means unbounded.
        private final long limit;

        @Override
        public Stream<Long> toBackoffs() {
            Stream<Long> backoffStream;
            switch (type) {
                case DECORRELATED:
                    backoffStream = decorrelatedJittered(startMs, maxMs);
                    break;
                case EQUAL:
                    backoffStream = equalJittered(startMs, maxMs);
                    break;
                case EXPONENTIAL:
                default:
                    backoffStream = exponentialJittered(startMs, maxMs);
                    break;
            }
            if (limit >= 0) {
                return backoffStream.limit(limit);
            } else {
                return backoffStream;
            }
        }
    }

    /**
     * An exponential backoff policy.
     */
    @Data(staticConstructor = "of")
    @ToString
    public static class Exponential implements Policy {

        /**
         * Create an infinite exponential backoff policy.
         *
         * <p>It is the infinite version of {@link #of(long, long, int, int)}.
         *
         * @param startMs start backoff time in milliseconds.
         * @param maxMs max backoff time in milliseconds.
         * @param multiplier the backoff multiplier
         * @return the exponential backoff policy.
         */
        public static Exponential of(long startMs, long maxMs, int multiplier) {
            return of(startMs, maxMs, multiplier, -1);
        }

        private final long startMs;
        private final long maxMs;
        private final int multiplier;
        // Number of values in the stream; negative means unbounded.
        private final int limit;

        @Override
        public Stream<Long> toBackoffs() {
            if (limit >= 0) {
                return exponential(startMs, multiplier, maxMs).limit(limit);
            } else {
                return exponential(startMs, multiplier, maxMs);
            }
        }
    }

    /**
     * Create a stream with constant backoffs.
     *
     * @param startMs initial backoff in milliseconds
     * @return a stream with constant backoff values.
     */
    public static Stream<Long> constant(long startMs) {
        return Stream.iterate(startMs, lastMs -> startMs);
    }

    /**
     * Create a stream with exponential backoffs.
     *
     * @param startMs initial backoff in milliseconds.
     * @param multiplier the multiplier for next backoff.
     * @param maxMs max backoff in milliseconds.
     * @return a stream with exponential backoffs.
     */
    public static Stream<Long> exponential(long startMs, int multiplier, long maxMs) {
        return Stream.iterate(startMs, lastMs -> Math.min(lastMs * multiplier, maxMs));
    }

    /**
     * Create a stream of exponential backoffs with jitters.
     *
     * <p>This is "full jitter" via http://www.awsarchitectureblog.com/2015/03/backoff.html
     *
     * <p>NOTE(review): the first element of the stream is the unjittered
     * {@code startMs}; jitter is applied from the second element onward.
     * Assumes {@code maxMs >= startMs} (otherwise the random bound is invalid) -
     * confirm callers respect this.
     *
     * @param startMs initial backoff in milliseconds.
     * @param maxMs max backoff in milliseconds.
     * @return a stream of exponential backoffs with jitters.
     */
    public static Stream<Long> exponentialJittered(long startMs, long maxMs) {
        final long startNanos = TimeUnit.NANOSECONDS.convert(startMs, TimeUnit.MILLISECONDS);
        final long maxNanos = TimeUnit.NANOSECONDS.convert(maxMs, TimeUnit.MILLISECONDS);
        final AtomicLong attempts = new AtomicLong(1);
        return Stream.iterate(startMs, lastMs -> {
            // Cap the shift so 1L << shift never overflows.
            long shift = Math.min(attempts.get(), MaxBitShift);
            long maxBackoffNanos = Math.min(maxNanos, startNanos * (1L << shift));
            // Uniform in [startNanos, maxBackoffNanos).
            long randomMs = TimeUnit.MILLISECONDS.convert(
                ThreadLocalRandom.current().nextLong(startNanos, maxBackoffNanos),
                TimeUnit.NANOSECONDS);
            attempts.incrementAndGet();
            return randomMs;
        });
    }

    /**
     * Create an infinite backoffs that have jitter with a random distribution
     * between {@code startMs} and 3 times the previously selected value, capped at {@code maxMs}.
     *
     * <p>This is "decorrelated jitter" via http://www.awsarchitectureblog.com/2015/03/backoff.html
     *
     * @param startMs initial backoff in milliseconds
     * @param maxMs max backoff in milliseconds
     * @return a stream of jitter backoffs.
     */
    public static Stream<Long> decorrelatedJittered(long startMs, long maxMs) {
        final long startNanos = TimeUnit.NANOSECONDS.convert(startMs, TimeUnit.MILLISECONDS);
        final long maxNanos = TimeUnit.NANOSECONDS.convert(maxMs, TimeUnit.MILLISECONDS);
        return Stream.iterate(startMs, lastMs -> {
            // BUGFIX: convert the previous backoff from millis to nanos.
            // The old code called TimeUnit.MILLISECONDS.convert(lastMs, NANOSECONDS),
            // which converts in the opposite direction (divides by 10^6), collapsing
            // lastNanos to ~0 and destroying the decorrelation with the previous value.
            long lastNanos = TimeUnit.NANOSECONDS.convert(lastMs, TimeUnit.MILLISECONDS);
            long randRange = Math.abs(lastNanos * 3 - startNanos);
            long randBackoff;
            if (0L == randRange) {
                randBackoff = startNanos;
            } else {
                // Uniform in [startNanos, startNanos + randRange).
                randBackoff = startNanos + ThreadLocalRandom.current().nextLong(randRange);
            }
            long backOffNanos = Math.min(maxNanos, randBackoff);
            return TimeUnit.MILLISECONDS.convert(backOffNanos, TimeUnit.NANOSECONDS);
        });
    }

    /**
     * Create infinite backoffs that keep half of the exponential growth, and jitter
     * between 0 and that amount.
     *
     * <p>This is "equal jitter" via http://www.awsarchitectureblog.com/2015/03/backoff.html
     *
     * @param startMs initial backoff in milliseconds.
     * @param maxMs max backoff in milliseconds.
     * @return a stream of exponential backoffs with jitters.
     */
    public static Stream<Long> equalJittered(long startMs, long maxMs) {
        final long startNanos = TimeUnit.NANOSECONDS.convert(startMs, TimeUnit.MILLISECONDS);
        final long maxNanos = TimeUnit.NANOSECONDS.convert(maxMs, TimeUnit.MILLISECONDS);
        final AtomicLong attempts = new AtomicLong(1);
        return Stream.iterate(startMs, lastMs -> {
            // Cap the shift so 1L << shift never overflows.
            long shift = Math.min(attempts.get() - 1, MaxBitShift);
            long halfExpNanos = startNanos * (1L << shift);
            long backoffNanos = halfExpNanos + ThreadLocalRandom.current().nextLong(halfExpNanos);
            attempts.incrementAndGet();
            if (backoffNanos < maxNanos) {
                return TimeUnit.MILLISECONDS.convert(backoffNanos, TimeUnit.NANOSECONDS);
            } else {
                return maxMs;
            }
        });
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.visor.util; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileFilter; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.net.InetAddress; import java.net.URL; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.UUID; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteFileSystem; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cache.eviction.EvictionPolicy; import 
org.apache.ignite.cache.eviction.fifo.FifoEvictionPolicyMBean; import org.apache.ignite.cache.eviction.lru.LruEvictionPolicyMBean; import org.apache.ignite.cache.eviction.random.RandomEvictionPolicyMBean; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.events.DiscoveryEvent; import org.apache.ignite.events.Event; import org.apache.ignite.internal.processors.igfs.IgfsEx; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.visor.event.VisorGridDiscoveryEventV2; import org.apache.ignite.internal.visor.event.VisorGridEvent; import org.apache.ignite.internal.visor.event.VisorGridEventsLost; import org.apache.ignite.internal.visor.file.VisorFileBlock; import org.apache.ignite.internal.visor.log.VisorLogFile; import org.apache.ignite.lang.IgniteClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.lang.IgniteUuid; import org.jetbrains.annotations.Nullable; import static java.lang.System.getProperty; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_DIR; import static org.apache.ignite.events.EventType.EVTS_DISCOVERY; import static org.apache.ignite.events.EventType.EVT_CLASS_DEPLOY_FAILED; import static org.apache.ignite.events.EventType.EVT_JOB_CANCELLED; import static org.apache.ignite.events.EventType.EVT_JOB_FAILED; import static org.apache.ignite.events.EventType.EVT_JOB_FAILED_OVER; import static org.apache.ignite.events.EventType.EVT_JOB_FINISHED; import static org.apache.ignite.events.EventType.EVT_JOB_REJECTED; import static org.apache.ignite.events.EventType.EVT_JOB_STARTED; import static org.apache.ignite.events.EventType.EVT_JOB_TIMEDOUT; import static org.apache.ignite.events.EventType.EVT_TASK_DEPLOY_FAILED; import static org.apache.ignite.events.EventType.EVT_TASK_FAILED; 
import static org.apache.ignite.events.EventType.EVT_TASK_FINISHED; import static org.apache.ignite.events.EventType.EVT_TASK_STARTED; import static org.apache.ignite.events.EventType.EVT_TASK_TIMEDOUT; /** * Contains utility methods for Visor tasks and jobs. */ public class VisorTaskUtils { /** Default substitute for {@code null} names. */ private static final String DFLT_EMPTY_NAME = "<default>"; /** Throttle count for lost events. */ private static final int EVENTS_LOST_THROTTLE = 10; /** Period to grab events. */ private static final int EVENTS_COLLECT_TIME_WINDOW = 10 * 60 * 1000; /** Empty buffer for file block. */ private static final byte[] EMPTY_FILE_BUF = new byte[0]; /** Log files count limit */ public static final int LOG_FILES_COUNT_LIMIT = 5000; /** */ private static final int DFLT_BUFFER_SIZE = 4096; /** Only task event types that Visor should collect. */ public static final int[] VISOR_TASK_EVTS = { EVT_JOB_STARTED, EVT_JOB_FINISHED, EVT_JOB_TIMEDOUT, EVT_JOB_FAILED, EVT_JOB_FAILED_OVER, EVT_JOB_REJECTED, EVT_JOB_CANCELLED, EVT_TASK_STARTED, EVT_TASK_FINISHED, EVT_TASK_FAILED, EVT_TASK_TIMEDOUT }; /** Only non task event types that Visor should collect. */ public static final int[] VISOR_NON_TASK_EVTS = { EVT_CLASS_DEPLOY_FAILED, EVT_TASK_DEPLOY_FAILED }; /** Only non task event types that Visor should collect. */ public static final int[] VISOR_ALL_EVTS = concat(VISOR_TASK_EVTS, VISOR_NON_TASK_EVTS); /** Maximum folder depth. I.e. if depth is 4 we look in starting folder and 3 levels of sub-folders. */ public static final int MAX_FOLDER_DEPTH = 4; /** Comparator for log files by last modified date. */ private static final Comparator<VisorLogFile> LAST_MODIFIED = new Comparator<VisorLogFile>() { @Override public int compare(VisorLogFile f1, VisorLogFile f2) { return Long.compare(f2.lastModified(), f1.lastModified()); } }; /** Debug date format. 
*/
    private static final ThreadLocal<SimpleDateFormat> DEBUG_DATE_FMT = new ThreadLocal<SimpleDateFormat>() {
        /** {@inheritDoc} */
        @Override protected SimpleDateFormat initialValue() {
            // SimpleDateFormat is not thread-safe, hence one instance per thread.
            return new SimpleDateFormat("HH:mm:ss,SSS");
        }
    };

    /**
     * @param name Grid-style nullable name.
     * @return Name with {@code null} replaced to &lt;default&gt;.
     */
    public static String escapeName(@Nullable Object name) {
        return name == null ? DFLT_EMPTY_NAME : name.toString();
    }

    /**
     * @param name Escaped name.
     * @return Name or {@code null} for default name.
     */
    public static String unescapeName(String name) {
        assert name != null;

        return DFLT_EMPTY_NAME.equals(name) ? null : name;
    }

    /**
     * Concat arrays in one.
     *
     * @param arrays Arrays.
     * @return Summary array.
     */
    public static int[] concat(int[]... arrays) {
        assert arrays != null;
        assert arrays.length > 1;

        int len = 0;

        for (int[] a : arrays)
            len += a.length;

        int[] r = Arrays.copyOf(arrays[0], len);

        for (int i = 1, shift = 0; i < arrays.length; i++) {
            shift += arrays[i - 1].length;

            System.arraycopy(arrays[i], 0, r, shift, arrays[i].length);
        }

        return r;
    }

    /**
     * Compacts an arbitrary object into a form suitable for transfer to Visor:
     * scalars pass through, collections/arrays are compacted element-wise and
     * other objects are reduced to their compacted class name.
     *
     * @param obj Object to compact.
     * @return Compacted representation ({@code null} for {@code null} input).
     */
    @Nullable public static Object compactObject(Object obj) {
        if (obj == null)
            return null;

        if (obj instanceof Enum)
            return obj.toString();

        if (obj instanceof String || obj instanceof Boolean || obj instanceof Number)
            return obj;

        if (obj instanceof Collection) {
            Collection col = (Collection)obj;

            Object[] res = new Object[col.size()];

            int i = 0;

            for (Object elm : col)
                res[i++] = compactObject(elm);

            return res;
        }

        if (obj.getClass().isArray()) {
            Class<?> arrType = obj.getClass().getComponentType();

            if (arrType.isPrimitive()) {
                if (obj instanceof boolean[])
                    return Arrays.toString((boolean[])obj);
                if (obj instanceof byte[])
                    return Arrays.toString((byte[])obj);
                if (obj instanceof char[])
                    // Fix: char[] previously fell through to the (Object[]) cast below
                    // and threw ClassCastException.
                    return Arrays.toString((char[])obj);
                if (obj instanceof short[])
                    return Arrays.toString((short[])obj);
                if (obj instanceof int[])
                    return Arrays.toString((int[])obj);
                if (obj instanceof long[])
                    return Arrays.toString((long[])obj);
                if (obj instanceof float[])
                    return Arrays.toString((float[])obj);
                if (obj instanceof double[])
                    return Arrays.toString((double[])obj);
            }

            Object[] arr = (Object[])obj;

            int iMax = arr.length - 1;

            StringBuilder sb = new StringBuilder("[");

            for (int i = 0; i <= iMax; i++) {
                sb.append(compactObject(arr[i]));

                if (i != iMax)
                    sb.append(", ");
            }

            sb.append("]");

            return sb.toString();
        }

        return U.compact(obj.getClass().getName());
    }

    /**
     * Compact class names.
     *
     * @param cls Class object for compact.
     * @return Compacted string.
     */
    @Nullable public static String compactClass(Class cls) {
        if (cls == null)
            return null;

        return U.compact(cls.getName());
    }

    /**
     * Compact class names.
     *
     * @param obj Object for compact.
     * @return Compacted string.
     */
    @Nullable public static String compactClass(@Nullable Object obj) {
        if (obj == null)
            return null;

        return compactClass(obj.getClass());
    }

    /**
     * Joins array elements to a comma-separated string and compacts it.
     *
     * @param arr Array.
     * @return Joined string or {@code null} for an empty/null array.
*/ @Nullable public static String compactArray(Object[] arr) { if (arr == null || arr.length == 0) return null; String sep = ", "; StringBuilder sb = new StringBuilder(); for (Object s : arr) sb.append(s).append(sep); if (sb.length() > 0) sb.setLength(sb.length() - sep.length()); return U.compact(sb.toString()); } /** * Returns boolean value from system property or provided function. * * @param propName System property name. * @param dflt Function that returns {@code Integer}. * @return {@code Integer} value */ public static Integer intValue(String propName, Integer dflt) { String sysProp = getProperty(propName); return (sysProp != null && !sysProp.isEmpty()) ? Integer.getInteger(sysProp) : dflt; } /** * Returns boolean value from system property or provided function. * * @param propName System property host. * @param dflt Function that returns {@code Boolean}. * @return {@code Boolean} value */ public static boolean boolValue(String propName, boolean dflt) { String sysProp = getProperty(propName); return (sysProp != null && !sysProp.isEmpty()) ? Boolean.getBoolean(sysProp) : dflt; } /** * Helper function to get value from map. * * @param map Map to take value from. * @param key Key to search in map. * @param ifNull Default value if {@code null} was returned by map. * @param <K> Key type. * @param <V> Value type. * @return Value from map or default value if map return {@code null}. */ public static <K, V> V getOrElse(Map<K, V> map, K key, V ifNull) { assert map != null; V res = map.get(key); return res != null ? res : ifNull; } /** * Checks for explicit events configuration. * * @param ignite Grid instance. * @return {@code true} if all task events explicitly specified in configuration. 
*/
    public static boolean checkExplicitTaskMonitoring(Ignite ignite) {
        int[] evts = ignite.configuration().getIncludeEventTypes();

        if (F.isEmpty(evts))
            return false;

        // Every single task event type must be explicitly enabled.
        for (int evt : VISOR_TASK_EVTS) {
            if (!F.contains(evts, evt))
                return false;
        }

        return true;
    }

    /** Events comparator by event local order (ascending). */
    private static final Comparator<Event> EVTS_ORDER_COMPARATOR = new Comparator<Event>() {
        @Override public int compare(Event o1, Event o2) {
            return Long.compare(o1.localOrder(), o2.localOrder());
        }
    };

    /** Mapper from grid event to Visor data transfer object. */
    public static final VisorEventMapper EVT_MAPPER = new VisorEventMapper();

    /** Mapper from grid event to Visor data transfer object (V2 adds node address/daemon/topology info). */
    public static final VisorEventMapper EVT_MAPPER_V2 = new VisorEventMapper() {
        @Override protected VisorGridEvent discoveryEvent(DiscoveryEvent de, int type, IgniteUuid id, String name,
            UUID nid, long ts, String msg, String shortDisplay) {
            ClusterNode node = de.eventNode();

            return new VisorGridDiscoveryEventV2(type, id, name, nid, ts, msg, shortDisplay, node.id(),
                F.first(node.addresses()), node.isDaemon(), de.topologyVersion());
        }
    };

    /**
     * Grabs local events and detects if events was lost since last poll.
     *
     * @param ignite Target grid.
     * @param evtOrderKey Unique key to take last order key from node local map.
     * @param evtThrottleCntrKey Unique key to take throttle count from node local map.
     * @param all If {@code true} then collect all events otherwise collect only non task events.
     * @param evtMapper Closure to map grid events to Visor data transfer objects.
     * @return Collections of node events.
     */
    public static Collection<VisorGridEvent> collectEvents(Ignite ignite, String evtOrderKey, String evtThrottleCntrKey,
        boolean all, IgniteClosure<Event, VisorGridEvent> evtMapper) {
        int[] evtTypes = all ? VISOR_ALL_EVTS : VISOR_NON_TASK_EVTS;

        // Collect discovery events for Web Console.
        if (evtOrderKey.startsWith("CONSOLE_"))
            evtTypes = concat(evtTypes, EVTS_DISCOVERY);

        return collectEvents(ignite, evtOrderKey, evtThrottleCntrKey, evtTypes, evtMapper);
    }

    /**
     * Grabs local events and detects if events was lost since last poll.
     * State (last seen event order and lost-events throttle counter) is kept
     * in the node-local map under the supplied keys.
     *
     * @param ignite Target grid.
     * @param evtOrderKey Unique key to take last order key from node local map.
     * @param evtThrottleCntrKey Unique key to take throttle count from node local map.
     * @param evtTypes Event types to collect.
     * @param evtMapper Closure to map grid events to Visor data transfer objects.
     * @return Collections of node events.
     */
    public static Collection<VisorGridEvent> collectEvents(Ignite ignite, String evtOrderKey, String evtThrottleCntrKey,
        int[] evtTypes, IgniteClosure<Event, VisorGridEvent> evtMapper) {
        assert ignite != null;
        assert evtTypes != null && evtTypes.length > 0;

        ConcurrentMap<String, Long> nl = ignite.cluster().nodeLocalMap();

        // Last event order seen by the previous poll; -1 means "never polled".
        final long lastOrder = getOrElse(nl, evtOrderKey, -1L);
        final long throttle = getOrElse(nl, evtThrottleCntrKey, 0L);

        // When we first time arrive onto a node to get its local events,
        // we'll grab only last those events that not older than given period to make sure we are
        // not grabbing GBs of data accidentally.
        final long notOlderThan = System.currentTimeMillis() - EVENTS_COLLECT_TIME_WINDOW;

        // Flag for detecting gaps between events: set once the previously seen
        // event is encountered again in the local event storage.
        final AtomicBoolean lastFound = new AtomicBoolean(lastOrder < 0);

        IgnitePredicate<Event> p = new IgnitePredicate<Event>() {
            /** Serial version UID. */
            private static final long serialVersionUID = 0L;

            @Override public boolean apply(Event e) {
                // Detects that events were lost.
                if (!lastFound.get() && (lastOrder == e.localOrder()))
                    lastFound.set(true);

                // Retains events by lastOrder, period and type.
                return e.localOrder() > lastOrder && e.timestamp() > notOlderThan;
            }
        };

        Collection<Event> evts = ignite.events().localQuery(p, evtTypes);

        // Update latest order in node local, if not empty.
        if (!evts.isEmpty()) {
            Event maxEvt = Collections.max(evts, EVTS_ORDER_COMPARATOR);

            nl.put(evtOrderKey, maxEvt.localOrder());
        }

        // Update throttle counter: re-arm it when events were lost, otherwise count it down.
        if (!lastFound.get())
            nl.put(evtThrottleCntrKey, throttle == 0 ? EVENTS_LOST_THROTTLE : throttle - 1);

        // Report the loss only when the throttle counter has expired.
        boolean lost = !lastFound.get() && throttle == 0;

        Collection<VisorGridEvent> res = new ArrayList<>(evts.size() + (lost ? 1 : 0));

        if (lost)
            res.add(new VisorGridEventsLost(ignite.cluster().localNode().id()));

        for (Event e : evts) {
            VisorGridEvent visorEvt = evtMapper.apply(e);

            // Mapper may return null for event types it does not handle.
            if (visorEvt != null)
                res.add(visorEvt);
        }

        return res;
    }

    /**
     * Finds all files in folder and in it's sub-tree of specified depth.
     *
     * @param file Starting folder.
     * @param maxDepth Depth of the tree. If 1 - just look in the folder, no sub-folders.
     * @param filter file filter.
     * @return List of found files.
     */
    public static List<VisorLogFile> fileTree(File file, int maxDepth, @Nullable FileFilter filter) {
        if (file.isDirectory()) {
            File[] files = (filter == null) ? file.listFiles() : file.listFiles(filter);

            // listFiles() returns null on I/O error or when the path vanished.
            if (files == null)
                return Collections.emptyList();

            List<VisorLogFile> res = new ArrayList<>(files.length);

            for (File f : files) {
                // Empty files are skipped; directories are recursed into while depth remains.
                if (f.isFile() && f.length() > 0)
                    res.add(new VisorLogFile(f));
                else if (maxDepth > 1)
                    res.addAll(fileTree(f, maxDepth - 1, filter));
            }

            return res;
        }

        return F.asList(new VisorLogFile(file));
    }

    /**
     * @param fld Folder with files to match.
     * @param ptrn Pattern to match against file name.
     * @return Collection of matched files, newest first.
     */
    public static List<VisorLogFile> matchedFiles(File fld, final String ptrn) {
        List<VisorLogFile> files = fileTree(fld, MAX_FOLDER_DEPTH,
            new FileFilter() {
                @Override public boolean accept(File f) {
                    return !f.isHidden() && (f.isDirectory() || f.isFile() && f.getName().matches(ptrn));
                }
            }
        );

        Collections.sort(files, LAST_MODIFIED);

        return files;
    }

    /** Text files mime types.
*/ private static final String[] TEXT_MIME_TYPE = new String[] {"text/plain", "application/xml", "text/html", "x-sh"}; /** * Check is text file. * * @param f file reference. * @param emptyOk default value if empty file. * @return Is text file. */ public static boolean textFile(File f, boolean emptyOk) { if (f.length() == 0) return emptyOk; String detected = VisorMimeTypes.getContentType(f); for (String mime : TEXT_MIME_TYPE) if (mime.equals(detected)) return true; return false; } /** * Decode file charset. * * @param f File to process. * @return File charset. * @throws IOException in case of error. */ public static Charset decode(File f) throws IOException { SortedMap<String, Charset> charsets = Charset.availableCharsets(); String[] firstCharsets = {Charset.defaultCharset().name(), "US-ASCII", "UTF-8", "UTF-16BE", "UTF-16LE"}; Collection<Charset> orderedCharsets = U.newLinkedHashSet(charsets.size()); for (String c : firstCharsets) if (charsets.containsKey(c)) orderedCharsets.add(charsets.get(c)); orderedCharsets.addAll(charsets.values()); try (RandomAccessFile raf = new RandomAccessFile(f, "r")) { FileChannel ch = raf.getChannel(); ByteBuffer buf = ByteBuffer.allocate(DFLT_BUFFER_SIZE); ch.read(buf); buf.flip(); for (Charset charset : orderedCharsets) { CharsetDecoder decoder = charset.newDecoder(); decoder.reset(); try { decoder.decode(buf); return charset; } catch (CharacterCodingException ignored) { } } } return Charset.defaultCharset(); } /** * Read block from file. * * @param file - File to read. * @param off - Marker position in file to start read from if {@code -1} read last blockSz bytes. * @param blockSz - Maximum number of chars to read. * @param lastModified - File last modification time. * @return Read file block. * @throws IOException In case of error. 
*/ public static VisorFileBlock readBlock(File file, long off, int blockSz, long lastModified) throws IOException { RandomAccessFile raf = null; try { long fSz = file.length(); long fLastModified = file.lastModified(); long pos = off >= 0 ? off : Math.max(fSz - blockSz, 0); // Try read more that file length. if (fLastModified == lastModified && fSz != 0 && pos >= fSz) throw new IOException("Trying to read file block with wrong offset: " + pos + " while file size: " + fSz); if (fSz == 0) return new VisorFileBlock(file.getPath(), pos, fLastModified, 0, false, EMPTY_FILE_BUF); else { int toRead = Math.min(blockSz, (int)(fSz - pos)); raf = new RandomAccessFile(file, "r"); raf.seek(pos); byte[] buf = new byte[toRead]; int cntRead = raf.read(buf, 0, toRead); if (cntRead != toRead) throw new IOException("Count of requested and actually read bytes does not match [cntRead=" + cntRead + ", toRead=" + toRead + ']'); boolean zipped = buf.length > 512; return new VisorFileBlock(file.getPath(), pos, fSz, fLastModified, zipped, zipped ? zipBytes(buf) : buf); } } finally { U.close(raf, null); } } /** * Resolve IGFS profiler logs directory. * * @param igfs IGFS instance to resolve logs dir for. * @return {@link Path} to log dir or {@code null} if not found. * @throws IgniteCheckedException if failed to resolve. */ public static Path resolveIgfsProfilerLogsDir(IgniteFileSystem igfs) throws IgniteCheckedException { String logsDir; if (igfs instanceof IgfsEx) logsDir = ((IgfsEx)igfs).clientLogDirectory(); else if (igfs == null) throw new IgniteCheckedException("Failed to get profiler log folder (IGFS instance not found)"); else throw new IgniteCheckedException("Failed to get profiler log folder (unexpected IGFS instance type)"); URL logsDirUrl = U.resolveIgniteUrl(logsDir != null ? logsDir : DFLT_IGFS_LOG_DIR); return logsDirUrl != null ? new File(logsDirUrl.getPath()).toPath() : null; } /** * Extract max size from eviction policy if available. * * @param plc Eviction policy. 
* @return Extracted max size. */ public static Integer evictionPolicyMaxSize(@Nullable EvictionPolicy plc) { if (plc instanceof LruEvictionPolicyMBean) return ((LruEvictionPolicyMBean)plc).getMaxSize(); if (plc instanceof RandomEvictionPolicyMBean) return ((RandomEvictionPolicyMBean)plc).getMaxSize(); if (plc instanceof FifoEvictionPolicyMBean) return ((FifoEvictionPolicyMBean)plc).getMaxSize(); return null; } /** * Pretty-formatting for duration. * * @param ms Millisecond to format. * @return Formatted presentation. */ private static String formatDuration(long ms) { assert ms >= 0; if (ms == 0) return "< 1 ms"; SB sb = new SB(); long dd = ms / 1440000; // 1440 mins = 60 mins * 24 hours if (dd > 0) sb.a(dd).a(dd == 1 ? " day " : " days "); ms %= 1440000; long hh = ms / 60000; if (hh > 0) sb.a(hh).a(hh == 1 ? " hour " : " hours "); long min = ms / 60000; if (min > 0) sb.a(min).a(min == 1 ? " min " : " mins "); ms %= 60000; if (ms > 0) sb.a(ms).a(" ms "); return sb.toString().trim(); } /** * @param log Logger. * @param time Time. * @param msg Message. */ private static void log0(@Nullable IgniteLogger log, long time, String msg) { if (log != null) { if (log.isDebugEnabled()) log.debug(msg); else log.warning(msg); } else X.println(String.format("[%s][%s]%s", DEBUG_DATE_FMT.get().format(time), Thread.currentThread().getName(), msg)); } /** * Log start. * * @param log Logger. * @param clazz Class. * @param start Start time. */ public static void logStart(@Nullable IgniteLogger log, Class<?> clazz, long start) { log0(log, start, "[" + clazz.getSimpleName() + "]: STARTED"); } /** * Log finished. * * @param log Logger. * @param clazz Class. * @param start Start time. */ public static void logFinish(@Nullable IgniteLogger log, Class<?> clazz, long start) { final long end = U.currentTimeMillis(); log0(log, end, String.format("[%s]: FINISHED, duration: %s", clazz.getSimpleName(), formatDuration(end - start))); } /** * Log task mapped. * * @param log Logger. 
* @param clazz Task class. * @param nodes Mapped nodes. */ public static void logMapped(@Nullable IgniteLogger log, Class<?> clazz, Collection<ClusterNode> nodes) { log0(log, U.currentTimeMillis(), String.format("[%s]: MAPPED: %s", clazz.getSimpleName(), U.toShortString(nodes))); } /** * Log message. * * @param log Logger. * @param msg Message to log. * @param clazz class. * @param start start time. * @return Time when message was logged. */ public static long log(@Nullable IgniteLogger log, String msg, Class<?> clazz, long start) { final long end = U.currentTimeMillis(); log0(log, end, String.format("[%s]: %s, duration: %s", clazz.getSimpleName(), msg, formatDuration(end - start))); return end; } /** * Log message. * * @param log Logger. * @param msg Message. */ public static void log(@Nullable IgniteLogger log, String msg) { log0(log, U.currentTimeMillis(), " " + msg); } /** * Checks if address can be reached using one argument InetAddress.isReachable() version or ping command if failed. * * @param addr Address to check. * @param reachTimeout Timeout for the check. * @return {@code True} if address is reachable. */ public static boolean reachableByPing(InetAddress addr, int reachTimeout) { try { if (addr.isReachable(reachTimeout)) return true; String cmd = String.format("ping -%s 1 %s", U.isWindows() ? "n" : "c", addr.getHostAddress()); Process myProc = Runtime.getRuntime().exec(cmd); myProc.waitFor(); return myProc.exitValue() == 0; } catch (IOException ignore) { return false; } catch (InterruptedException ignored) { Thread.currentThread().interrupt(); return false; } } /** * Start local node in terminal. * * @param log Logger. * @param cfgPath Path to node configuration to start with. * @param nodesToStart Number of nodes to start. * @param quite If {@code true} then start node in quiet mode. * @param envVars Optional map with environment variables. * @return List of started processes. * @throws IOException If failed to start. 
*/ public static List<Process> startLocalNode(@Nullable IgniteLogger log, String cfgPath, int nodesToStart, boolean quite, Map<String, String> envVars) throws IOException { String quitePar = quite ? "" : "-v"; String cmdFile = new File("bin", U.isWindows() ? "ignite.bat" : "ignite.sh").getPath(); File cmdFilePath = U.resolveIgnitePath(cmdFile); if (cmdFilePath == null || !cmdFilePath.exists()) throw new FileNotFoundException(String.format("File not found: %s", cmdFile)); String ignite = cmdFilePath.getCanonicalPath(); File nodesCfgPath = U.resolveIgnitePath(cfgPath); if (nodesCfgPath == null || !nodesCfgPath.exists()) throw new FileNotFoundException(String.format("File not found: %s", cfgPath)); String nodeCfg = nodesCfgPath.getCanonicalPath(); log(log, String.format("Starting %s local %s with '%s' config", nodesToStart, nodesToStart > 1 ? "nodes" : "node", nodeCfg)); List<Process> run = new ArrayList<>(); try { for (int i = 0; i < nodesToStart; i++) { if (U.isMacOs()) { Map<String, String> macEnv = new HashMap<>(System.getenv()); if (envVars != null) { for (Map.Entry<String, String> ent : envVars.entrySet()) if (macEnv.containsKey(ent.getKey())) { String old = macEnv.get(ent.getKey()); if (old == null || old.isEmpty()) macEnv.put(ent.getKey(), ent.getValue()); else macEnv.put(ent.getKey(), old + ':' + ent.getValue()); } else macEnv.put(ent.getKey(), ent.getValue()); } StringBuilder envs = new StringBuilder(); for (Map.Entry<String, String> entry : macEnv.entrySet()) { String val = entry.getValue(); if (val.indexOf(';') < 0 && val.indexOf('\'') < 0) envs.append(String.format("export %s='%s'; ", entry.getKey(), val.replace('\n', ' ').replace("'", "\'"))); } run.add(openInConsole(envs.toString(), ignite, quitePar, nodeCfg)); } else run.add(openInConsole(null, envVars, ignite, quitePar, nodeCfg)); } return run; } catch (Exception e) { for (Process proc: run) proc.destroy(); throw e; } } /** * Run command in separated console. 
* * @param args A string array containing the program and its arguments. * @return Started process. * @throws IOException in case of error. */ public static Process openInConsole(String... args) throws IOException { return openInConsole(null, args); } /** * Run command in separated console. * * @param workFolder Work folder for command. * @param args A string array containing the program and its arguments. * @return Started process. * @throws IOException in case of error. */ public static Process openInConsole(@Nullable File workFolder, String... args) throws IOException { return openInConsole(workFolder, null, args); } /** * Run command in separated console. * * @param workFolder Work folder for command. * @param envVars Optional map with environment variables. * @param args A string array containing the program and its arguments. * @return Started process. * @throws IOException If failed to start process. */ public static Process openInConsole(@Nullable File workFolder, Map<String, String> envVars, String... args) throws IOException { String[] commands = args; String cmd = F.concat(Arrays.asList(args), " "); if (U.isWindows()) commands = F.asArray("cmd", "/c", String.format("start %s", cmd)); if (U.isMacOs()) commands = F.asArray("osascript", "-e", String.format("tell application \"Terminal\" to do script \"%s\"", cmd)); if (U.isUnix()) commands = F.asArray("xterm", "-sl", "1024", "-geometry", "200x50", "-e", cmd); ProcessBuilder pb = new ProcessBuilder(commands); if (workFolder != null) pb.directory(workFolder); if (envVars != null) { String sep = U.isWindows() ? ";" : ":"; Map<String, String> goalVars = pb.environment(); for (Map.Entry<String, String> var: envVars.entrySet()) { String envVar = goalVars.get(var.getKey()); if (envVar == null || envVar.isEmpty()) envVar = var.getValue(); else envVar += sep + var.getValue(); goalVars.put(var.getKey(), envVar); } } return pb.start(); } /** * Zips byte array. * * @param input Input bytes. * @return Zipped byte array. 
* @throws IOException If failed. */ public static byte[] zipBytes(byte[] input) throws IOException { return zipBytes(input, DFLT_BUFFER_SIZE); } /** * Zips byte array. * * @param input Input bytes. * @param initBufSize Initial buffer size. * @return Zipped byte array. * @throws IOException If failed. */ public static byte[] zipBytes(byte[] input, int initBufSize) throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(initBufSize); try (ZipOutputStream zos = new ZipOutputStream(bos)) { try { ZipEntry entry = new ZipEntry(""); entry.setSize(input.length); zos.putNextEntry(entry); zos.write(input); } finally { zos.closeEntry(); } } return bos.toByteArray(); } /** * @param msg Exception message. * @return {@code true} if node failed to join grid. */ public static boolean joinTimedOut(String msg) { return msg != null && msg.startsWith("Join process timed out."); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.util; import java.io.DataInput; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CodingErrorAction; import java.nio.charset.MalformedInputException; import java.text.CharacterIterator; import java.text.StringCharacterIterator; import java.util.Arrays; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.ser.std.StdSerializer; /** * A simplified byte wrapper similar to Hadoop's Text class without all the dependencies. Lifted from Hadoop 2.7.1 */ @JsonSerialize(using = Text.TextSerializer.class) public class Text { private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY = new ThreadLocal<CharsetEncoder>() { @Override protected CharsetEncoder initialValue() { return Charset.forName("UTF-8").newEncoder(). 
onMalformedInput(CodingErrorAction.REPORT). onUnmappableCharacter(CodingErrorAction.REPORT); } }; private static ThreadLocal<CharsetDecoder> DECODER_FACTORY = new ThreadLocal<CharsetDecoder>() { @Override protected CharsetDecoder initialValue() { return Charset.forName("UTF-8").newDecoder(). onMalformedInput(CodingErrorAction.REPORT). onUnmappableCharacter(CodingErrorAction.REPORT); } }; private static final byte[] EMPTY_BYTES = new byte[0]; private byte[] bytes; private int length; public Text() { bytes = EMPTY_BYTES; } /** * Construct from a string. */ public Text(String string) { set(string); } /** Construct from another text. */ public Text(Text utf8) { set(utf8); } /** * Construct from a byte array. */ public Text(byte[] utf8) { set(utf8); } /** * Get a copy of the bytes that is exactly the length of the data. See {@link #getBytes()} for faster access to the * underlying array. */ public byte[] copyBytes() { byte[] result = new byte[length]; System.arraycopy(bytes, 0, result, 0, length); return result; } /** * Returns the raw bytes; however, only data up to {@link #getLength()} is valid. Please use {@link #copyBytes()} if * you need the returned array to be precisely the length of the data. */ public byte[] getBytes() { return bytes; } /** Returns the number of bytes in the byte array */ public int getLength() { return length; } /** * Returns the Unicode Scalar Value (32-bit integer value) for the character at <code>position</code>. Note that this * method avoids using the converter or doing String instantiation * * @return the Unicode scalar value at position or -1 if the position is invalid or points to a trailing byte */ public int charAt(int position) { if (position > this.length) { return -1; // too long } if (position < 0) { return -1; // duh. 
} ByteBuffer bb = (ByteBuffer) ByteBuffer.wrap(bytes).position(position); return bytesToCodePoint(bb.slice()); } public int find(String what) { return find(what, 0); } /** * Finds any occurence of <code>what</code> in the backing buffer, starting as position <code>start</code>. The * starting position is measured in bytes and the return value is in terms of byte position in the buffer. The backing * buffer is not converted to a string for this operation. * * @return byte position of the first occurence of the search string in the UTF-8 buffer or -1 if not found */ public int find(String what, int start) { try { ByteBuffer src = ByteBuffer.wrap(this.bytes, 0, this.length); ByteBuffer tgt = encode(what); byte b = tgt.get(); src.position(start); while (src.hasRemaining()) { if (b == src.get()) { // matching first byte src.mark(); // save position in loop tgt.mark(); // save position in target boolean found = true; int pos = src.position() - 1; while (tgt.hasRemaining()) { if (!src.hasRemaining()) { // src expired first tgt.reset(); src.reset(); found = false; break; } if (!(tgt.get() == src.get())) { tgt.reset(); src.reset(); found = false; break; // no match } } if (found) { return pos; } } } return -1; // not found } catch (CharacterCodingException e) { // can't get here e.printStackTrace(); return -1; } } /** * Set to contain the contents of a string. */ public void set(String string) { try { ByteBuffer bb = encode(string, true); bytes = bb.array(); length = bb.limit(); } catch (CharacterCodingException e) { throw new RuntimeException("Should not have happened ", e); } } /** * Set to a utf8 byte array */ public void set(byte[] utf8) { set(utf8, 0, utf8.length); } /** copy a text. 
*/
public void set(Text other) {
    set(other.getBytes(), 0, other.getLength());
}

/**
 * Set the Text to a range of bytes.
 *
 * @param utf8
 *            the data to copy from
 * @param start
 *            the first position of the new string
 * @param len
 *            the number of bytes of the new string
 */
public void set(byte[] utf8, int start, int len) {
    setCapacity(len, false);
    System.arraycopy(utf8, start, bytes, 0, len);
    this.length = len;
}

/**
 * Append a range of bytes to the end of the given text.
 *
 * @param utf8
 *            the data to copy from
 * @param start
 *            the first position to append from utf8
 * @param len
 *            the number of bytes to append
 */
public void append(byte[] utf8, int start, int len) {
    // Grow (preserving current content) before copying the new bytes in.
    setCapacity(length + len, true);
    System.arraycopy(utf8, start, bytes, length, len);
    length += len;
}

/**
 * Clear the string to empty.
 *
 * <em>Note</em>: For performance reasons, this call does not clear the
 * underlying byte array that is retrievable via {@link #getBytes()}. In
 * order to free the byte-array memory, call {@link #set(byte[])} with an
 * empty byte array (For example, <code>new byte[0]</code>).
 */
public void clear() {
    length = 0;
}

/*
 * Sets the capacity of this Text object to <em>at least</em>
 * <code>len</code> bytes. If the current buffer is longer, then the
 * capacity and existing content of the buffer are unchanged. If
 * <code>len</code> is larger than the current capacity, the Text object's
 * capacity is increased to match.
 *
 * @param len the number of bytes we need
 *
 * @param keepData should the old data be kept
 */
private void setCapacity(int len, boolean keepData) {
    if (bytes == null || bytes.length < len) {
        if (bytes != null && keepData) {
            // Grow to at least double the used length so repeated appends
            // are amortized O(1) per byte.
            bytes = Arrays.copyOf(bytes, Math.max(len, length << 1));
        } else {
            bytes = new byte[len];
        }
    }
}

/**
 * Convert text back to string, decoding the valid prefix of the buffer as
 * UTF-8.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    try {
        return decode(bytes, 0, length);
    } catch (CharacterCodingException e) {
        // The buffer is expected to always hold valid UTF-8, so a decode
        // failure indicates internal corruption.
        throw new RuntimeException("Should not have happened ", e);
    }
}

/**
 * Read a Text object whose length is already known. This allows creating
 * Text from a stream which uses a different serialization format.
 *
 * @param in the stream to read the raw bytes from
 * @param len the exact number of bytes to read
 * @throws IOException if the stream cannot supply {@code len} bytes
 */
public void readWithKnownLength(DataInput in, int len) throws IOException {
    setCapacity(len, false);
    in.readFully(bytes, 0, len);
    length = len;
}

/** Returns true iff <code>o</code> is a Text with the same contents. */
@Override
public boolean equals(Object o) {
    if (!(o instanceof Text)) {
        return false;
    }
    final Text that = (Text) o;
    if (this.getLength() != that.getLength()) {
        return false;
    }
    // Compare the valid prefix of each backing array in place. The previous
    // implementation copied both arrays with Arrays.copyOf just to compare
    // them, allocating two throwaway arrays per call.
    final byte[] thisBytes = this.getBytes();
    final byte[] thatBytes = that.getBytes();
    final int len = getLength();
    for (int i = 0; i < len; i++) {
        if (thisBytes[i] != thatBytes[i]) {
            return false;
        }
    }
    return true;
}

@Override
public int hashCode() {
    // NOTE(review): delegates to the superclass. This is only consistent
    // with the value-based equals() above if the parent class hashes the
    // byte contents (as e.g. BinaryComparable does) — the parent is outside
    // this view; confirm before relying on Text as a hash key.
    return super.hashCode();
}

// / STATIC UTILITIES FROM HERE DOWN

/**
 * Converts the provided byte array to a String using the UTF-8 encoding.
 * If the input is malformed, it is replaced by the substitution character.
 */
public static String decode(byte[] utf8) throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8), true);
}

public static String decode(byte[] utf8, int start, int length)
        throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8, start, length), true);
}

/**
 * Converts the provided byte array to a String using the UTF-8 encoding.
 * If <code>replace</code> is true, then malformed input is replaced with
 * the substitution character, which is U+FFFD. Otherwise the method throws
 * a MalformedInputException.
 */
public static String decode(byte[] utf8, int start, int length,
        boolean replace) throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8, start, length), replace);
}

private static String decode(ByteBuffer utf8, boolean replace)
        throws CharacterCodingException {
    CharsetDecoder decoder = DECODER_FACTORY.get();
    if (replace) {
        decoder.onMalformedInput(CodingErrorAction.REPLACE);
        decoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    }
    String str = decoder.decode(utf8).toString();
    // Set the (thread-local, reused) decoder back to its default value:
    // REPORT.
    if (replace) {
        decoder.onMalformedInput(CodingErrorAction.REPORT);
        decoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    return str;
}

/**
 * Converts the provided String to bytes using the UTF-8 encoding. If the
 * input is malformed, invalid chars are replaced by a default value.
 *
 * @return ByteBuffer: bytes stored at ByteBuffer.array() and length is
 *         ByteBuffer.limit()
 */
public static ByteBuffer encode(String string)
        throws CharacterCodingException {
    return encode(string, true);
}

/**
 * Converts the provided String to bytes using the UTF-8 encoding. If
 * <code>replace</code> is true, then malformed input is replaced with the
 * substitution character, which is U+FFFD. Otherwise the method throws a
 * MalformedInputException.
 *
 * @return ByteBuffer: bytes stored at ByteBuffer.array() and length is
 *         ByteBuffer.limit()
 */
public static ByteBuffer encode(String string, boolean replace)
        throws CharacterCodingException {
    CharsetEncoder encoder = ENCODER_FACTORY.get();
    if (replace) {
        encoder.onMalformedInput(CodingErrorAction.REPLACE);
        encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    }
    ByteBuffer bytes = encoder
            .encode(CharBuffer.wrap(string.toCharArray()));
    // Restore the shared encoder's default error action: REPORT.
    if (replace) {
        encoder.onMalformedInput(CodingErrorAction.REPORT);
        encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    return bytes;
}

public static final int DEFAULT_MAX_LEN = 1024 * 1024;

// //// States for the byte-at-a-time DFA used by validateUTF8.
private static final int LEAD_BYTE = 0;

private static final int TRAIL_BYTE_1 = 1;

private static final int TRAIL_BYTE = 2;

/**
 * Check if a byte array contains valid utf-8.
 *
 * @param utf8
 *            byte array
 * @throws MalformedInputException
 *             if the byte array contains invalid utf-8
 */
public static void validateUTF8(byte[] utf8) throws MalformedInputException {
    validateUTF8(utf8, 0, utf8.length);
}

/**
 * Check to see if a byte array is valid utf-8.
 *
 * @param utf8
 *            the array of bytes
 * @param start
 *            the offset of the first byte in the array
 * @param len
 *            the length of the byte sequence
 * @throws MalformedInputException
 *             if the byte array contains invalid bytes
 */
public static void validateUTF8(byte[] utf8, int start, int len)
        throws MalformedInputException {
    int count = start;
    int leadByte = 0;
    int length = 0;
    int state = LEAD_BYTE;
    while (count < start + len) {
        int aByte = utf8[count] & 0xFF;
        switch (state) {
        case LEAD_BYTE:
            leadByte = aByte;
            length = bytesFromUTF8[aByte];
            switch (length) {
            case 0: // check for ASCII
                if (leadByte > 0x7F) {
                    throw new MalformedInputException(count);
                }
                break;
            case 1:
                if (leadByte < 0xC2 || leadByte > 0xDF) {
                    throw new MalformedInputException(count);
                }
                state = TRAIL_BYTE_1;
                break;
            case 2:
                if (leadByte < 0xE0 || leadByte > 0xEF) {
                    throw new MalformedInputException(count);
                }
                state = TRAIL_BYTE_1;
                break;
            case 3:
                if (leadByte < 0xF0 || leadByte > 0xF4) {
                    throw new MalformedInputException(count);
                }
                state = TRAIL_BYTE_1;
                break;
            default:
                // too long! Longest valid UTF-8 is 4 bytes (lead + three)
                // or if < 0 we got a trail byte in the lead byte position
                throw new MalformedInputException(count);
            } // switch (length)
            break;
        case TRAIL_BYTE_1:
            // Reject the lead/trail combinations that would encode
            // overlong forms, surrogates (ED A0..BF), or > U+10FFFF.
            if (leadByte == 0xF0 && aByte < 0x90) {
                throw new MalformedInputException(count);
            }
            if (leadByte == 0xF4 && aByte > 0x8F) {
                throw new MalformedInputException(count);
            }
            if (leadByte == 0xE0 && aByte < 0xA0) {
                throw new MalformedInputException(count);
            }
            if (leadByte == 0xED && aByte > 0x9F) {
                throw new MalformedInputException(count);
            }
            // falls through to regular trail-byte test!!
        case TRAIL_BYTE:
            if (aByte < 0x80 || aByte > 0xBF) {
                throw new MalformedInputException(count);
            }
            if (--length == 0) {
                state = LEAD_BYTE;
            } else {
                state = TRAIL_BYTE;
            }
            break;
        default:
            break;
        } // switch (state)
        count++;
    }
}

/**
 * Magic numbers for UTF-8. These are the number of bytes that
 * <em>follow</em> a given lead byte. Trailing bytes have the value -1. The
 * values 4 and 5 are presented in this table, even though valid UTF-8
 * cannot include the five and six byte sequences.
 */
static final int[] bytesFromUTF8 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0,
        // trail bytes
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3,
        3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5 };

/**
 * Returns the next code point at the current position in the buffer. The
 * buffer's position will be incremented. Any mark set on this buffer will
 * be changed by this method!
 */
public static int bytesToCodePoint(ByteBuffer bytes) {
    bytes.mark();
    byte b = bytes.get();
    bytes.reset();
    int extraBytesToRead = bytesFromUTF8[(b & 0xFF)];
    if(extraBytesToRead < 0) {
        return -1; // trailing byte!
    }
    // Accumulate 6 payload bits per trailing byte (the switch deliberately
    // falls through), then subtract the lead-byte bias for that length.
    int ch = 0;
    switch (extraBytesToRead) {
    case 5:
        ch += (bytes.get() & 0xFF);
        ch <<= 6; /* remember, illegal UTF-8 */
    case 4:
        ch += (bytes.get() & 0xFF);
        ch <<= 6; /* remember, illegal UTF-8 */
    case 3:
        ch += (bytes.get() & 0xFF);
        ch <<= 6;
    case 2:
        ch += (bytes.get() & 0xFF);
        ch <<= 6;
    case 1:
        ch += (bytes.get() & 0xFF);
        ch <<= 6;
    case 0:
        ch += (bytes.get() & 0xFF);
    }
    ch -= offsetsFromUTF8[extraBytesToRead];
    return ch;
}

static final int offsetsFromUTF8[] = { 0x00000000, 0x00003080, 0x000E2080,
        0x03C82080, 0xFA082080, 0x82082080 };

/**
 * For the given string, returns the number of UTF-8 bytes required to
 * encode the string.
 *
 * @param string
 *            text to encode
 * @return number of UTF-8 bytes required to encode
 */
public static int utf8Length(String string) {
    CharacterIterator iter = new StringCharacterIterator(string);
    char ch = iter.first();
    int size = 0;
    while (ch != CharacterIterator.DONE) {
        if((ch >= 0xD800) && (ch < 0xDC00)) {
            // surrogate pair?
            char trail = iter.next();
            if((trail > 0xDBFF) && (trail < 0xE000)) {
                // valid pair
                size += 4;
            }
            else {
                // invalid pair
                size += 3;
                iter.previous(); // rewind one
            }
        }
        else if(ch < 0x80) {
            size++;
        }
        else if(ch < 0x800) {
            size += 2;
        }
        else {
            // ch < 0x10000, that is, the largest char value
            size += 3;
        }
        ch = iter.next();
    }
    return size;
}

/**
 * Jackson serializer that writes a {@link Text} as its decoded UTF-8
 * string.
 */
public static class TextSerializer extends StdSerializer<Text> {

    public TextSerializer() {
        super(Text.class);
    }

    @Override
    public void serialize(Text text, JsonGenerator jsonGenerator,
            SerializerProvider serializerProvider)
            throws IOException, JsonGenerationException {
        jsonGenerator.writeString(text.toString());
    }
}
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/security/privateca/v1/service.proto
//
// NOTE(review): this file is protoc-generated. Do not hand-edit; change
// service.proto and regenerate instead. Comments below were added for
// readability only — the code is byte-identical to the generator output.

package com.google.cloud.security.privateca.v1;

/**
 * <pre>
 * Request message for [CertificateAuthorityService.GetCertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthorityService.GetCertificateAuthority].
 * </pre>
 *
 * Protobuf type {@code google.cloud.security.privateca.v1.GetCertificateAuthorityRequest}
 */
public final class GetCertificateAuthorityRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)
    GetCertificateAuthorityRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetCertificateAuthorityRequest.newBuilder() to construct.
  private GetCertificateAuthorityRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private GetCertificateAuthorityRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetCertificateAuthorityRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until EOF,
  // preserving unrecognized fields in unknownFields.
  private GetCertificateAuthorityRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of input
            done = true;
            break;
          case 10: // field 1 (name), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.security.privateca.v1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1_GetCertificateAuthorityRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.security.privateca.v1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1_GetCertificateAuthorityRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest.class,
            com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; lazily converted and
  // cached by the accessors below.
  private volatile java.lang.Object name_;

  /**
   * <pre>
   * Required. The [name][google.cloud.security.privateca.v1.CertificateAuthority.name] of the
   * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] to get.
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache the decoded form
      return s;
    }
  }

  /**
   * Same field as {@link #getName()}, returned as UTF-8 bytes.
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest other =
        (com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads for every supported input form.
  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * Request message for [CertificateAuthorityService.GetCertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthorityService.GetCertificateAuthority].
   * </pre>
   *
   * Protobuf type {@code google.cloud.security.privateca.v1.GetCertificateAuthorityRequest}
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)
      com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.security.privateca.v1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1_GetCertificateAuthorityRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.security.privateca.v1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1_GetCertificateAuthorityRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest.class,
              com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.security.privateca.v1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1_GetCertificateAuthorityRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
        getDefaultInstanceForType() {
      return com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest build() {
      com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest buildPartial() {
      com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest result =
          new com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest) {
        return mergeFrom(
            (com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest other) {
      if (other
          == com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
              .getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before the failure so partial data is
        // not lost.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";

    /**
     * <pre>
     * Required. The [name][google.cloud.security.privateca.v1.CertificateAuthority.name] of the
     * [CertificateAuthority][google.cloud.security.privateca.v1.CertificateAuthority] to get.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Same field as {@link #getName()}, returned as UTF-8 bytes.
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the required CertificateAuthority resource name.
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      onChanged();
      return this;
    }

    /**
     * Resets the name field to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }

    /**
     * Sets the name field from raw UTF-8 bytes (validated).
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.security.privateca.v1.GetCertificateAuthorityRequest)
  private static final com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest();
  }

  public static com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetCertificateAuthorityRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetCertificateAuthorityRequest>() {
        @java.lang.Override
        public GetCertificateAuthorityRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new GetCertificateAuthorityRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<GetCertificateAuthorityRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetCertificateAuthorityRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.security.privateca.v1.GetCertificateAuthorityRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * #%L * ImageJ software for multidimensional image processing and analysis. * %% * Copyright (C) 2014 - 2015 Board of Regents of the University of * Wisconsin-Madison, University of Konstanz and Brian Northan. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* #L%
 */

package net.imglib2.ops.operation.img.unary;

import java.util.Iterator;

import net.imglib2.Cursor;
import net.imglib2.RandomAccess;
import net.imglib2.img.Img;
import net.imglib2.ops.operation.UnaryOperation;
import net.imglib2.ops.operation.UnaryOutputOperation;
import net.imglib2.ops.operation.iterable.unary.Max;
import net.imglib2.ops.operation.iterable.unary.Mean;
import net.imglib2.ops.operation.iterable.unary.MedianOp;
import net.imglib2.ops.operation.iterable.unary.Min;
import net.imglib2.ops.operation.iterable.unary.StdDeviation;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.real.DoubleType;

/**
 * Projects an {@link Img} in a given dimension: the output image has one fewer
 * dimension than the input, and each output pixel is an aggregate (max, min,
 * mean, median, or standard deviation) over the projected dimension.
 *
 * @author Christian Dietz (University of Konstanz)
 * @author Martin Horn (University of Konstanz)
 * @deprecated Use net.imagej.ops instead.
 */
@Deprecated
public class ImgProject< T extends RealType< T >> implements UnaryOutputOperation< Img< T >, Img< T >>
{

	/**
	 * Different projection types which can be used by ImageJ
	 */
	public enum ProjectionType
	{
		MAX_INTENSITY, MEDIAN_INTENSITY, AVG_INTENSITY, MIN_INTENSITY, STD_DEVIATION;
	}

	/* Type of projection (which aggregate to compute per output pixel) */
	private final ProjectionType m_projectionType;

	/* Dimension of projection (index of the dimension collapsed away) */
	private final int m_projectionDim;

	/**
	 * Projects the pixels onto all dimensions in the direction of
	 * <code>projectionDim</code>.
	 *
	 * @param type the aggregate to compute along the projected dimension
	 * @param projectionDim the dimension index to project away
	 */
	public ImgProject( ProjectionType type, int projectionDim )
	{
		m_projectionDim = projectionDim;
		m_projectionType = type;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Creates an output image whose dimensions are the input's with
	 * m_projectionDim removed.
	 */
	@Override
	public Img< T > createEmptyOutput( Img< T > op )
	{
		/* The new dimensions of the projected image */
		long[] projectedImgDimSizes = new long[ op.numDimensions() - 1 ];

		// Copy every dimension size except the projected one; dimensions
		// above it shift down by one index.
		for ( int d = 0; d < op.numDimensions(); d++ )
		{
			if ( d < m_projectionDim )
			{
				projectedImgDimSizes[ d ] = op.dimension( d );
			}
			if ( d > m_projectionDim )
			{
				projectedImgDimSizes[ d - 1 ] = op.dimension( d );
			}
		}

		/* The projected Image */
		Img< T > projectedImage = op.factory().create( projectedImgDimSizes, op.randomAccess().get().createVariable() );

		return projectedImage;
	}

	/**
	 * {@inheritDoc}
	 *
	 * For every output pixel, positions a random access on the matching
	 * source coordinates and aggregates all samples along the projected
	 * dimension via {@link #handleProjection}.
	 */
	@Override
	public Img< T > compute( final Img< T > op, final Img< T > r )
	{
		Cursor< T > projCur = r.localizingCursor();

		final RandomAccess< T > srcRA = op.randomAccess();

		while ( projCur.hasNext() )
		{
			projCur.fwd();

			// Map output coordinates back to source coordinates, skipping
			// the projected dimension (set per-sample by the iterator below).
			for ( int d = 0; d < op.numDimensions(); d++ )
			{
				if ( d < m_projectionDim )
				{
					srcRA.setPosition( projCur.getIntPosition( d ), d );
				}
				if ( d > m_projectionDim )
				{
					srcRA.setPosition( projCur.getIntPosition( d - 1 ), d );
				}
			}

			// Lazy iterator that walks the source along the projected
			// dimension; the aggregate op consumes it without copying.
			projCur.get().setReal( handleProjection( new Iterator< T >()
			{

				int k = -1;

				@Override
				public boolean hasNext()
				{
					return k < op.dimension( m_projectionDim ) - 1;
				}

				@Override
				public T next()
				{
					k++;
					srcRA.setPosition( k, m_projectionDim );
					return srcRA.get();
				}

				// Intentionally a no-op: removal makes no sense for a
				// read-only walk over image samples.
				@Override
				public void remove()
				{
				}
			} ) );
		}

		return r;
	}

	/*
	 * Projection is handled according to the projection type. The iterator
	 * yields exactly as many samples as the size of the projected dimension.
	 *
	 * @param iterable samples along the projected dimension
	 *
	 * @return the aggregated value for one output pixel
	 */
	private final double handleProjection( Iterator< T > iterable )
	{
		UnaryOperation< Iterator< T >, DoubleType > op;
		switch ( m_projectionType )
		{
		case AVG_INTENSITY:
			op = new Mean< T, DoubleType >();
			break;
		case MEDIAN_INTENSITY:
			op = new MedianOp< T, DoubleType >();
			break;
		case MAX_INTENSITY:
			op = new Max< T, DoubleType >();
			break;
		case MIN_INTENSITY:
			op = new Min< T, DoubleType >();
			break;
		case STD_DEVIATION:
			op = new StdDeviation< T, DoubleType >();
			break;
		default:
			throw new IllegalArgumentException( "Projection Method doesn't exist" );
		}

		return op.compute( iterable, new DoubleType() ).get();
	}

	@Override
	public UnaryOutputOperation< Img< T >, Img< T >> copy()
	{
		return new ImgProject< T >( m_projectionType, m_projectionDim );
	}

	@Override
	public Img< T > compute( Img< T > arg0 )
	{
		return compute( arg0, createEmptyOutput( arg0 ) );
	}
}
package net.minecraft.world.level.storage;

/**
 * Copyright Mojang AB.
 *
 * Don't do evil.
 */

import java.io.*;
import java.util.ArrayList;

import net.minecraft.world.level.biome.BiomeSource;
import net.minecraft.world.level.chunk.storage.*;
import net.minecraft.world.level.chunk.storage.OldChunkStorage.OldLevelChunk;

import com.mojang.nbt.*;

/**
 * Converts worlds saved in the old MCRegion format to the Anvil format:
 * every region file of the overworld, nether and end is rewritten
 * chunk-by-chunk, the old level.dat is backed up as level.dat_mcr, and the
 * level version number is bumped.
 */
public class AnvilLevelStorageSource {

    /** Root directory that contains one sub-directory per saved level. */
    private final File baseDir;

    public AnvilLevelStorageSource(File dir) {
        baseDir = dir;
    }

    /**
     * Returns {@code true} if the level's data tag exists and still carries
     * the MCRegion version id, i.e. the level can (and should) be converted.
     */
    public boolean isConvertible(String levelId) {
        // check if there is old file format level data
        CompoundTag levelData = getDataTagFor(levelId);
        return levelData != null
                && levelData.getInt("version") == AnvilLevelStorage.MCREGION_VERSION_ID;
    }

    /**
     * Reads the "Data" compound of the level, preferring level.dat and
     * falling back to level.dat_old. Returns {@code null} if neither can be
     * read.
     */
    private CompoundTag getDataTagFor(String levelId) {
        File dir = new File(baseDir, levelId);
        if (!dir.exists()) {
            return null;
        }
        CompoundTag tag = readLevelData(new File(dir, "level.dat"));
        if (tag != null) {
            return tag;
        }
        return readLevelData(new File(dir, "level.dat_old"));
    }

    /**
     * Reads the "Data" compound from one compressed NBT file; returns
     * {@code null} if the file is missing or unreadable. The stream is
     * always closed (the original leaked it).
     */
    private CompoundTag readLevelData(File dataFile) {
        if (!dataFile.exists()) {
            return null;
        }
        FileInputStream in = null;
        try {
            in = new FileInputStream(dataFile);
            return NbtIo.readCompressed(in).getCompound("Data");
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            closeQuietly(in);
        }
    }

    /** Best-effort close; used where a close failure must not mask errors. */
    private static void closeQuietly(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException ignored) {
                // nothing sensible to do on a failed close here
            }
        }
    }

    /**
     * Writes {@code dataTag} back as the "Data" compound of level.dat.
     * Silently does nothing if the level directory or level.dat is missing
     * (matches original behavior).
     */
    private void saveDataTag(String levelId, CompoundTag dataTag) {
        File dir = new File(baseDir, levelId);
        if (!dir.exists()) {
            return;
        }
        File dataFile = new File(dir, "level.dat");
        if (dataFile.exists()) {
            FileOutputStream out = null;
            try {
                CompoundTag root = new CompoundTag();
                root.put("Data", dataTag);
                out = new FileOutputStream(dataFile);
                NbtIo.writeCompressed(root, out);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                closeQuietly(out); // original leaked the stream
            }
        }
    }

    /**
     * Converts the whole level (overworld, nether, end), reporting progress
     * in percent via {@code progress}.
     *
     * @return {@code true} on success, {@code false} if the level data could
     *         not be read (the original would have NPE'd only after doing
     *         all the conversion work)
     */
    public boolean convertLevel(String levelId, ProgressListener progress) {
        progress.progressStagePercentage(0);

        // Fail fast if there is no readable level.dat — the version bump at
        // the end needs it anyway.
        CompoundTag levelData = getDataTagFor(levelId);
        if (levelData == null) {
            System.out.println("Warning: unable to read level data for " + levelId);
            return false;
        }

        ArrayList<File> normalRegions = new ArrayList<File>();
        ArrayList<File> netherRegions = new ArrayList<File>();
        ArrayList<File> enderRegions = new ArrayList<File>();

        File baseFolder = new File(baseDir, levelId);
        File netherFolder = new File(baseFolder, LevelStorage.NETHER_FOLDER);
        File enderFolder = new File(baseFolder, LevelStorage.ENDER_FOLDER);

        System.out.println("Scanning folders...");

        // find normal world
        addRegionFiles(baseFolder, normalRegions);
        // find hell world
        if (netherFolder.exists()) {
            addRegionFiles(netherFolder, netherRegions);
        }
        // find end world
        if (enderFolder.exists()) {
            addRegionFiles(enderFolder, enderRegions);
        }

        int totalCount = normalRegions.size() + netherRegions.size() + enderRegions.size();
        System.out.println("Total conversion count is " + totalCount);

        // convert normal world
        convertRegions(new File(baseFolder, "region"), normalRegions, null, 0, totalCount, progress);
        // convert hell world
        convertRegions(new File(netherFolder, "region"), netherRegions, null, normalRegions.size(), totalCount, progress);
        // convert end world
        convertRegions(new File(enderFolder, "region"), enderRegions, null,
                normalRegions.size() + netherRegions.size(), totalCount, progress);

        makeMcrLevelDatBackup(levelId);

        levelData.putInt("version", AnvilLevelStorage.ANVIL_VERSION_ID);
        saveDataTag(levelId, levelData);
        return true;
    }

    /**
     * Renames level.dat to level.dat_mcr so the pre-conversion data can be
     * recovered; only warns (does not abort) on failure, as before.
     */
    private void makeMcrLevelDatBackup(String levelId) {
        File dir = new File(baseDir, levelId);
        if (!dir.exists()) {
            System.out.println("Warning: Unable to create level.dat_mcr backup");
            return;
        }
        File dataFile = new File(dir, "level.dat");
        if (!dataFile.exists()) {
            System.out.println("Warning: Unable to create level.dat_mcr backup");
            return;
        }
        File newName = new File(dir, "level.dat_mcr");
        if (!dataFile.renameTo(newName)) {
            System.out.println("Warning: Unable to create level.dat_mcr backup");
        }
    }

    /**
     * Converts each region file in order, updating the percentage after each
     * file based on how many of {@code totalCount} files are done overall.
     */
    private void convertRegions(File baseFolder, ArrayList<File> regionFiles, BiomeSource biomeSource,
            int currentCount, int totalCount, ProgressListener progress) {
        for (File regionFile : regionFiles) {
            convertRegion(baseFolder, regionFile, biomeSource, currentCount, totalCount, progress);
            currentCount++;
            int percent = (int) Math.round(100.0d * (double) currentCount / (double) totalCount);
            progress.progressStagePercentage(percent);
        }
    }

    /**
     * Converts one .mcr region file into a .mca file in {@code baseFolder},
     * skipping chunks already present in the destination. Both region files
     * are now closed even when an I/O error aborts the conversion (the
     * original leaked them on exception).
     */
    private void convertRegion(File baseFolder, File regionFile, BiomeSource biomeSource,
            int currentCount, int totalCount, ProgressListener progress) {
        try {
            String name = regionFile.getName();
            RegionFile regionSource = new RegionFile(regionFile);
            RegionFile regionDest = new RegionFile(new File(baseFolder,
                    name.substring(0, name.length() - RegionFile.MCREGION_EXTENSION.length())
                            + RegionFile.ANVIL_EXTENSION));
            try {
                for (int x = 0; x < 32; x++) {
                    for (int z = 0; z < 32; z++) {
                        if (regionSource.hasChunk(x, z) && !regionDest.hasChunk(x, z)) {
                            convertChunk(regionSource, regionDest, x, z, biomeSource);
                        }
                    }
                    // fine-grained progress inside a single region file:
                    // each region contributes 1024 chunk slots
                    int basePercent = (int) Math.round(100.0d * (double) (currentCount * 1024)
                            / (double) (totalCount * 1024));
                    int newPercent = (int) Math.round(100.0d * (double) ((x + 1) * 32 + currentCount * 1024)
                            / (double) (totalCount * 1024));
                    if (newPercent > basePercent) {
                        progress.progressStagePercentage(newPercent);
                    }
                }
            } finally {
                try {
                    regionSource.close();
                } finally {
                    regionDest.close();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Reads one old-format chunk, converts it and writes it to the destination. */
    private void convertChunk(RegionFile regionSource, RegionFile regionDest, int x, int z,
            BiomeSource biomeSource) throws IOException {
        DataInputStream regionChunkInputStream = regionSource.getChunkDataInputStream(x, z);
        if (regionChunkInputStream == null) {
            System.out.println("Failed to fetch input stream");
            return;
        }
        CompoundTag chunkData;
        try {
            chunkData = NbtIo.read(regionChunkInputStream);
        } finally {
            regionChunkInputStream.close();
        }

        CompoundTag compound = chunkData.getCompound("Level");
        OldLevelChunk oldChunk = OldChunkStorage.load(compound);

        CompoundTag tag = new CompoundTag();
        CompoundTag levelData = new CompoundTag();
        tag.put("Level", levelData);
        OldChunkStorage.convertToAnvilFormat(oldChunk, levelData, biomeSource);

        DataOutputStream chunkDataOutputStream = regionDest.getChunkDataOutputStream(x, z);
        try {
            NbtIo.write(tag, chunkDataOutputStream);
        } finally {
            chunkDataOutputStream.close();
        }
    }

    /** Collects all *.mcr files in {@code baseFolder}/region into {@code regionFiles}. */
    private void addRegionFiles(File baseFolder, ArrayList<File> regionFiles) {
        File regionFolder = new File(baseFolder, "region");
        File[] list = regionFolder.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(RegionFile.MCREGION_EXTENSION);
            }
        });
        if (list != null) {
            for (File file : list) {
                regionFiles.add(file);
            }
        }
    }
}
/*
 * Copyright 2016 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.dao;

import com.thoughtworks.go.config.ArtifactPlans;
import com.thoughtworks.go.config.ArtifactPropertiesGenerators;
import com.thoughtworks.go.config.EnvironmentVariablesConfig;
import com.thoughtworks.go.config.Resources;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.helper.JobInstanceMother;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.domain.JobStatusListener;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.ibatis.SqlMapClientTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.*;

import static com.thoughtworks.go.util.ArrayUtil.asList;
import static com.thoughtworks.go.util.IBatisUtil.arguments;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;

/**
 * Verifies the caching behavior of {@link JobInstanceSqlMapDao}: queries are
 * cached, and the relevant cache entries are evicted on job state changes.
 * The SqlMapClientTemplate is mocked so every cache hit/miss is observable
 * through Mockito's invocation counts.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "classpath:WEB-INF/applicationContext-global.xml",
        "classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
        "classpath:WEB-INF/applicationContext-acegi-security.xml"
})
public class JobInstanceSqlMapDaoCachingTest {
    @Autowired
    private GoCache goCache;
    @Autowired
    private JobInstanceSqlMapDao jobInstanceDao;
    private SqlMapClientTemplate mockTemplate;

    @Before
    public void setup() {
        mockTemplate = mock(SqlMapClientTemplate.class);
    }

    @After
    public void tearDown() {
        // the GoCache is shared application state; reset it between tests
        goCache.clear();
    }

    @Test
    public void buildByIdWithTransitions_shouldCacheWhenQueriedFor() {
        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);

        JobInstance job = JobInstanceMother.assigned("job");
        job.setId(1L);
        when(mockTemplate.queryForObject("buildByIdWithTransitions", 1L)).thenReturn(job);

        JobInstance actual = jobInstanceDao.buildByIdWithTransitions(1L);
        assertThat(actual, is(job));
        // the DAO must hand out a copy, not the cached instance itself
        assertThat(actual == job, is(false));

        jobInstanceDao.buildByIdWithTransitions(1L);
        verify(mockTemplate, times(1)).queryForObject("buildByIdWithTransitions", 1L);
    }

    @Test
    public void buildByIdWithTransitions_shouldClearFromCacheOnUpdateStatusOfJob() {
        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);

        JobInstance job = JobInstanceMother.assigned("job");
        job.setId(1L);
        when(mockTemplate.queryForObject("buildByIdWithTransitions", 1L)).thenReturn(job);

        JobInstance actual = jobInstanceDao.buildByIdWithTransitions(1L);
        assertThat(actual, is(job));
        assertThat(actual == job, is(false));

        jobInstanceDao.updateStateAndResult(job); // must clear cached job instance

        jobInstanceDao.buildByIdWithTransitions(1L);
        verify(mockTemplate, times(2)).queryForObject("buildByIdWithTransitions", 1L);
    }

    @Test
    public void orderedScheduledBuilds_shouldNotCacheJobPlanWhichIsNoLongerScheduled() {
        when(mockTemplate.queryForList(eq("scheduledPlanIds"))).thenReturn(Arrays.asList(1L, 2L));

        final DefaultJobPlan firstJob = jobPlan(1);
        List<JobPlan> expectedPlans = new ArrayList<JobPlan>() {{
            add(firstJob);
        }};
        when(mockTemplate.queryForObject("scheduledPlan", arguments("id", 1L).asMap())).thenReturn(firstJob);
        // id 2 is no longer scheduled: the lookup returns null and must not be cached
        when(mockTemplate.queryForObject("scheduledPlan", arguments("id", 2L).asMap())).thenReturn(null);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);

        List<JobPlan> plans = jobInstanceDao.orderedScheduledBuilds();

        assertThat(plans, is(expectedPlans));
        verify(mockTemplate, times(2)).queryForObject(eq("scheduledPlan"), any());
        verify(mockTemplate, times(1)).queryForList(eq("scheduledPlanIds"));
    }

    @Test
    public void orderedScheduledBuilds_shouldCacheJobPlan() {
        when(mockTemplate.queryForList(eq("scheduledPlanIds"))).thenReturn(Arrays.asList(1L, 2L));

        final DefaultJobPlan firstJob = jobPlan(1);
        final DefaultJobPlan secondJob = jobPlan(2);
        List<JobPlan> expectedPlans = new ArrayList<JobPlan>() {{
            add(firstJob);
            add(secondJob);
        }};
        when(mockTemplate.queryForObject("scheduledPlan", arguments("id", 1L).asMap())).thenReturn(firstJob);
        when(mockTemplate.queryForObject("scheduledPlan", arguments("id", 2L).asMap())).thenReturn(secondJob);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);

        jobInstanceDao.orderedScheduledBuilds();
        List<JobPlan> plans = jobInstanceDao.orderedScheduledBuilds();

        assertThat(plans, is(expectedPlans));
        // the id list is re-queried each time, but each plan is fetched only once
        verify(mockTemplate, times(2)).queryForObject(eq("scheduledPlan"), any());
        verify(mockTemplate, times(2)).queryForList(eq("scheduledPlanIds"));
    }

    @Test
    public void updateStatus_shouldRemoveCachedJobPlan() {
        when(mockTemplate.queryForList(eq("scheduledPlanIds"))).thenReturn(Arrays.asList(1L));

        final DefaultJobPlan firstJob = jobPlan(1);
        List<JobPlan> expectedPlans = new ArrayList<JobPlan>() {{
            add(firstJob);
        }};
        when(mockTemplate.queryForObject("scheduledPlan", arguments("id", 1L).asMap())).thenReturn(firstJob);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        jobInstanceDao.orderedScheduledBuilds(); // populate the cache

        JobInstance instance = instance(1);
        jobInstanceDao.updateStateAndResult(instance);

        List<JobPlan> plans = jobInstanceDao.orderedScheduledBuilds();
        assertThat(plans, is(expectedPlans));

        verify(mockTemplate, times(2)).queryForObject("scheduledPlan", arguments("id", 1L).asMap()); // because the cache is cleared
        verify(mockTemplate, times(2)).queryForList(eq("scheduledPlanIds"));
    }

    /** Builds a job instance with the given id for eviction-trigger calls. */
    private JobInstance instance(long id) {
        JobInstance instance = JobInstanceMother.jobInstance("first", "resource");
        instance.setId(id);
        return instance;
    }

    @Test
    public void activeJobs_shouldCacheCurrentlyActiveJobIds() {
        final ActiveJob first = new ActiveJob(1L, "pipeline", 1, "label", "stage", "job1");
        final ActiveJob second = new ActiveJob(2L, "another", 2, "label", "stage", "job1");
        List<ActiveJob> expectedJobs = Arrays.asList(first, second);

        when(mockTemplate.queryForList("getActiveJobIds")).thenReturn(Arrays.asList(1L, 2L));
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 1L).asMap())).thenReturn(first);
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 2L).asMap())).thenReturn(second);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        jobInstanceDao.activeJobs(); // populate the cache
        List<ActiveJob> activeJobs = jobInstanceDao.activeJobs();

        assertThat(expectedJobs, is(activeJobs));
        verify(mockTemplate, times(1)).queryForList("getActiveJobIds");
        verify(mockTemplate, times(1)).queryForObject("getActiveJobById", arguments("id", 1L).asMap());
        verify(mockTemplate, times(1)).queryForObject("getActiveJobById", arguments("id", 2L).asMap());
    }

    @Test
    public void activeJobs_shouldRemoveCacheActiveJobOnUpdateJobStatus() {
        final ActiveJob first = new ActiveJob(1L, "pipeline", 1, "label", "stage", "first");
        final ActiveJob second = new ActiveJob(2L, "another", 2, "label", "stage", "job1");
        List<ActiveJob> expectedJobs = Arrays.asList(first, second);

        when(mockTemplate.queryForList("getActiveJobIds")).thenReturn(Arrays.asList(1L, 2L));
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 1L).asMap())).thenReturn(first);
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 2L).asMap())).thenReturn(second);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        jobInstanceDao.activeJobs(); // cache it first
        jobInstanceDao.updateStateAndResult(instance(1L)); // should remove job 1 from cache

        List<ActiveJob> activeJobs = jobInstanceDao.activeJobs();

        assertThat(expectedJobs, is(activeJobs));
        verify(mockTemplate, times(2)).queryForList("getActiveJobIds");
        // job 1 was evicted so it is re-fetched; job 2 stays cached
        verify(mockTemplate, times(2)).queryForObject("getActiveJobById", arguments("id", 1L).asMap());
        verify(mockTemplate, times(1)).queryForObject("getActiveJobById", arguments("id", 2L).asMap());
    }

    @Test
    public void activeJobs_shouldNotCacheAJobThatsNoLongerActive() {
        final ActiveJob first = new ActiveJob(1L, "pipeline", 1, "label", "stage", "first");
        List<ActiveJob> expectedJobs = Arrays.asList(first);

        when(mockTemplate.queryForList("getActiveJobIds")).thenReturn(Arrays.asList(1L, 2L));
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 1L).asMap())).thenReturn(first);
        when(mockTemplate.queryForObject("getActiveJobById", arguments("id", 2L).asMap())).thenReturn(null);

        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        jobInstanceDao.activeJobs(); // cache it first
        jobInstanceDao.updateStateAndResult(instance(1L)); // should remove from cache

        List<ActiveJob> activeJobs = jobInstanceDao.activeJobs();

        assertThat(expectedJobs, is(activeJobs));
        verify(mockTemplate, times(2)).queryForList("getActiveJobIds");
        verify(mockTemplate, times(2)).queryForObject("getActiveJobById", arguments("id", 1L).asMap());
    }

    @Test
    public void shouldCacheJobIdentifier() throws Exception {
        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        JobInstance job = JobInstanceMother.buildEndingWithState(JobState.Building, JobResult.Unknown, "config");
        when(mockTemplate.queryForObject(eq("findJobId"), any(Map.class))).thenReturn(job.getIdentifier());

        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());
        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());

        verify(mockTemplate, times(1)).queryForObject(eq("findJobId"), any(Map.class));
    }

    @Test
    public void shouldClearJobIdentifierFromCacheWhenJobIsRescheduled() throws Exception {
        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        JobInstance job = JobInstanceMother.buildEndingWithState(JobState.Building, JobResult.Unknown, "config");
        when(mockTemplate.queryForObject(eq("findJobId"), any(Map.class))).thenReturn(job.getIdentifier());

        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());

        // rescheduling assigns a new build id, so the cached identifier is stale
        job.changeState(JobState.Rescheduled, new Date());
        JobStatusListener listener = jobInstanceDao;
        listener.jobStatusChanged(job);

        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());

        verify(mockTemplate, times(2)).queryForObject(eq("findJobId"), any(Map.class));
    }

    @Test
    public void shouldnotClearJobIdentifierFromCacheForAnyOtherJobStateChangeOtherThanRescheduledAsTheBuildIdDoesNotChange() throws Exception {
        jobInstanceDao.setSqlMapClientTemplate(mockTemplate);
        JobInstance job = JobInstanceMother.buildEndingWithState(JobState.Building, JobResult.Unknown, "config");
        when(mockTemplate.queryForObject(eq("findJobId"), any(Map.class))).thenReturn(job.getIdentifier());

        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());

        // fixed: was `JobState.Assigned.Unknown` — a static enum constant
        // accessed through an instance expression; same value, but confusing
        // and flagged by Error Prone (StaticQualifiedUsingExpression)
        List<JobState> jobStatesForWhichCacheNeedsToBeMaintained = asList(JobState.Assigned, JobState.Building,
                JobState.Completed, JobState.Discontinued, JobState.Paused, JobState.Scheduled, JobState.Preparing,
                JobState.Unknown);

        JobStatusListener listener = jobInstanceDao;
        for (JobState jobState : jobStatesForWhichCacheNeedsToBeMaintained) {
            job.changeState(jobState, new Date());
            listener.jobStatusChanged(job);
        }

        jobInstanceDao.findOriginalJobIdentifier(job.getIdentifier().getStageIdentifier(), job.getName());

        verify(mockTemplate, times(1)).queryForObject(eq("findJobId"), any(Map.class));
    }

    /** Builds a minimal scheduled job plan with the given id. */
    private DefaultJobPlan jobPlan(long id) {
        return new DefaultJobPlan(new Resources(), new ArtifactPlans(), new ArtifactPropertiesGenerators(),
                id, null, null, new EnvironmentVariablesConfig(), new EnvironmentVariablesConfig(), null);
    }
}
/** * Copyright (c) 2012, Ben Fortuna * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * o Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * o Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * o Neither the name of Ben Fortuna nor the names of any other contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package net.fortuna.ical4j.model;

import java.io.IOException;
import java.net.URISyntaxException;
import java.text.ParseException;

import junit.framework.TestCase;
import junit.framework.TestSuite;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.component.VFreeBusy;
import net.fortuna.ical4j.model.component.VTimeZone;
import net.fortuna.ical4j.model.parameter.TzId;
import net.fortuna.ical4j.model.parameter.Value;
import net.fortuna.ical4j.model.property.CalScale;
import net.fortuna.ical4j.model.property.ProdId;
import net.fortuna.ical4j.model.property.RRule;
import net.fortuna.ical4j.model.property.Uid;
import net.fortuna.ical4j.model.property.Version;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Created on 16/03/2005
 *
 * $Id$
 *
 * @author Ben
 *
 * A test case for creating calendars. Each test instance is bound to one
 * {@link Calendar} fixture; {@link #suite()} builds calendars of increasing
 * complexity and schedules a validation test against each.
 */
public class CalendarTest extends TestCase {

    private static Log log = LogFactory.getLog(Calendar.class);

    // the calendar under test for this particular suite entry
    private Calendar calendar;

    /**
     * Binds a test method name to the calendar fixture it should run against
     * (JUnit 3 parameterized-suite idiom).
     *
     * @param testMethod name of the test method to run
     * @param calendar the calendar fixture for that test
     */
    public CalendarTest(String testMethod, Calendar calendar) {
        super(testMethod);
        this.calendar = calendar;
    }

    /**
     * Asserts the fixture calendar validates cleanly.
     *
     * @throws ValidationException if the calendar is invalid (test failure)
     */
    public void testValid() throws ValidationException {
        calendar.validate();
    }

    /**
     * Asserts the fixture calendar fails validation.
     */
    public void testInvalid() {
        try {
            calendar.validate();
            fail("Should throw a ValidationException");
        }
        catch (ValidationException ve) {
            log.trace(ve);
        }
    }

    /**
     * Builds the test suite: a bare calendar, then two calendars containing
     * three weekly recurring shift events each (built with slightly different
     * WeekDay constructions), all run through testValid.
     *
     * NOTE: the order of statements matters — baseCalendar is registered with
     * the suite before the timezone component is added to it, and the event
     * construction mutates calStart/calEnd cumulatively.
     *
     * @return the populated suite
     * @throws URISyntaxException
     * @throws IOException
     * @throws ParseException
     */
    public static TestSuite suite() throws ParseException, IOException, URISyntaxException {
        TestSuite suite = new TestSuite();

        TimeZoneRegistry registry = TimeZoneRegistryFactory.getInstance().createRegistry();

        Calendar baseCalendar = new Calendar();
        baseCalendar.getProperties().add(new ProdId("-//Ben Fortuna//iCal4j 1.0//EN"));
        baseCalendar.getProperties().add(Version.VERSION_2_0);
        baseCalendar.getProperties().add(CalScale.GREGORIAN);
        suite.addTest(new CalendarTest("testValid", baseCalendar));

        VTimeZone tz = registry.getTimeZone("Australia/Melbourne").getVTimeZone();
        TzId tzParam = new TzId(tz.getProperty(Property.TZID).getValue());
        baseCalendar.getComponents().add(tz);

        // Add events, etc..
        Calendar calendar = new Calendar(baseCalendar);

        // start: next Monday 09:00 local time (minutes/seconds cleared)
        java.util.Calendar calStart = java.util.Calendar.getInstance();
        calStart.set(java.util.Calendar.DAY_OF_WEEK, java.util.Calendar.MONDAY);
        calStart.set(java.util.Calendar.HOUR_OF_DAY, 9);
        calStart.clear(java.util.Calendar.MINUTE);
        calStart.clear(java.util.Calendar.SECOND);

        // recurrence UNTIL: one year after the start
        java.util.Calendar calEnd = java.util.Calendar.getInstance();
        calEnd.setTime(calStart.getTime());
        calEnd.add(java.util.Calendar.YEAR, 1);

        // week 1: 8-hour shift, weekly every 3 weeks, Mon-Fri at 09:00
        VEvent week1UserA = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 1 - User A");
        week1UserA.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week1UserA.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        Recur week1UserARecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week1UserARecur.setInterval(3);
        week1UserARecur.getDayList().add(WeekDay.MO);
        week1UserARecur.getDayList().add(WeekDay.TU);
        week1UserARecur.getDayList().add(WeekDay.WE);
        week1UserARecur.getDayList().add(WeekDay.TH);
        week1UserARecur.getDayList().add(WeekDay.FR);
        week1UserARecur.getHourList().add(new Integer(9));
        week1UserA.getProperties().add(new RRule(week1UserARecur));
        week1UserA.getProperties().add(new Uid("000001@modularity.net.au"));

        // shift both boundaries one week for the next user's rotation
        calStart.add(java.util.Calendar.WEEK_OF_YEAR, 1);
        calEnd.add(java.util.Calendar.WEEK_OF_YEAR, 1);

        VEvent week2UserB = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 2 - User B");
        week2UserB.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week2UserB.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        Recur week2UserBRecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week2UserBRecur.setInterval(3);
        week2UserBRecur.getDayList().add(WeekDay.MO);
        week2UserBRecur.getDayList().add(WeekDay.TU);
        week2UserBRecur.getDayList().add(WeekDay.WE);
        week2UserBRecur.getDayList().add(WeekDay.TH);
        week2UserBRecur.getDayList().add(WeekDay.FR);
        week2UserBRecur.getHourList().add(new Integer(9));
        week2UserB.getProperties().add(new RRule(week2UserBRecur));
        week2UserB.getProperties().add(new Uid("000002@modularity.net.au"));

        calStart.add(java.util.Calendar.WEEK_OF_YEAR, 1);
        calEnd.add(java.util.Calendar.WEEK_OF_YEAR, 1);

        VEvent week3UserC = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 3 - User C");
        week3UserC.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week3UserC.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        Recur week3UserCRecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week3UserCRecur.setInterval(3);
        week3UserCRecur.getDayList().add(WeekDay.MO);
        week3UserCRecur.getDayList().add(WeekDay.TU);
        week3UserCRecur.getDayList().add(WeekDay.WE);
        week3UserCRecur.getDayList().add(WeekDay.TH);
        week3UserCRecur.getDayList().add(WeekDay.FR);
        week3UserCRecur.getHourList().add(new Integer(9));
        week3UserC.getProperties().add(new RRule(week3UserCRecur));
        week3UserC.getProperties().add(new Uid("000003@modularity.net.au"));

        calendar.getComponents().add(week1UserA);
        calendar.getComponents().add(week2UserB);
        calendar.getComponents().add(week3UserC);

        suite.addTest(new CalendarTest("testValid", calendar));

        // test event date ranges..
        // second fixture: same rotation but anchored at 2006-01-01 09:00 and
        // using explicitly-constructed WeekDay instances (offset 0)
        calendar = new Calendar(baseCalendar);

        calStart = java.util.Calendar.getInstance();
        calStart.set(java.util.Calendar.YEAR, 2006);
        calStart.set(java.util.Calendar.MONTH, java.util.Calendar.JANUARY);
        calStart.set(java.util.Calendar.DAY_OF_MONTH, 1);
        calStart.set(java.util.Calendar.HOUR_OF_DAY, 9);
        calStart.clear(java.util.Calendar.MINUTE);
        calStart.clear(java.util.Calendar.SECOND);

        calEnd = java.util.Calendar.getInstance();
        calEnd.setTime(calStart.getTime());
        calEnd.add(java.util.Calendar.YEAR, 1);

        week1UserA = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 1 - User A");
        week1UserA.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week1UserA.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        week1UserARecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week1UserARecur.setInterval(3);
        week1UserARecur.getDayList().add(new WeekDay(WeekDay.MO, 0));
        week1UserARecur.getDayList().add(new WeekDay(WeekDay.TU, 0));
        week1UserARecur.getDayList().add(new WeekDay(WeekDay.WE, 0));
        week1UserARecur.getDayList().add(new WeekDay(WeekDay.TH, 0));
        week1UserARecur.getDayList().add(new WeekDay(WeekDay.FR, 0));
        week1UserARecur.getHourList().add(new Integer(9));
        week1UserA.getProperties().add(new RRule(week1UserARecur));
        week1UserA.getProperties().add(new Uid("000001@modularity.net.au"));

        calStart.add(java.util.Calendar.WEEK_OF_YEAR, 1);
        calEnd.add(java.util.Calendar.WEEK_OF_YEAR, 1);

        week2UserB = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 2 - User B");
        week2UserB.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week2UserB.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        week2UserBRecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week2UserBRecur.setInterval(3);
        week2UserBRecur.getDayList().add(new WeekDay(WeekDay.MO, 0));
        week2UserBRecur.getDayList().add(new WeekDay(WeekDay.TU, 0));
        week2UserBRecur.getDayList().add(new WeekDay(WeekDay.WE, 0));
        week2UserBRecur.getDayList().add(new WeekDay(WeekDay.TH, 0));
        week2UserBRecur.getDayList().add(new WeekDay(WeekDay.FR, 0));
        week2UserBRecur.getHourList().add(new Integer(9));
        week2UserB.getProperties().add(new RRule(week2UserBRecur));
        week2UserB.getProperties().add(new Uid("000002@modularity.net.au"));

        calStart.add(java.util.Calendar.WEEK_OF_YEAR, 1);
        calEnd.add(java.util.Calendar.WEEK_OF_YEAR, 1);

        week3UserC = new VEvent(
                new Date(calStart.getTime().getTime()),
                new Dur(0, 8, 0, 0),
                "Week 3 - User C");
        week3UserC.getProperty(Property.DTSTART).getParameters().replace(tzParam);
        week3UserC.getProperty(Property.DTSTART).getParameters().replace(Value.DATE);

        week3UserCRecur = new Recur(
                Recur.WEEKLY,
                new Date(calEnd.getTime().getTime()));
        week3UserCRecur.setInterval(3);
        week3UserCRecur.getDayList().add(new WeekDay(WeekDay.MO, 0));
        week3UserCRecur.getDayList().add(new WeekDay(WeekDay.TU, 0));
        week3UserCRecur.getDayList().add(new WeekDay(WeekDay.WE, 0));
        week3UserCRecur.getDayList().add(new WeekDay(WeekDay.TH, 0));
        week3UserCRecur.getDayList().add(new WeekDay(WeekDay.FR, 0));
        week3UserCRecur.getHourList().add(new Integer(9));
        week3UserC.getProperties().add(new RRule(week3UserCRecur));
        week3UserC.getProperties().add(new Uid("000003@modularity.net.au"));

        calendar.getComponents().add(week1UserA);
        calendar.getComponents().add(week2UserB);
        calendar.getComponents().add(week3UserC);

        suite.addTest(new CalendarTest("testValid", calendar));

        // test invalid calendar..
//        calendar = new Calendar(baseCalendar);
//        calendar.getComponents().add(new Daylight());
//        suite.addTest(new CalendarTest("testInvalid", calendar));

        return suite;
    }

    /**
     * Builds a free/busy request for July 2006 against the fixture calendar
     * and logs the reply; only asserts that the fixture validates.
     *
     * @throws ValidationException
     */
    public void testGetEventDateRanges() throws ValidationException {

        // Add events, etc..
//        VTimeZone tz = registry.getTimeZone("Australia/Melbourne").getVTimeZone();
//        TzId tzParam = new TzId(tz.getProperty(Property.TZID).getValue());

        calendar.validate();

        // Start the logic testing.
        java.util.Calendar queryStartCal = java.util.Calendar.getInstance();
        java.util.Calendar queryEndCal = java.util.Calendar.getInstance();

        queryStartCal.set(2006, java.util.Calendar.JULY, 1, 9, 0, 0);
        queryEndCal.set(2006, java.util.Calendar.AUGUST, 1, 9, 0, 0);

        VFreeBusy request = new VFreeBusy(
                new DateTime(queryStartCal.getTime()),
                new DateTime(queryEndCal.getTime()));

        VFreeBusy reply = new VFreeBusy(request, calendar.getComponents());

        /*
         * SortedSet dateRangeSet =
         * calendar.getEventDateRanges(queryStartCal.getTime(),
         * queryEndCal.getTime());
         */
        log.info(reply);
    }
}
package com.hazelcast.simulator.provisioner;

import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.utils.helper.ExitStatusZeroException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static com.hazelcast.simulator.TestEnvironmentUtils.createAgentsFileWithLocalhost;
import static com.hazelcast.simulator.TestEnvironmentUtils.createCloudCredentialFiles;
import static com.hazelcast.simulator.TestEnvironmentUtils.createPublicPrivateKeyFiles;
import static com.hazelcast.simulator.TestEnvironmentUtils.deleteAgentsFile;
import static com.hazelcast.simulator.TestEnvironmentUtils.deleteCloudCredentialFiles;
import static com.hazelcast.simulator.TestEnvironmentUtils.deleteLogs;
import static com.hazelcast.simulator.TestEnvironmentUtils.deletePublicPrivateKeyFiles;
import static com.hazelcast.simulator.TestEnvironmentUtils.resetSecurityManager;
import static com.hazelcast.simulator.TestEnvironmentUtils.resetUserDir;
import static com.hazelcast.simulator.TestEnvironmentUtils.setDistributionUserDir;
import static com.hazelcast.simulator.TestEnvironmentUtils.setExitExceptionSecurityManagerWithStatusZero;
import static com.hazelcast.simulator.provisioner.ProvisionerCli.init;
import static com.hazelcast.simulator.provisioner.ProvisionerCli.run;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;

/**
 * Tests for {@link ProvisionerCli}: verifies that {@link ProvisionerCli#init}
 * builds a {@link Provisioner} from command-line arguments and that
 * {@link ProvisionerCli#run} dispatches each CLI option to the matching
 * {@link Provisioner} method (followed by {@code shutdown()}).
 */
public class ProvisionerCliTest {

    // Arguments accumulated per test; converted via getArgs() before each call.
    private final List<String> args = new ArrayList<String>();

    // Mocked by default for run() tests; replaced by a real instance in init() tests.
    private Provisioner provisioner = mock(Provisioner.class);

    @BeforeClass
    public static void setUp() {
        // System.exit(0) from the CLI is converted into ExitStatusZeroException
        // so the help/no-arg paths can be asserted on.
        setExitExceptionSecurityManagerWithStatusZero();
        setDistributionUserDir();
        createAgentsFileWithLocalhost();
        createCloudCredentialFiles();
        createPublicPrivateKeyFiles();
    }

    @AfterClass
    public static void tearDown() {
        resetSecurityManager();
        resetUserDir();
        deleteLogs();
        deleteAgentsFile();
        deleteCloudCredentialFiles();
        deletePublicPrivateKeyFiles();
    }

    @Test
    public void testInit() {
        provisioner = init(getArgs());

        ComponentRegistry componentRegistry = provisioner.getComponentRegistry();
        assertEquals(1, componentRegistry.agentCount());
        assertEquals("127.0.0.1", componentRegistry.getFirstAgent().getPublicAddress());
        // without --uploadHazelcast no JARs are resolved
        assertNull(provisioner.getHazelcastJARs());
    }

    @Test
    public void testInit_withHazelcastUpload() {
        args.add("--uploadHazelcast");
        provisioner = init(getArgs());

        ComponentRegistry componentRegistry = provisioner.getComponentRegistry();
        assertEquals(1, componentRegistry.agentCount());
        assertEquals("127.0.0.1", componentRegistry.getFirstAgent().getPublicAddress());
        assertNotNull(provisioner.getHazelcastJARs());
    }

    @Test
    public void testInit_withHazelcastUpload_withEnterpriseEnabled_withOutOfTheBox() {
        args.add("--uploadHazelcast");
        args.add("--enterpriseEnabled");
        args.add("true");
        provisioner = init(getArgs());

        ComponentRegistry componentRegistry = provisioner.getComponentRegistry();
        assertEquals(1, componentRegistry.agentCount());
        assertEquals("127.0.0.1", componentRegistry.getFirstAgent().getPublicAddress());
        // enterprise JARs cannot be provided out of the box
        assertNull(provisioner.getHazelcastJARs());
    }

    @Test(expected = ExitStatusZeroException.class)
    public void testRun_withoutArguments() {
        run(getArgs(), provisioner);
    }

    @Test(expected = ExitStatusZeroException.class)
    public void testRun_withHelp() {
        args.add("--help");
        run(getArgs(), provisioner);
    }

    @Test
    public void testRun_scaleZero() {
        args.add("--scale");
        args.add("0");

        run(getArgs(), provisioner);

        verify(provisioner).scale(0);
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_scalePositiveNumber() {
        args.add("--scale");
        args.add("10");

        run(getArgs(), provisioner);

        verify(provisioner).scale(10);
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_install() {
        args.add("--install");

        run(getArgs(), provisioner);

        verify(provisioner).installSimulator();
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_list() {
        args.add("--list");

        run(getArgs(), provisioner);

        verify(provisioner).listMachines();
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_download_defaultDirectory() {
        args.add("--download");

        run(getArgs(), provisioner);

        verify(provisioner).download("workers");
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_download_customDirectory() {
        args.add("--download");
        args.add("outputDir");

        run(getArgs(), provisioner);

        verify(provisioner).download("outputDir");
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_clean() {
        args.add("--clean");

        run(getArgs(), provisioner);

        verify(provisioner).clean();
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_kill() {
        args.add("--kill");

        run(getArgs(), provisioner);

        verify(provisioner).killJavaProcesses();
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    @Test
    public void testRun_terminate() {
        args.add("--terminate");

        run(getArgs(), provisioner);

        verify(provisioner).terminate();
        verify(provisioner).shutdown();
        verifyNoMoreInteractions(provisioner);
    }

    /**
     * Returns the accumulated arguments as an array.
     *
     * <p>Uses the idiomatic {@code toArray(new String[0])} form instead of
     * manually sizing and filling the array.
     */
    private String[] getArgs() {
        return args.toArray(new String[0]);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sling.ide.eclipse.ui.internal;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.net.URISyntaxException;

import org.apache.sling.ide.artifacts.EmbeddedArtifact;
import org.apache.sling.ide.artifacts.EmbeddedArtifactLocator;
import org.apache.sling.ide.eclipse.core.ISlingLaunchpadConfiguration;
import org.apache.sling.ide.eclipse.core.ISlingLaunchpadServer;
import org.apache.sling.ide.eclipse.core.ServerUtil;
import org.apache.sling.ide.eclipse.core.SetBundleInstallLocallyCommand;
import org.apache.sling.ide.eclipse.core.SetBundleVersionCommand;
import org.apache.sling.ide.osgi.OsgiClient;
import org.apache.sling.ide.osgi.OsgiClientException;
import org.apache.sling.ide.osgi.OsgiClientFactory;
import org.apache.sling.ide.transport.RepositoryInfo;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.layout.RowLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.forms.events.HyperlinkAdapter;
import org.eclipse.ui.forms.events.HyperlinkEvent;
import org.eclipse.ui.forms.widgets.ExpandableComposite;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.Hyperlink;
import org.eclipse.ui.forms.widgets.Section;
import org.eclipse.wst.server.ui.editor.ServerEditorSection;
import org.osgi.framework.Version;

/**
 * Server editor section that lets the user choose how bundles are installed on
 * a Sling launchpad server (via bundle upload or directly from the filesystem)
 * and that installs or updates the tooling support bundle on the server.
 *
 * <p>Not thread-safe; all methods are expected to be invoked on the SWT UI
 * thread, as is usual for editor sections.</p>
 */
public class InstallEditorSection extends ServerEditorSection {
    // NOTE(review): _updating and _listener appear unused within this class;
    // kept because they are protected and may be referenced by subclasses.
    protected boolean _updating;
    protected PropertyChangeListener _listener;

    private Button bundleLocalInstallButton;
    private Button quickLocalInstallButton;
    private Hyperlink installOrUpdateSupportBundleLink;
    private ISlingLaunchpadServer launchpadServer;
    private PropertyChangeListener serverListener;
    private Label supportBundleVersionLabel;
    private Composite actionArea;
    private EmbeddedArtifactLocator artifactLocator;
    private OsgiClientFactory osgiClientFactory;

    /**
     * Builds the "Install" section UI: two radio buttons selecting the bundle
     * install mechanism, plus a label/hyperlink row for the support bundle.
     */
    @Override
    public void createSection(Composite parent) {
        super.createSection(parent);
        FormToolkit toolkit = getFormToolkit(parent.getDisplay());

        Section section = toolkit.createSection(parent, ExpandableComposite.TWISTIE | ExpandableComposite.EXPANDED
                | ExpandableComposite.TITLE_BAR | Section.DESCRIPTION | ExpandableComposite.FOCUS_TITLE);
        section.setText("Install");
        section.setDescription("Specify how to install bundles on the server");
        section.setLayoutData(new GridData(GridData.FILL_HORIZONTAL | GridData.VERTICAL_ALIGN_FILL));

        // ports
        Composite composite = toolkit.createComposite(section);

        GridLayout layout = new GridLayout();
        layout.numColumns = 2;
        layout.marginHeight = 8;
        layout.marginWidth = 8;
        composite.setLayout(layout);
        GridData gridData = new GridData(GridData.VERTICAL_ALIGN_FILL | GridData.FILL_HORIZONTAL);
        composite.setLayoutData(gridData);
        toolkit.paintBordersFor(composite);
        section.setClient(composite);

        bundleLocalInstallButton = toolkit.createButton(composite, "Install bundles via bundle upload", SWT.RADIO);
        GridData data = new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1);
        bundleLocalInstallButton.setLayoutData(data);

        quickLocalInstallButton = toolkit.createButton(composite, "Install bundles directly from the filesystem",
                SWT.RADIO);
        data = new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1);
        quickLocalInstallButton.setLayoutData(data);

        actionArea = toolkit.createComposite(composite);
        RowLayout actionAreaLayout = new RowLayout();
        actionAreaLayout.center = true;
        actionArea.setLayout(actionAreaLayout);

        supportBundleVersionLabel = toolkit.createLabel(actionArea, "");
        installOrUpdateSupportBundleLink = toolkit.createHyperlink(actionArea, "(Install)", SWT.NONE);

        initialize();
    }

    /**
     * Wires a server property listener that keeps the radio buttons and the
     * support-bundle action area in sync with the server state, and resolves
     * the launchpad server adapter and the artifact/OSGi client factories.
     */
    @Override // was missing; this overrides ServerEditorSection.init
    public void init(IEditorSite site, IEditorInput input) {
        super.init(site, input);

        serverListener = new PropertyChangeListener() {

            @Override
            public void propertyChange(PropertyChangeEvent evt) {

                if (ISlingLaunchpadServer.PROP_INSTALL_LOCALLY.equals(evt.getPropertyName())) {
                    quickLocalInstallButton.setSelection((Boolean) evt.getNewValue());
                    bundleLocalInstallButton.setSelection(!(Boolean) evt.getNewValue());
                } else if (evt.getPropertyName().equals(
                        String.format(ISlingLaunchpadServer.PROP_BUNDLE_VERSION_FORMAT,
                                EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME))) {

                    Version launchpadVersion = new Version((String) evt.getNewValue());
                    Version embeddedVersion = new Version(artifactLocator.loadToolingSupportBundle().getVersion());

                    updateActionArea(launchpadVersion, embeddedVersion);
                }
            }
        };

        server.addPropertyChangeListener(serverListener);

        launchpadServer = (ISlingLaunchpadServer) server.getAdapter(ISlingLaunchpadServer.class);
        if (launchpadServer == null) {
            // TODO progress monitor
            launchpadServer = (ISlingLaunchpadServer) server.loadAdapter(ISlingLaunchpadServer.class,
                    new NullProgressMonitor());
        }

        artifactLocator = Activator.getDefault().getArtifactLocator();
        osgiClientFactory = Activator.getDefault().getOsgiClientFactory();
    }

    /**
     * Applies the current server configuration to the widgets and installs the
     * selection/hyperlink listeners. The hyperlink installs (or updates) the
     * tooling support bundle on the server inside a progress dialog.
     */
    private void initialize() {

        final ISlingLaunchpadConfiguration config = launchpadServer.getConfiguration();

        quickLocalInstallButton.setSelection(config.bundleInstallLocally());
        bundleLocalInstallButton.setSelection(!config.bundleInstallLocally());

        SelectionListener listener = new SelectionAdapter() {

            @Override
            public void widgetSelected(SelectionEvent e) {
                execute(new SetBundleInstallLocallyCommand(server, quickLocalInstallButton.getSelection()));
            }
        };

        quickLocalInstallButton.addSelectionListener(listener);
        bundleLocalInstallButton.addSelectionListener(listener);

        Version serverVersion = launchpadServer.getBundleVersion(EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME);
        final EmbeddedArtifact supportBundle = artifactLocator.loadToolingSupportBundle();

        final Version embeddedVersion = new Version(supportBundle.getVersion());

        updateActionArea(serverVersion, embeddedVersion);

        installOrUpdateSupportBundleLink.addHyperlinkListener(new HyperlinkAdapter() {

            @Override
            public void linkActivated(HyperlinkEvent e) {

                ProgressMonitorDialog dialog = new ProgressMonitorDialog(getShell());
                dialog.setCancelable(true);
                try {
                    dialog.run(true, false, new IRunnableWithProgress() {

                        @Override
                        public void run(IProgressMonitor monitor) throws InvocationTargetException,
                                InterruptedException {

                            final Version remoteVersion;

                            monitor.beginTask("Installing support bundle", 3);
                            // double-check, just in case
                            monitor.setTaskName("Getting remote bundle version");
                            Version deployedVersion;
                            final String message;
                            try {
                                RepositoryInfo repositoryInfo = ServerUtil.getRepositoryInfo(server.getOriginal(),
                                        monitor);
                                OsgiClient client = osgiClientFactory.createOsgiClient(repositoryInfo);
                                remoteVersion = client
                                        .getBundleVersion(EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME);
                                deployedVersion = remoteVersion;

                                monitor.worked(1);

                                if (remoteVersion != null && remoteVersion.compareTo(embeddedVersion) >= 0) {
                                    // version already up-to-date, due to bundle version
                                    // changing between startup check and now
                                    message = "Bundle is already installed and up to date";
                                } else {
                                    monitor.setTaskName("Installing bundle");
                                    try (InputStream contents = supportBundle.openInputStream()) {
                                        client.installBundle(contents, supportBundle.getName());
                                    }
                                    deployedVersion = embeddedVersion;
                                    message = "Bundle version " + embeddedVersion + " installed";
                                }
                                monitor.worked(1);
                                monitor.setTaskName("Updating server configuration");
                                final Version finalDeployedVersion = deployedVersion;
                                // server model updates must happen on the UI thread
                                Display.getDefault().syncExec(new Runnable() {
                                    @Override
                                    public void run() {
                                        execute(new SetBundleVersionCommand(server,
                                                EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME,
                                                finalDeployedVersion.toString()));
                                        try {
                                            server.save(false, new NullProgressMonitor());
                                        } catch (CoreException e) {
                                            Activator.getDefault().getLog().log(e.getStatus());
                                        }
                                    }
                                });
                                monitor.worked(1);
                            } catch (OsgiClientException | IOException | URISyntaxException e) {
                                throw new InvocationTargetException(e);
                            } finally {
                                monitor.done();
                            }

                            Display.getDefault().asyncExec(new Runnable() {
                                @Override
                                public void run() {
                                    MessageDialog.openInformation(getShell(), "Support bundle install operation",
                                            message);
                                }
                            });
                        }
                    });
                } catch (InvocationTargetException e1) {
                    IStatus status = new Status(Status.ERROR, Activator.PLUGIN_ID,
                            "Error while installing support bundle: " + e1.getTargetException().getMessage(),
                            e1.getTargetException());
                    ErrorDialog.openError(getShell(), "Error while installing support bundle", e1.getMessage(), status);
                } catch (InterruptedException e1) {
                    // re-assert the interrupt flag and bail out
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        });
    }

    /**
     * Updates the support-bundle status label and the install/reinstall link
     * depending on whether the server's bundle is missing/outdated or current.
     *
     * @param serverVersion the version installed on the server, possibly
     *        {@code null} when the bundle is not present
     * @param embeddedVersion the version shipped with the tooling, never
     *        {@code null}
     */
    private void updateActionArea(Version serverVersion, final Version embeddedVersion) {

        if (serverVersion == null || embeddedVersion.compareTo(serverVersion) > 0) {
            supportBundleVersionLabel
                    .setText("Installation support bundle is not present or outdated, deployment will not work");
            installOrUpdateSupportBundleLink.setText("(Install)");
            installOrUpdateSupportBundleLink.setEnabled(true);
        } else {
            supportBundleVersionLabel.setText("Installation support bundle is present and up to date.");
            installOrUpdateSupportBundleLink.setText("(Reinstall)");
            installOrUpdateSupportBundleLink.setEnabled(true);
        }

        actionArea.pack();
    }

    /*
     * (non-Javadoc)
     * 
     * @see org.eclipse.wst.server.ui.editor.ServerEditorSection#dispose()
     */
    @Override
    public void dispose() {
        if (server != null) {
            server.removePropertyChangeListener(serverListener);
        }

        super.dispose();
    }
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.schemaorg.core;

import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;

/** Interface of <a href="http://schema.org/SocialEvent">http://schema.org/SocialEvent</a>. */
public interface SocialEvent extends Event {

  /**
   * Builder interface of <a
   * href="http://schema.org/SocialEvent">http://schema.org/SocialEvent</a>.
   */
  public interface Builder extends Event.Builder {

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating.Builder value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);

    /** Add a value to property attendee. */
    Builder addAttendee(Organization value);

    /** Add a value to property attendee. */
    Builder addAttendee(Organization.Builder value);

    /** Add a value to property attendee. */
    Builder addAttendee(Person value);

    /** Add a value to property attendee. */
    Builder addAttendee(Person.Builder value);

    /** Add a value to property attendee. */
    Builder addAttendee(String value);

    /** Add a value to property attendees. */
    Builder addAttendees(Organization value);

    /** Add a value to property attendees. */
    Builder addAttendees(Organization.Builder value);

    /** Add a value to property attendees. */
    Builder addAttendees(Person value);

    /** Add a value to property attendees. */
    Builder addAttendees(Person.Builder value);

    /** Add a value to property attendees. */
    Builder addAttendees(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    /** Add a value to property description. */
    Builder addDescription(String value);

    /** Add a value to property doorTime. */
    Builder addDoorTime(DateTime value);

    /** Add a value to property doorTime. */
    Builder addDoorTime(String value);

    /** Add a value to property duration. */
    Builder addDuration(Duration value);

    /** Add a value to property duration. */
    Builder addDuration(Duration.Builder value);

    /** Add a value to property duration. */
    Builder addDuration(String value);

    /** Add a value to property endDate. */
    Builder addEndDate(Date value);

    /** Add a value to property endDate. */
    Builder addEndDate(String value);

    /** Add a value to property eventStatus. */
    Builder addEventStatus(EventStatusType value);

    /** Add a value to property eventStatus. */
    Builder addEventStatus(String value);

    /** Add a value to property image. */
    Builder addImage(ImageObject value);

    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);

    /** Add a value to property image. */
    Builder addImage(URL value);

    /** Add a value to property image. */
    Builder addImage(String value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Language value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Language.Builder value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Text value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(String value);

    /** Add a value to property location. */
    Builder addLocation(Place value);

    /** Add a value to property location. */
    Builder addLocation(Place.Builder value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress.Builder value);

    /** Add a value to property location. */
    Builder addLocation(Text value);

    /** Add a value to property location. */
    Builder addLocation(String value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    /** Add a value to property name. */
    Builder addName(String value);

    /** Add a value to property offers. */
    Builder addOffers(Offer value);

    /** Add a value to property offers. */
    Builder addOffers(Offer.Builder value);

    /** Add a value to property offers. */
    Builder addOffers(String value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Organization value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Organization.Builder value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Person value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Person.Builder value);

    /** Add a value to property organizer. */
    Builder addOrganizer(String value);

    /** Add a value to property performer. */
    Builder addPerformer(Organization value);

    /** Add a value to property performer. */
    Builder addPerformer(Organization.Builder value);

    /** Add a value to property performer. */
    Builder addPerformer(Person value);

    /** Add a value to property performer. */
    Builder addPerformer(Person.Builder value);

    /** Add a value to property performer. */
    Builder addPerformer(String value);

    /** Add a value to property performers. */
    Builder addPerformers(Organization value);

    /** Add a value to property performers. */
    Builder addPerformers(Organization.Builder value);

    /** Add a value to property performers. */
    Builder addPerformers(Person value);

    /** Add a value to property performers. */
    Builder addPerformers(Person.Builder value);

    /** Add a value to property performers. */
    Builder addPerformers(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);

    /** Add a value to property previousStartDate. */
    Builder addPreviousStartDate(Date value);

    /** Add a value to property previousStartDate. */
    Builder addPreviousStartDate(String value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(CreativeWork value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(CreativeWork.Builder value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(String value);

    /** Add a value to property review. */
    Builder addReview(Review value);

    /** Add a value to property review. */
    Builder addReview(Review.Builder value);

    /** Add a value to property review. */
    Builder addReview(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    /** Add a value to property sameAs. */
    Builder addSameAs(String value);

    /** Add a value to property startDate. */
    Builder addStartDate(Date value);

    /** Add a value to property startDate. */
    Builder addStartDate(String value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(Event value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(Event.Builder value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(String value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(Event value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(Event.Builder value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(String value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(Event value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(Event.Builder value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(String value);

    /** Add a value to property typicalAgeRange. */
    Builder addTypicalAgeRange(Text value);

    /** Add a value to property typicalAgeRange. */
    Builder addTypicalAgeRange(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    /** Add a value to property url. */
    Builder addUrl(String value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(CreativeWork value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(CreativeWork.Builder value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(String value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(CreativeWork value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(CreativeWork.Builder value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(String value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link SocialEvent} object. */
    SocialEvent build();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.xmlgraphics.image.loader.impl.imageio; import java.awt.Color; import java.awt.color.ICC_ColorSpace; import java.awt.color.ICC_Profile; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.ComponentColorModel; import java.awt.image.IndexColorModel; import java.awt.image.Raster; import java.awt.image.RenderedImage; import java.awt.image.WritableRaster; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.HashSet; import java.util.Iterator; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.zip.DataFormatException; import java.util.zip.Inflater; import javax.imageio.IIOException; import javax.imageio.ImageIO; import javax.imageio.ImageReadParam; import javax.imageio.ImageReader; import javax.imageio.ImageTypeSpecifier; import javax.imageio.metadata.IIOMetadata; import javax.imageio.metadata.IIOMetadataFormatImpl; import javax.imageio.metadata.IIOMetadataNode; import javax.imageio.spi.IIOServiceProvider; import javax.imageio.stream.ImageInputStream; import javax.xml.transform.Source; import org.w3c.dom.Element; import 
org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlgraphics.image.loader.Image;
import org.apache.xmlgraphics.image.loader.ImageException;
import org.apache.xmlgraphics.image.loader.ImageFlavor;
import org.apache.xmlgraphics.image.loader.ImageInfo;
import org.apache.xmlgraphics.image.loader.ImageSessionContext;
import org.apache.xmlgraphics.image.loader.impl.AbstractImageLoader;
import org.apache.xmlgraphics.image.loader.impl.ImageBuffered;
import org.apache.xmlgraphics.image.loader.impl.ImageRendered;
import org.apache.xmlgraphics.image.loader.util.ImageUtil;
import org.apache.xmlgraphics.io.XmlSourceUtil;
import org.apache.xmlgraphics.java2d.color.profile.ColorProfileUtil;

/**
 * An ImageLoader implementation based on ImageIO for loading bitmap images.
 * <p>
 * The loader iterates over all ImageIO readers claiming to support the stream,
 * keeping the first {@link IIOException} encountered so a meaningful error can
 * be reported if every reader fails. It also works around two known ImageIO
 * issues: CMYK JPEGs that standard readers refuse to decode (raster fallback),
 * and reader plug-ins known to ignore embedded ICC color profiles (profile is
 * re-applied manually from the image metadata).
 */
public class ImageLoaderImageIO extends AbstractImageLoader {

    /** logger */
    protected static final Log log = LogFactory.getLog(ImageLoaderImageIO.class);

    /** The flavor ({@code BUFFERED_IMAGE} or {@code RENDERED_IMAGE}) this loader produces. */
    private ImageFlavor targetFlavor;

    /** Native metadata format name for PNG, used to locate the iCCP chunk. */
    private static final String PNG_METADATA_NODE = "javax_imageio_png_1.0";

    /** Native metadata format name for JPEG, used to locate the app2ICC segment. */
    private static final String JPEG_METADATA_NODE = "javax_imageio_jpeg_image_1.0";

    /**
     * Descriptions ("description/vendor/version") of reader providers known to
     * ignore embedded ICC profiles; populated in the static initializer below.
     */
    private static final Set PROVIDERS_IGNORING_ICC = new HashSet();

    /**
     * Main constructor.
     * @param targetFlavor the target flavor (must be BUFFERED_IMAGE or RENDERED_IMAGE)
     */
    public ImageLoaderImageIO(ImageFlavor targetFlavor) {
        if (!(ImageFlavor.BUFFERED_IMAGE.equals(targetFlavor)
                || ImageFlavor.RENDERED_IMAGE.equals(targetFlavor))) {
            throw new IllegalArgumentException("Unsupported target ImageFlavor: " + targetFlavor);
        }
        this.targetFlavor = targetFlavor;
    }

    /** {@inheritDoc} */
    public ImageFlavor getTargetFlavor() {
        return this.targetFlavor;
    }

    /** {@inheritDoc} */
    public Image loadImage(ImageInfo info, Map hints, ImageSessionContext session)
            throws ImageException, IOException {
        RenderedImage imageData = null;
        // First decode failure is preserved so it can be rethrown if no reader succeeds.
        IIOException firstException = null;

        // Metadata may already have been gathered during preloading; if so, tell
        // the reader not to parse it again.
        IIOMetadata iiometa = (IIOMetadata)info.getCustomObjects().get(
                ImageIOUtil.IMAGEIO_METADATA);
        boolean ignoreMetadata = (iiometa != null);
        boolean providerIgnoresICC = false;

        Source src = session.needSource(info.getOriginalURI());
        ImageInputStream imgStream = ImageUtil.needImageInputStream(src);
        try {
            // Try each registered reader that claims to understand this stream.
            Iterator iter = ImageIO.getImageReaders(imgStream);
            while (iter.hasNext()) {
                ImageReader reader = (ImageReader)iter.next();
                try {
                    // mark() so the stream can be rewound for the next reader on failure.
                    imgStream.mark();
                    reader.setInput(imgStream, false, ignoreMetadata);
                    ImageReadParam param = getParam(reader, hints);
                    final int pageIndex = ImageUtil.needPageIndexFromURI(info.getOriginalURI());
                    try {
                        // if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) {
                        imageData = reader.read(pageIndex, param);
                        // } else {
                        // imageData = reader.read(pageIndex, param);
                        //imageData = reader.readAsRenderedImage(pageIndex, param);
                        //TODO Reenable the above when proper listeners are implemented
                        //to react to late pixel population (so the stream can be closed
                        //properly).
                        // }
                        if (iiometa == null) {
                            iiometa = reader.getImageMetadata(pageIndex);
                        }
                        providerIgnoresICC = checkProviderIgnoresICC(reader
                                .getOriginatingProvider());
                        break; //Quit early, we have the image
                    } catch (IndexOutOfBoundsException indexe) {
                        throw new ImageException("Page does not exist. Invalid image index: "
                                + pageIndex);
                    } catch (IllegalArgumentException iae) {
                        //Some codecs like com.sun.imageio.plugins.wbmp.WBMPImageReader throw
                        //IllegalArgumentExceptions when they have trouble parsing the image.
                        throw new ImageException("Error loading image using ImageIO codec", iae);
                    } catch (IIOException iioe) {
                        // Remember only the first failure; later ones are just logged.
                        if (firstException == null) {
                            firstException = iioe;
                        } else {
                            log.debug("non-first error loading image: " + iioe.getMessage());
                        }
                    }
                    // Only reached after an IIOException above (the other catches
                    // throw, and success breaks out of the loop).
                    try {
                        //Try fallback for CMYK images
                        BufferedImage bi = getFallbackBufferedImage(reader, pageIndex, param);
                        imageData = bi;
                        firstException = null; //Clear exception after successful fallback attempt
                        break;
                    } catch (IIOException iioe) {
                        //ignore
                    }

                    // Rewind so the next candidate reader sees the stream from the mark.
                    imgStream.reset();
                } finally {
                    reader.dispose();
                }
            }
        } finally {
            XmlSourceUtil.closeQuietly(src);
            //TODO Some codecs may do late reading.
        }
        if (firstException != null) {
            throw new ImageException("Error while loading image: "
                    + firstException.getMessage(), firstException);
        }
        if (imageData == null) {
            throw new ImageException("No ImageIO ImageReader found .");
        }

        ColorModel cm = imageData.getColorModel();

        Color transparentColor = null;
        if (cm instanceof IndexColorModel) {
            //transparent color will be extracted later from the image
        } else {
            if (providerIgnoresICC && cm instanceof ComponentColorModel) {
                // Apply ICC Profile to Image by creating a new image with a new
                // color model.
                ICC_Profile iccProf = tryToExctractICCProfile(iiometa);
                if (iccProf != null) {
                    ColorModel cm2 = new ComponentColorModel(
                            new ICC_ColorSpace(iccProf), cm.hasAlpha(), cm
                                    .isAlphaPremultiplied(), cm
                                    .getTransparency(), cm.getTransferType());
                    WritableRaster wr = Raster.createWritableRaster(imageData
                            .getSampleModel(), null);
                    imageData.copyData(wr);
                    try {
                        BufferedImage bi = new BufferedImage(cm2, wr, cm2
                                .isAlphaPremultiplied(), null);
                        imageData = bi;
                        cm = cm2;
                    } catch (IllegalArgumentException iae) {
                        // Raster and new color model are incompatible; keep the
                        // original image and surface a warning instead of failing.
                        String msg = "Image " + info.getOriginalURI()
                                + " has an incompatible color profile."
                                + " The color profile will be ignored."
                                + "\nColor model of loaded bitmap: " + cm
                                + "\nColor model of color profile: " + cm2;
                        if (info.getCustomObjects().get("warningincustomobject") != null) {
                            info.getCustomObjects().put("warning", msg);
                        } else {
                            log.warn(msg);
                        }
                    }
                }
            }

            // ImageIOUtil.dumpMetadataToSystemOut(iiometa);
            // Retrieve the transparent color from the metadata
            if (iiometa != null && iiometa.isStandardMetadataFormatSupported()) {
                Element metanode = (Element)iiometa.getAsTree(
                        IIOMetadataFormatImpl.standardMetadataFormatName);
                Element dim = ImageIOUtil.getChild(metanode, "Transparency");
                if (dim != null) {
                    Element child;
                    child = ImageIOUtil.getChild(dim, "TransparentColor");
                    if (child != null) {
                        String value = child.getAttribute("value");
                        if (value.length() == 0) {
                            //ignore
                        } else if (cm.getNumColorComponents() == 1) {
                            // Grayscale: single component replicated to RGB.
                            int gray = Integer.parseInt(value);
                            transparentColor = new Color(gray, gray, gray);
                        } else {
                            // RGB: three whitespace-separated component values.
                            StringTokenizer st = new StringTokenizer(value);
                            transparentColor = new Color(
                                    Integer.parseInt(st.nextToken()),
                                    Integer.parseInt(st.nextToken()),
                                    Integer.parseInt(st.nextToken()));
                        }
                    }
                }
            }
        }

        if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) {
            return new ImageBuffered(info, (BufferedImage)imageData, transparentColor);
        } else {
            return new ImageRendered(info, imageData, transparentColor);
        }
    }

    /**
     * Builds the read parameters for a reader. When the "CMYK" hint is set,
     * selects the first 4-component image type so CMYK data is read as-is.
     * @param reader the candidate reader
     * @param hints load hints (may be null)
     * @return a suitable ImageReadParam (the reader default if no CMYK hint)
     * @throws IOException if querying the reader's image types fails
     */
    private ImageReadParam getParam(ImageReader reader, Map hints) throws IOException {
        if (hints != null && Boolean.TRUE.equals(hints.get("CMYK"))) {
            Iterator<ImageTypeSpecifier> types = reader.getImageTypes(0);
            while (types.hasNext()) {
                ImageTypeSpecifier type = types.next();
                if (type.getNumComponents() == 4) {
                    ImageReadParam param = new ImageReadParam();
                    param.setDestinationType(type);
                    return param;
                }
            }
        }
        return reader.getDefaultReadParam();
    }

    /**
     * Checks if the provider ignores the ICC color profile. This method will
     * assume providers work correctly, and return false if the provider is
     * unknown. This ensures backward-compatibility.
     *
     * @param provider
     *            the ImageIO Provider
     * @return true if we know the provider to be broken and ignore ICC
     *         profiles.
     */
    private boolean checkProviderIgnoresICC(IIOServiceProvider provider) {
        // TODO: This information could be cached.
        // Key format must match the entries added in the static initializer:
        // "description/vendor/version".
        StringBuffer b = new StringBuffer(provider.getDescription(Locale.ENGLISH));
        b.append('/').append(provider.getVendorName());
        b.append('/').append(provider.getVersion());
        if (log.isDebugEnabled()) {
            log.debug("Image Provider: " + b.toString());
        }
        return ImageLoaderImageIO.PROVIDERS_IGNORING_ICC.contains(b.toString());
    }

    /**
     * Extract ICC Profile from ImageIO Metadata. This method currently only
     * supports PNG and JPEG metadata.
     *
     * @param iiometa
     *            The ImageIO Metadata
     * @return an ICC Profile or null.
     */
    private ICC_Profile tryToExctractICCProfile(IIOMetadata iiometa) {
        ICC_Profile iccProf = null;
        String[] supportedFormats = iiometa.getMetadataFormatNames();
        for (String format : supportedFormats) {
            Element root = (Element) iiometa.getAsTree(format);
            if (PNG_METADATA_NODE.equals(format)) {
                iccProf = this
                        .tryToExctractICCProfileFromPNGMetadataNode(root);
            } else if (JPEG_METADATA_NODE.equals(format)) {
                iccProf = this.tryToExctractICCProfileFromJPEGMetadataNode(root);
            }
        }
        return iccProf;
    }

    /**
     * Extracts an ICC profile from a PNG metadata tree (iCCP chunk). The chunk
     * payload is deflate-compressed, so it is inflated before being handed to
     * {@link ColorProfileUtil}.
     * @param pngNode root of the PNG native metadata tree
     * @return the embedded ICC profile, or null if absent or unreadable
     */
    private ICC_Profile tryToExctractICCProfileFromPNGMetadataNode(
            Element pngNode) {
        ICC_Profile iccProf = null;
        Element iccpNode = ImageIOUtil.getChild(pngNode, "iCCP");
        if (iccpNode instanceof IIOMetadataNode) {
            IIOMetadataNode imn = (IIOMetadataNode) iccpNode;
            byte[] prof = (byte[]) imn.getUserObject();
            String comp = imn.getAttribute("compressionMethod");
            if ("deflate".equalsIgnoreCase(comp)) {
                Inflater decompresser = new Inflater();
                decompresser.setInput(prof);
                byte[] result = new byte[100];
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                boolean failed = false;
                while (!decompresser.finished() && !failed) {
                    try {
                        int resultLength = decompresser.inflate(result);
                        bos.write(result, 0, resultLength);
                        if (resultLength == 0) {
                            // this means more data or an external dictionary is
                            // needed. Both of which are not available, so we
                            // fail.
                            // NOTE(review): log message says "deflate" but this
                            // is an inflate failure; kept as-is (behavior).
                            log.debug("Failed to deflate ICC Profile");
                            failed = true;
                        }
                    } catch (DataFormatException e) {
                        log.debug("Failed to deflate ICC Profile", e);
                        failed = true;
                    }
                }
                decompresser.end();
                try {
                    iccProf = ColorProfileUtil.getICC_Profile(bos.toByteArray());
                } catch (IllegalArgumentException e) {
                    log.debug("Failed to interpret embedded ICC Profile", e);
                    iccProf = null;
                }
            }
        }
        return iccProf;
    }

    /**
     * Extracts an ICC profile from a JPEG metadata tree (app0JFIF/app2ICC
     * marker segment). Unlike PNG, the profile is stored uncompressed.
     * @param jpgNode root of the JPEG native metadata tree
     * @return the embedded ICC profile, or null if absent
     */
    private ICC_Profile tryToExctractICCProfileFromJPEGMetadataNode(
            Element jpgNode) {
        ICC_Profile iccProf = null;
        Element jfifNode = ImageIOUtil.getChild(jpgNode, "app0JFIF");
        if (jfifNode != null) {
            Element app2iccNode = ImageIOUtil.getChild(jfifNode, "app2ICC");
            if (app2iccNode instanceof IIOMetadataNode) {
                IIOMetadataNode imn = (IIOMetadataNode) app2iccNode;
                iccProf = (ICC_Profile) imn.getUserObject();
            }
        }
        return iccProf;
    }

    /**
     * Fallback decode path for images (typically CMYK JPEGs) that the standard
     * readers cannot decode via {@code read()}: reads the raw raster without
     * color conversion and wraps it in a BufferedImage of a guessed type.
     * @param reader the reader that failed the normal read
     * @param pageIndex the page/image index to read
     * @param param the read parameters
     * @return a BufferedImage built from the raw raster
     * @throws IOException if reading the raster fails
     */
    private BufferedImage getFallbackBufferedImage(ImageReader reader,
            int pageIndex, ImageReadParam param) throws IOException {
        //Work-around found at: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4799903
        //There are some additional ideas there if someone wants to go further.

        // Try reading a Raster (no color conversion).
        Raster raster = reader.readRaster(pageIndex, param);

        // Arbitrarily select a BufferedImage type.
        int imageType;
        int numBands = raster.getNumBands();
        switch (numBands) {
        case 1:
            imageType = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            imageType = BufferedImage.TYPE_3BYTE_BGR;
            break;
        case 4:
            imageType = BufferedImage.TYPE_4BYTE_ABGR;
            break;
        default:
            throw new UnsupportedOperationException("Unsupported band count: " + numBands);
        }

        // Create a BufferedImage.
        BufferedImage bi = new BufferedImage(raster.getWidth(),
                raster.getHeight(), imageType);

        // Set the image data.
        bi.getRaster().setRect(raster);

        return bi;
    }

    static {
        // TODO: This list could be kept in a resource file.
        // Entries are "description/vendor/version" keys matching the format
        // built in checkProviderIgnoresICC().
        PROVIDERS_IGNORING_ICC
                .add("Standard PNG image reader/Sun Microsystems, Inc./1.0");
        PROVIDERS_IGNORING_ICC
                .add("Standard PNG image reader/Oracle Corporation/1.0");
        PROVIDERS_IGNORING_ICC
                .add("Standard JPEG Image Reader/Sun Microsystems, Inc./0.5");
        PROVIDERS_IGNORING_ICC
                .add("Standard JPEG Image Reader/Oracle Corporation/0.5");
    }

}
/**
 * Copyright (C) 2015 Greg Brandt (brandt.greg@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.brandtg.switchboard;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.brandtg.switchboard.util.DropWizardApplicationRunner;
import com.google.code.or.common.glossary.Pair;
import com.google.code.or.common.glossary.Row;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.net.InetSocketAddress;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Integration tests for {@code MysqlLogServer}. Requires a local MySQL server
 * on port 3306 with a {@code test} database reachable as root with an empty
 * password; each test writes rows, then polls the log server's HTTP API (or a
 * {@code MysqlEventListener}) until the expected binlog entries appear or a
 * timeout elapses.
 */
public class TestMysqlLogServer {
  private HttpClient httpClient;
  // Interval between polls and overall deadline, both in milliseconds.
  private long pollMillis;
  private long timeoutMillis;
  private String jdbc;
  private DropWizardApplicationRunner.DropWizardServer server;
  private HttpHost serverAddress;
  private HttpHost adminAddress;

  @BeforeClass
  public void beforeClass() throws Exception {
    httpClient = HttpClients.createDefault();
    pollMillis = 1000;
    timeoutMillis = 30000;
    jdbc = "jdbc:mysql://localhost:3306/test";
    serverAddress = new HttpHost("localhost", 8080);
    adminAddress = new HttpHost("localhost", 8081);
  }

  @BeforeMethod
  public void beforeMethod() throws Exception {
    // Reset and setup local MySQL
    // RESET MASTER wipes the binlog so each test starts from index 0.
    try (Connection conn = DriverManager.getConnection(jdbc, "root", "")) {
      Statement stmt = conn.createStatement();
      stmt.execute("RESET MASTER");
      stmt.execute("GRANT ALL ON *.* TO 'switchboard'@'localhost' IDENTIFIED BY 'switchboard'");
      stmt.execute("DROP TABLE IF EXISTS simple");
      stmt.execute("CREATE TABLE simple (k INT, v INT)");
    }

    // Start server
    MysqlLogServerConfig config = new MysqlLogServerConfig();
    server = DropWizardApplicationRunner.createServer(config, MysqlLogServer.class);
    server.start();
  }

  @AfterMethod
  public void afterMethod() throws Exception {
    server.stop();
  }

  // Stops (if running) and restarts the log server with a fresh config.
  // NOTE(review): currently unused by the visible tests; kept as a utility.
  private void resetServer() throws Exception {
    if (server != null) {
      server.stop();
    }
    MysqlLogServerConfig config = new MysqlLogServerConfig();
    server = DropWizardApplicationRunner.createServer(config, MysqlLogServer.class);
    server.start();
  }

  /**
   * Polls {@code host}/{@code uri} until the response contains at least
   * {@code count} log regions and the highest region index reaches
   * {@code highWaterMark}, or throws after {@code timeoutMillis}.
   */
  private void pollAndCheck(HttpHost host, String uri, long count, long highWaterMark)
      throws Exception {
    long startTime = System.currentTimeMillis();
    long currentTime;
    do {
      // Query server
      HttpGet req = new HttpGet(uri);
      HttpResponse res = httpClient.execute(host, req);
      try {
        if (res.getStatusLine().getStatusCode() == 200) {
          ObjectMapper mapper = new ObjectMapper();
          LogRegionResponse data
              = mapper.readValue(res.getEntity().getContent(), LogRegionResponse.class);

          // Get all sorted indexes present in response
          List<Long> indexes = new ArrayList<>();
          for (LogRegion logRegion : data.getLogRegions()) {
            indexes.add(logRegion.getIndex());
          }
          Collections.sort(indexes);

          // Check that we've have expected count and reached high watermark
          if (indexes.size() >= count && indexes.get(indexes.size() - 1) >= highWaterMark) {
            return;
          }
        }
      } finally {
        // Always drain the entity so the pooled connection can be reused.
        if (res != null) {
          EntityUtils.consume(res.getEntity());
        }
      }

      // Wait until next time
      Thread.sleep(pollMillis);
      currentTime = System.currentTimeMillis();
    } while (currentTime - startTime < timeoutMillis);

    // Exited, so timed out
    throw new IllegalStateException("Timed out while waiting for " + uri);
  }

  @Test
  public void testSimpleWrites() throws Exception {
    try (Connection conn = DriverManager.getConnection(jdbc, "root", "")) {
      // Write some rows, so we have binlog entries
      PreparedStatement pstmt = conn.prepareStatement("INSERT INTO simple VALUES(?, ?)");
      for (int i = 0; i < 10; i++) {
        pstmt.setInt(1, i);
        pstmt.setInt(2, i);
        pstmt.execute();
      }
    }

    pollAndCheck(serverAddress, "/log/test/0", 10, 10);
  }

  @Test
  public void testRotateBinlog() throws Exception {
    try (Connection conn = DriverManager.getConnection(jdbc, "root", "")) {
      // Write some rows, so we have binlog entries
      PreparedStatement pstmt = conn.prepareStatement("INSERT INTO simple VALUES(?, ?)");
      for (int i = 0; i < 10; i++) {
        pstmt.setInt(1, i);
        pstmt.setInt(2, i);
        pstmt.execute();
      }

      // Rotate logs
      // FLUSH LOGS forces a binlog rotation mid-test; the server must keep
      // serving a contiguous index range across the rotation.
      Statement stmt = conn.createStatement();
      stmt.execute("FLUSH LOGS");

      // Write more
      for (int i = 10; i < 20; i++) {
        pstmt.setInt(1, i);
        pstmt.setInt(2, i);
        pstmt.execute();
      }
    }

    pollAndCheck(serverAddress, "/log/test/0?count=100", 20, 20);
  }

  @Test
  public void testMysqlEventListener() throws Exception {
    try (Connection conn = DriverManager.getConnection(jdbc, "root", "")) {
      // Write some rows, so we have binlog entries
      PreparedStatement pstmt = conn.prepareStatement("INSERT INTO simple VALUES(?, ?)");
      for (int i = 0; i < 10; i++) {
        pstmt.setInt(1, i);
        pstmt.setInt(2, i);
        pstmt.execute();
      }
    }

    // Counters updated from the listener callback thread; read in the poll loop.
    final AtomicInteger insertCount = new AtomicInteger();
    final AtomicInteger beginCount = new AtomicInteger();
    final AtomicInteger commitCount = new AtomicInteger();
    final AtomicInteger rollbackCount = new AtomicInteger();

    InetSocketAddress sourceAddress = new InetSocketAddress(8080);
    InetSocketAddress sinkAddress = new InetSocketAddress(9090);
    MysqlEventListener eventListener = new MysqlEventListener("test",
        sourceAddress, sinkAddress) {
      @Override
      public void onBegin(UUID sourceId, long transactionId) {
        beginCount.incrementAndGet();
      }

      @Override
      public void onInsert(List<Row> rows) {
        insertCount.incrementAndGet();
      }

      @Override
      public void onUpdate(List<Pair<Row>> rows) {
      }

      @Override
      public void onDelete(List<Row> rows) {
      }

      @Override
      public void onCommit() {
        commitCount.incrementAndGet();
      }

      @Override
      public void onRollback() {
        rollbackCount.incrementAndGet();
      }
    };

    try {
      eventListener.start();

      long startTime = System.currentTimeMillis();
      long currentTime = startTime;
      do {
        // Once we've seen all writes, check expected state
        // (each single-row INSERT is its own transaction: 10 begins/commits).
        if (insertCount.get() == 10) {
          Assert.assertEquals(beginCount.get(), 10);
          Assert.assertEquals(commitCount.get(), 10);
          Assert.assertEquals(rollbackCount.get(), 0);
          return;
        }
        Thread.sleep(pollMillis);
        currentTime = System.currentTimeMillis();
      } while (currentTime - startTime < timeoutMillis);
    } finally {
      eventListener.shutdown();
    }

    Assert.fail("Timed out while polling");
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package dk.statsbiblioteket.summa.control.server.deploy; import dk.statsbiblioteket.summa.common.configuration.Configuration; import dk.statsbiblioteket.summa.control.api.BadConfigurationException; import dk.statsbiblioteket.summa.control.api.ClientConnection; import dk.statsbiblioteket.summa.control.api.ClientDeployer; import dk.statsbiblioteket.summa.control.api.ClientDeploymentException; import dk.statsbiblioteket.summa.control.api.feedback.Feedback; import dk.statsbiblioteket.summa.control.api.feedback.Message; import dk.statsbiblioteket.summa.control.api.feedback.VoidFeedback; import dk.statsbiblioteket.summa.control.bundle.BundleSpecBuilder; import dk.statsbiblioteket.summa.control.bundle.BundleStub; import dk.statsbiblioteket.util.Files; import dk.statsbiblioteket.util.Strings; import dk.statsbiblioteket.util.Zips; import dk.statsbiblioteket.util.console.ProcessRunner; import dk.statsbiblioteket.util.qa.QAInfo; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.List; /** * FIXME: Missing class docs for LocalDeployer * * @author Mikkel Kamstrup <mailto:mke@statsbiblioteket.dk> * @author Henrik Kirk <mailto:hbk@statsbiblioteket.dk> * @since Sep 2, 2009 */ @QAInfo(level = QAInfo.Level.NORMAL, state = QAInfo.State.IN_DEVELOPMENT, author = 
"mke, hbk", comment = "Needs JavaDoc") public class LocalDeployer implements ClientDeployer { private static final Log log = LogFactory.getLog(SSHDeployer.class); private static final int START_TIMEOUT = 7000; private String destination; private String source; private String clientId; private String confLocation; protected Configuration configuration; public LocalDeployer(Configuration conf) { destination = conf.getString(CONF_BASEPATH, "summa-control"); source = conf.getString(CONF_DEPLOYER_BUNDLE_FILE); clientId = conf.getString(CONF_INSTANCE_ID); confLocation = conf.getString(CONF_CLIENT_CONF, "configuration.xml"); destination += File.separator + clientId; } @Override public void deploy(Feedback feedback) throws Exception { File sourceFile = new File(source); //File destFile = new File(destination); /* Calculate destination archive path */ String archive = sourceFile.getName(); String archivePath = destination + File.separator + archive; File archiveFile = new File(archivePath); log.info("Deploying client"); if (source == null) { throw new BadConfigurationException(CONF_DEPLOYER_BUNDLE_FILE + " not set"); } /* Make sure target dir exists */ makeDestination(); /* Copy package to destination */ log.trace("Deploying from " + source + " to " + destination); Files.copy(sourceFile, archiveFile, false); log.debug("Deployed from " + source + " to " + destination); /* Unpack */ log.trace("Unpacking '" + archivePath); Zips.unzip(archivePath, destination, false); log.debug("Unpacked " + archivePath + " to " + destination); /* Clean up */ log.trace("Deleting " + archivePath); Files.delete(archivePath); log.debug("Deleted '" + archivePath); ensurePermissions(feedback); log.info("Finished deploy of " + source + " to " + destination); } /** * Check to see whether the destination folder exists. If it doesn't, try * to create it. * @throws IOException if the folder could not be created. 
*/ private void makeDestination() throws Exception { File dest = new File(destination); if (dest.isFile()) { throw new IOException("Target destination is a regular file: " + destination); } if(!dest.exists() && !dest.mkdirs()) { throw new IOException("Target destination '" + destination + "' could not be created"); } } /** * Set file permissions as described in the ClientDeployer interface * @param feedback the Feedback object. * @throws IOException If an error occurs. */ private void ensurePermissions(Feedback feedback) throws IOException { log.debug("Setting file permissions for '" + destination + "'"); /* The 'cd destination part' needs to be added a single arg */ List<String> command = Arrays.asList( "chmod", "a=,u=r", BundleStub.POLICY_FILE, BundleStub.JMX_ACCESS_FILE, BundleStub.JMX_PASSWORD_FILE); ProcessRunner runner = new ProcessRunner(command); runner.setStartingDir(new File(destination)); log.trace("Command to ensure permissions:\n" + Strings.join(command, " ")); String error = null; try { runner.run(); if (runner.getReturnCode() != 0) { error = "Failed to set file permissions on '" + destination + "'. 
Got " + runner.getReturnCode() + " and message:\n\t" + runner.getProcessErrorAsString(); } } catch(Exception e) { error = "Failed to run:\n" + Strings.join(Arrays.asList(command), " ") + "\n" + "Got: " + e.getMessage() + "\n\n\t" + runner.getProcessErrorAsString(); log.error(error, e); } if (error != null) { log.error(error); feedback.putMessage(new Message(Message.MESSAGE_ALERT, error)); throw new ClientDeploymentException(error); } log.trace("File permissions fixed for client '" + clientId + "'"); } @Override public void start(Feedback feedback) throws Exception { log.info("Starting service"); /* Read the bundle spec */ File bdlFile = new File(source); log.trace("Creating InputStream for bdlFile '" + bdlFile + "', client.xml"); InputStream clientSpec; try { clientSpec = new ByteArrayInputStream (Zips.getZipEntry(bdlFile, "client.xml")); } catch(IOException e) { throw new IOException("Could not create InputStream for bdlFile '" + bdlFile + "', client.xml", e); } log.trace("Opening clientSpec with BundleSpecBuilder"); BundleSpecBuilder builder = BundleSpecBuilder.open(clientSpec); log.trace("Getting BundleStub from BundleSpecBuilder"); BundleStub stub = builder.getStub(); /* Add properties to the command line as we are obliged to */ log.trace("Adding properties to command line"); stub.addSystemProperty(ClientConnection.CONF_CLIENT_ID, clientId); log.debug("Building command line for " + clientId + " and configuration server " + confLocation); /* Exec the command line */ List<String> cmdLine = stub.buildCommandLine(); ProcessRunner runner = new ProcessRunner(cmdLine); runner.setStartingDir(new File(destination)); String error = null; log.debug("Starting instance '" + clientId + "' with:\n" + Strings.join(cmdLine, " ")); try { Thread processThread = new Thread(runner, "LocalDeployer Thread"); processThread.setDaemon(true); // Allow JVM to exit processThread.start(); /* Wait until the deployment is done or times out */ processThread.join(START_TIMEOUT); if 
(runner.isTimedOut()) { String errorMsg = runner.getProcessErrorAsString(); error = "Start request for client '" + clientId + "' with configuration server " + confLocation + ". Timed out" + (errorMsg != null ? ":\n" + errorMsg : ""); } else if (processThread.isAlive()) { /* The process is still running. This is probably a good sign, * but we have no way to be sure */ log.debug("Process thread for '" + clientId + "' still " + "running. Let's hope it is doing good"); } else if (runner.getReturnCode() != 0) { error = "Could not run client '" + clientId + "' with configuration server " + confLocation + ". Got return value " + runner.getReturnCode() + " and message " + runner.getProcessErrorAsString(); } } catch(Exception e) { error = "Could not start client '" + clientId + "' with configuration server " + confLocation + ": " + runner.getProcessErrorAsString(); log.error("Exception in start: " + e.getMessage(), e); } if (error != null) { log.error("Error when starting client: " + error); feedback.putMessage(new Message(Message.MESSAGE_ALERT, error)); throw new Exception(error); } log.info("Finished start of '" + clientId + "' with configuration server " + confLocation + ": " + runner.getProcessErrorAsString()); } @Override public String getTargetHost() { return "localhost"; } public static void main (String[] args) throws Exception { ClientDeployer d = new LocalDeployer( Configuration.newMemoryBased("summa.control.deployer.target", "localhost:222", "summa.control.deployer.bundle.file", "/home/mke/summa-control/repository/test-client-1.bundle", "summa.control.client.id", "t3")); d.start(new VoidFeedback()); } }
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.chaos.fx.cnbeta.widget;

import android.support.annotation.NonNull;
import android.view.animation.Interpolator;

/**
 * This class offers a very small subset of {@code ValueAnimator}'s API, but works pre-v11 too.
 * <p>
 * Every public method simply delegates to a version-specific {@link Impl} backend;
 * listener objects are adapted to the backend's proxy interfaces so that callbacks
 * receive this compat wrapper rather than the backend animator.
 * <p>
 * You should not instantiate this directly. Instead use {@code ViewUtils.createAnimator()}.
 */
class ValueAnimatorCompat {

    interface AnimatorUpdateListener {
        /**
         * <p>Notifies the occurrence of another frame of the animation.</p>
         *
         * @param animator The animation whose frame was updated.
         */
        void onAnimationUpdate(ValueAnimatorCompat animator);
    }

    /**
     * An animation listener receives notifications from an animation.
     * Notifications indicate animation related events, such as the end or the
     * repetition of the animation.
     */
    interface AnimatorListener {
        /**
         * <p>Notifies the start of the animation.</p>
         *
         * @param animator The started animation.
         */
        void onAnimationStart(ValueAnimatorCompat animator);

        /**
         * <p>Notifies the end of the animation. This callback is not invoked
         * for animations with repeat count set to INFINITE.</p>
         *
         * @param animator The animation which reached its end.
         */
        void onAnimationEnd(ValueAnimatorCompat animator);

        /**
         * <p>Notifies the cancellation of the animation. This callback is not invoked
         * for animations with repeat count set to INFINITE.</p>
         *
         * @param animator The animation which was canceled.
         */
        void onAnimationCancel(ValueAnimatorCompat animator);
    }

    /** No-op base implementation so callers can override only the callbacks they need. */
    static class AnimatorListenerAdapter implements AnimatorListener {
        @Override
        public void onAnimationStart(ValueAnimatorCompat animator) {
        }

        @Override
        public void onAnimationEnd(ValueAnimatorCompat animator) {
        }

        @Override
        public void onAnimationCancel(ValueAnimatorCompat animator) {
        }
    }

    /** Factory for version-appropriate animator instances. */
    interface Creator {
        @NonNull
        ValueAnimatorCompat createAnimator();
    }

    /**
     * Backend contract implemented per platform version. The proxy interfaces
     * mirror the public listener interfaces but without the animator argument,
     * which the outer class supplies when adapting.
     */
    static abstract class Impl {
        interface AnimatorUpdateListenerProxy {
            void onAnimationUpdate();
        }

        interface AnimatorListenerProxy {
            void onAnimationStart();

            void onAnimationEnd();

            void onAnimationCancel();
        }

        abstract void start();

        abstract boolean isRunning();

        abstract void setInterpolator(Interpolator interpolator);

        abstract void addListener(AnimatorListenerProxy listener);

        abstract void addUpdateListener(AnimatorUpdateListenerProxy updateListener);

        abstract void setIntValues(int from, int to);

        abstract int getAnimatedIntValue();

        abstract void setFloatValues(float from, float to);

        abstract float getAnimatedFloatValue();

        abstract void setDuration(long duration);

        abstract void cancel();

        abstract float getAnimatedFraction();

        abstract void end();

        abstract long getDuration();
    }

    /** The version-specific backend all calls are forwarded to. */
    private final Impl mImpl;

    ValueAnimatorCompat(Impl impl) {
        mImpl = impl;
    }

    public void start() {
        mImpl.start();
    }

    public boolean isRunning() {
        return mImpl.isRunning();
    }

    public void setInterpolator(Interpolator interpolator) {
        mImpl.setInterpolator(interpolator);
    }

    public void addUpdateListener(final AnimatorUpdateListener updateListener) {
        // Wrap the public listener in a proxy so the callback hands out this
        // compat instance; a null listener is forwarded as null unchanged.
        if (updateListener != null) {
            mImpl.addUpdateListener(new Impl.AnimatorUpdateListenerProxy() {
                @Override
                public void onAnimationUpdate() {
                    updateListener.onAnimationUpdate(ValueAnimatorCompat.this);
                }
            });
        } else {
            mImpl.addUpdateListener(null);
        }
    }

    public void addListener(final AnimatorListener listener) {
        // Same adaptation as addUpdateListener, for the start/end/cancel callbacks.
        if (listener != null) {
            mImpl.addListener(new Impl.AnimatorListenerProxy() {
                @Override
                public void onAnimationStart() {
                    listener.onAnimationStart(ValueAnimatorCompat.this);
                }

                @Override
                public void onAnimationEnd() {
                    listener.onAnimationEnd(ValueAnimatorCompat.this);
                }

                @Override
                public void onAnimationCancel() {
                    listener.onAnimationCancel(ValueAnimatorCompat.this);
                }
            });
        } else {
            mImpl.addListener(null);
        }
    }

    public void setIntValues(int from, int to) {
        mImpl.setIntValues(from, to);
    }

    public int getAnimatedIntValue() {
        return mImpl.getAnimatedIntValue();
    }

    public void setFloatValues(float from, float to) {
        mImpl.setFloatValues(from, to);
    }

    public float getAnimatedFloatValue() {
        return mImpl.getAnimatedFloatValue();
    }

    public void setDuration(long duration) {
        mImpl.setDuration(duration);
    }

    public void cancel() {
        mImpl.cancel();
    }

    public float getAnimatedFraction() {
        return mImpl.getAnimatedFraction();
    }

    public void end() {
        mImpl.end();
    }

    public long getDuration() {
        return mImpl.getDuration();
    }
}
/* * Copyright (C) 2012-2015 DataStax Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datastax.driver.core.utils; import com.datastax.driver.core.Native; import com.google.common.base.Charsets; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.management.ManagementFactory; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.net.UnknownHostException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.*; import java.util.concurrent.atomic.AtomicLong; /** * Utility methods to help working with UUIDs, and more specifically, with time-based UUIDs * (also known as Version 1 UUIDs). 
 * <h3>Notes on the algorithm used to generate time-based UUIDs</h3>
 * The algorithm follows roughly the description in RFC-4122, but with the following adaptations:
 * <ol>
 * <li>Since Java does not provide direct access to the host's MAC address, that information
 * is replaced with a digest of all IP addresses available on the host;</li>
 * <li>The process ID (PID) isn't easily available to Java either, so it is determined by one of the
 * following methods, in the order they are listed below:
 * <ol>
 * <li>If the System property <code>{@value PID_SYSTEM_PROPERTY}</code> is set then the value to use as a PID
 * will be read from that property;</li>
 * <li>Otherwise, if a native call to {@link Native#processId() getpid()} is possible, then the PID
 * will be read from that call;</li>
 * <li>Otherwise, an attempt will be made to read the PID from JMX's
 * {@link ManagementFactory#getRuntimeMXBean() RuntimeMXBean}, which is a well-known,
 * yet undocumented "hack", since most JVMs tend to use the JVM's PID as part of that MXBean name;</li>
 * <li>If all of the above fails, a random integer will be generated and used as a surrogate PID.</li>
 * </ol>
 * </li>
 * </ol>
 *
 * @jira_ticket JAVA-444
 * @see <a href="http://www.ietf.org/rfc/rfc4122.txt">A Universally Unique IDentifier (UUID) URN Namespace (RFC 4122)</a>
 */
public final class UUIDs {

    /**
     * The System property to use to force the value of the process ID (PID).
     */
    public static final String PID_SYSTEM_PROPERTY = "com.datastax.driver.PID";

    private static final Logger LOGGER = LoggerFactory.getLogger(UUIDs.class);

    // Static utility class: never instantiated.
    private UUIDs() {
    }

    // 100-ns-interval epoch offset (15 Oct 1582) expressed in Unix milliseconds; negative value.
    private static final long START_EPOCH = makeEpoch();
    // Least-significant 64 bits shared by every UUID produced by timeBased(); computed once per JVM.
    private static final long CLOCK_SEQ_AND_NODE = makeClockSeqAndNode();

    /*
     * The min and max possible lsb for a UUID.
     * Note that his is not 0 and all 1's because Cassandra TimeUUIDType
     * compares the lsb parts as a signed byte array comparison. So the min
     * value is 8 times -128 and the max is 8 times +127.
     *
     * Note that we ignore the uuid variant (namely, MIN_CLOCK_SEQ_AND_NODE
     * have variant 2 as it should, but MAX_CLOCK_SEQ_AND_NODE have variant 0)
     * because I don't trust all uuid implementation to have correctly set
     * those (pycassa don't always for instance).
     */
    private static final long MIN_CLOCK_SEQ_AND_NODE = 0x8080808080808080L;
    private static final long MAX_CLOCK_SEQ_AND_NODE = 0x7f7f7f7f7f7f7f7fL;

    // Last 100-ns timestamp handed out by getCurrentTimestamp(); guarantees strictly
    // increasing (hence unique) timestamps across threads within this JVM.
    private static final AtomicLong lastTimestamp = new AtomicLong(0L);

    /**
     * Computes the Unix-millisecond value of the UUID v1 epoch.
     *
     * @return the Unix timestamp (ms) of 00:00:00.000, 15 October 1582 GMT (a negative number).
     */
    private static long makeEpoch() {
        // UUID v1 timestamp must be in 100-nanoseconds interval since 00:00:00.000 15 Oct 1582.
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT-0"));
        c.set(Calendar.YEAR, 1582);
        c.set(Calendar.MONTH, Calendar.OCTOBER);
        c.set(Calendar.DAY_OF_MONTH, 15);
        c.set(Calendar.HOUR_OF_DAY, 0);
        c.set(Calendar.MINUTE, 0);
        c.set(Calendar.SECOND, 0);
        c.set(Calendar.MILLISECOND, 0);
        return c.getTimeInMillis();
    }

    /**
     * Builds the 48-bit "node" part of the UUID.
     */
    private static long makeNode() {
        /*
         * We don't have access to the MAC address (in pure JAVA at least) but
         * need to generate a node part that identify this host as uniquely as
         * possible.
         * The spec says that one option is to take as many source that
         * identify this node as possible and hash them together. That's what
         * we do here by gathering all the ip of this host as well as a few
         * other sources.
         */
        try {
            MessageDigest digest = MessageDigest.getInstance("MD5");
            for (String address : getAllLocalAddresses())
                update(digest, address);

            Properties props = System.getProperties();
            update(digest, props.getProperty("java.vendor"));
            update(digest, props.getProperty("java.vendor.url"));
            update(digest, props.getProperty("java.version"));
            update(digest, props.getProperty("os.arch"));
            update(digest, props.getProperty("os.name"));
            update(digest, props.getProperty("os.version"));
            update(digest, getProcessPiece());

            byte[] hash = digest.digest();

            // Fold the first 6 digest bytes into a 48-bit little-endian node value.
            long node = 0;
            for (int i = 0; i < 6; i++)
                node |= (0x00000000000000ffL & (long) hash[i]) << (i * 8);
            // Since we don't use the mac address, the spec says that multicast
            // bit (least significant bit of the first byte of the node ID) must be 1.
            return node | 0x0000010000000000L;
        } catch (NoSuchAlgorithmException e) {
            // MD5 is mandated by the JDK spec, so this should never happen.
            throw new RuntimeException(e);
        }
    }

    /**
     * Produces a per-process discriminator string mixed into the node digest:
     * the PID (obtained via the strategies described in the class javadoc)
     * concatenated with the identity hash of this class's ClassLoader, both in hex.
     * The loader id distinguishes multiple deployments inside the same JVM.
     */
    private static String getProcessPiece() {
        Integer pid = null;

        // 1) explicit override via system property
        String pidProperty = System.getProperty(PID_SYSTEM_PROPERTY);
        if (pidProperty != null) {
            try {
                pid = Integer.parseInt(pidProperty);
                LOGGER.info("PID obtained from System property {}: {}", PID_SYSTEM_PROPERTY, pid);
            } catch (NumberFormatException e) {
                LOGGER.warn("Incorrect integer specified for PID in System property {}: {}", PID_SYSTEM_PROPERTY, pidProperty);
            }
        }

        // 2) native getpid(), when available
        if (pid == null && Native.isGetpidAvailable()) {
            try {
                pid = Native.processId();
                LOGGER.info("PID obtained through native call to getpid(): {}", pid);
            } catch (Exception e) {
                LOGGER.warn("Native call to getpid() failed", e);
            }
        }

        // 3) the well-known "pid@hostname" RuntimeMXBean name hack
        if (pid == null) {
            try {
                String pidJmx = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
                pid = Integer.parseInt(pidJmx);
                LOGGER.info("PID obtained through JMX: {}", pid);
            } catch (Exception e) {
                LOGGER.warn("Failed to obtain PID from JMX", e);
            }
        }

        // 4) last resort: random surrogate
        if (pid == null) {
            pid = new java.util.Random().nextInt();
            LOGGER.warn("Could not determine PID, falling back to a random integer: {}", pid);
        }

        ClassLoader loader = UUIDs.class.getClassLoader();
        int loaderId = loader != null ? System.identityHashCode(loader) : 0;
        return Integer.toHexString(pid) + Integer.toHexString(loaderId);
    }

    /**
     * Adds {@code value} (UTF-8 encoded) to the digest, ignoring nulls.
     */
    private static void update(MessageDigest digest, String value) {
        if (value != null)
            digest.update(value.getBytes(Charsets.UTF_8));
    }

    /**
     * Assembles the constant least-significant 64 bits of generated UUIDs:
     * a random 14-bit clock sequence, the RFC-4122 variant bits, and the node.
     */
    private static long makeClockSeqAndNode() {
        long clock = new Random(System.currentTimeMillis()).nextLong();
        long node = makeNode();

        long lsb = 0;
        lsb |= (clock & 0x0000000000003FFFL) << 48; // 14-bit clock sequence
        lsb |= 0x8000000000000000L;                 // RFC-4122 variant bits (10)
        lsb |= node;                                // 48-bit node
        return lsb;
    }

    /**
     * Creates a new random (version 4) UUID.
     * <p/>
     * This method is just a convenience for {@code UUID.randomUUID()}.
     *
     * @return a newly generated, pseudo random, version 4 UUID.
     */
    public static UUID random() {
        return UUID.randomUUID();
    }

    /**
     * Creates a new time-based (version 1) UUID.
     * <p/>
     * UUIDs generated by this method are suitable for use with the
     * {@code timeuuid} Cassandra type. In particular the generated UUID
     * includes the timestamp of its generation.
     * <p/>
     * Note that there is no way to provide your own timestamp. This is deliberate, as we feel that this does not
     * conform to the UUID specification, and therefore don't want to encourage it through the API.
     * If you want to do it anyway, use the following workaround:
     * <pre>
     * Random random = new Random();
     * UUID uuid = new UUID(UUIDs.startOf(userProvidedTimestamp).getMostSignificantBits(), random.nextLong());
     * </pre>
     * If you simply need to perform a range query on a {@code timeuuid} column, use the "fake" UUID generated by
     * {@link #startOf(long)} and {@link #endOf(long)}.
     *
     * @return a new time-based UUID.
     */
    public static UUID timeBased() {
        return new UUID(makeMSB(getCurrentTimestamp()), CLOCK_SEQ_AND_NODE);
    }

    /**
     * Creates a "fake" time-based UUID that sorts as the smallest possible
     * version 1 UUID generated at the provided timestamp.
     * <p/>
     * Such created UUIDs are useful in queries to select a time range of a
     * {@code timeuuid} column.
     * <p/>
     * The UUIDs created by this method <b>are not unique</b> and as such are
     * <b>not</b> suitable for anything else than querying a specific time
     * range. In particular, you should not insert such UUIDs. "True" UUIDs from
     * user-provided timestamps are not supported (see {@link #timeBased()}
     * for more explanations).
     * <p/>
     * Also, the timestamp to provide as a parameter must be a Unix timestamp (as
     * returned by {@link System#currentTimeMillis} or {@link java.util.Date#getTime}), and
     * <em>not</em> a count of 100-nanosecond intervals since 00:00:00.00, 15 October 1582 (as required by RFC-4122).
     * <p/>
     * In other words, given a UUID {@code uuid}, you should never call
     * {@code startOf(uuid.timestamp())} but rather
     * {@code startOf(unixTimestamp(uuid))}.
     * <p/>
     * Lastly, please note that Cassandra's {@code timeuuid} sorting is not compatible
     * with {@link UUID#compareTo} and hence the UUIDs created by this method
     * are not necessarily lower bound for that latter method.
     *
     * @param timestamp the Unix timestamp for which the created UUID must be a
     *                  lower bound.
     * @return the smallest (for Cassandra {@code timeuuid} sorting) UUID of {@code timestamp}.
     */
    public static UUID startOf(long timestamp) {
        return new UUID(makeMSB(fromUnixTimestamp(timestamp)), MIN_CLOCK_SEQ_AND_NODE);
    }

    /**
     * Creates a "fake" time-based UUID that sorts as the biggest possible
     * version 1 UUID generated at the provided timestamp.
     * <p/>
     * See {@link #startOf(long)} for explanations about the intended usage of such UUID.
     *
     * @param timestamp the Unix timestamp for which the created UUID must be an
     *                  upper bound.
     * @return the biggest (for Cassandra {@code timeuuid} sorting) UUID of {@code timestamp}.
     */
    public static UUID endOf(long timestamp) {
        // Last 100-ns interval that still falls inside the given millisecond.
        long uuidTstamp = fromUnixTimestamp(timestamp + 1) - 1;
        return new UUID(makeMSB(uuidTstamp), MAX_CLOCK_SEQ_AND_NODE);
    }

    /**
     * Return the Unix timestamp contained by the provided time-based UUID.
     * <p/>
     * This method is not equivalent to {@link UUID#timestamp()}. More
     * precisely, a version 1 UUID stores a timestamp that represents the
     * number of 100-nanoseconds intervals since midnight, 15 October 1582 and
     * that is what {@link UUID#timestamp()} returns. This method however
     * converts that timestamp to the equivalent Unix timestamp in
     * milliseconds, i.e. a timestamp representing a number of milliseconds
     * since midnight, January 1, 1970 UTC. In particular, the timestamps
     * returned by this method are comparable to the timestamps returned by
     * {@link System#currentTimeMillis}, {@link java.util.Date#getTime}, etc.
     *
     * @param uuid the UUID to return the timestamp of.
     * @return the Unix timestamp of {@code uuid}.
     * @throws IllegalArgumentException if {@code uuid} is not a version 1 UUID.
     */
    public static long unixTimestamp(UUID uuid) {
        if (uuid.version() != 1)
            throw new IllegalArgumentException(String.format("Can only retrieve the unix timestamp for version 1 uuid (provided version %d)", uuid.version()));

        long timestamp = uuid.timestamp();
        return (timestamp / 10000) + START_EPOCH;
    }

    /*
     * Note that currently we use {@link System#currentTimeMillis} for a base time in
     * milliseconds, and then if we are in the same milliseconds that the
     * previous generation, we increment the number of nanoseconds.
     * However, since the precision is 100-nanoseconds intervals, we can only
     * generate 10K UUID within a millisecond safely. If we detect we have
     * already generated that much UUID within a millisecond (which, while
     * admittedly unlikely in a real application, is very achievable on even
     * modest machines), then we stall the generator (busy spin) until the next
     * millisecond as required by the RFC.
     */
    private static long getCurrentTimestamp() {
        // Lock-free CAS loop: returns a strictly increasing 100-ns timestamp.
        while (true) {
            long now = fromUnixTimestamp(System.currentTimeMillis());
            long last = lastTimestamp.get();
            if (now > last) {
                // New millisecond: claim it (retry if another thread beat us).
                if (lastTimestamp.compareAndSet(last, now))
                    return now;
            } else {
                long lastMillis = millisOf(last);
                // If the clock went back in time, bail out
                if (millisOf(now) < millisOf(last))
                    return lastTimestamp.incrementAndGet();
                long candidate = last + 1;
                // If we've generated more than 10k uuid in that millisecond,
                // we restart the whole process until we get to the next millis.
                // Otherwise, we try use our candidate ... unless we've been
                // beaten by another thread in which case we try again.
                if (millisOf(candidate) == lastMillis && lastTimestamp.compareAndSet(last, candidate))
                    return candidate;
            }
        }
    }

    // Package visible for testing
    // Converts a Unix millisecond timestamp into 100-ns intervals since the 1582 epoch.
    static long fromUnixTimestamp(long tstamp) {
        return (tstamp - START_EPOCH) * 10000;
    }

    // Inverse granularity of fromUnixTimestamp: 100-ns intervals -> whole milliseconds (since 1582).
    private static long millisOf(long timestamp) {
        return timestamp / 10000;
    }

    // Package visible for testing
    // Scatters the 60-bit timestamp into the time_low/time_mid/time_hi fields per RFC-4122.
    static long makeMSB(long timestamp) {
        long msb = 0L;
        msb |= (0x00000000ffffffffL & timestamp) << 32;  // time_low
        msb |= (0x0000ffff00000000L & timestamp) >>> 16; // time_mid
        msb |= (0x0fff000000000000L & timestamp) >>> 48; // time_hi
        msb |= 0x0000000000001000L; // sets the version to 1.
        return msb;
    }

    /**
     * Gathers string forms of every IP address / hostname this host exposes;
     * used as (part of) the input to the node-id digest.
     */
    private static Set<String> getAllLocalAddresses() {
        Set<String> allIps = new HashSet<String>();
        try {
            InetAddress localhost = InetAddress.getLocalHost();
            allIps.add(localhost.toString());
            // Also return the hostname if available, it won't hurt (this does a dns lookup, it's only done once at startup)
            allIps.add(localhost.getCanonicalHostName());
            InetAddress[] allMyIps = InetAddress.getAllByName(localhost.getCanonicalHostName());
            if (allMyIps != null) {
                for (int i = 0; i < allMyIps.length; i++)
                    allIps.add(allMyIps[i].toString());
            }
        } catch (UnknownHostException e) {
            // Ignore, we'll try the network interfaces anyway
        }

        try {
            Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces();
            if (en != null) {
                while (en.hasMoreElements()) {
                    Enumeration<InetAddress> enumIpAddr = en.nextElement().getInetAddresses();
                    while (enumIpAddr.hasMoreElements())
                        allIps.add(enumIpAddr.nextElement().toString());
                }
            }
        } catch (SocketException e) {
            // Ignore, if we've really got nothing so far, we'll throw an exception
        }
        return allIps;
    }
}
/* * Copyright 2007 Sascha Weinreuter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.plugins.relaxNG.compact.psi.impl; import com.intellij.codeInsight.CodeInsightUtilCore; import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider; import com.intellij.codeInsight.lookup.LookupItem; import com.intellij.codeInsight.template.*; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.LocalQuickFixProvider; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.lang.ASTNode; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFileFactory; import com.intellij.psi.PsiReference; import com.intellij.psi.ResolveState; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.scope.BaseScopeProcessor; import com.intellij.psi.tree.IElementType; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import org.intellij.plugins.relaxNG.compact.RncElementTypes; import org.intellij.plugins.relaxNG.compact.RncFileType; import org.intellij.plugins.relaxNG.compact.RncTokenTypes; import 
org.intellij.plugins.relaxNG.compact.psi.*;
import org.intellij.plugins.relaxNG.compact.psi.util.EscapeUtil;
import org.intellij.plugins.relaxNG.compact.psi.util.RenameUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * PSI element for a (possibly prefixed) name in a RELAX-NG compact-syntax file.
 * When the name carries a prefix (e.g. {@code xsd:string}), the element acts as
 * its own {@link PsiReference}, resolving the prefix against the file's
 * {@code namespace}/{@code datatypes} declarations, and offers a quick fix to
 * create a missing declaration.
 */
public class RncNameImpl extends RncElementImpl implements RncName, PsiReference, EmptyResolveMessageProvider, LocalQuickFixProvider {
  // Which kind of declaration the prefix must resolve to, based on context.
  private enum Kind {
    NAMESPACE, DATATYPES
  }

  public RncNameImpl(ASTNode node) {
    super(node);
  }

  /**
   * Returns the prefix part of the name, or null if the name is unqualified.
   */
  @Override
  @Nullable
  public String getPrefix() {
    final String[] parts = EscapeUtil.unescapeText(getNode()).split(":", 2);
    return parts.length == 2 ? parts[0] : null;
  }

  /**
   * Returns the local part of the name (the whole text when there is no prefix).
   */
  @Override
  @NotNull
  public String getLocalPart() {
    final String[] parts = EscapeUtil.unescapeText(getNode()).split(":", 2);
    return parts.length == 1 ? parts[0] : parts[1];
  }

  @Override
  public void accept(@NotNull RncElementVisitor visitor) {
    visitor.visitName(this);
  }

  /** Only prefixed names are references (the prefix must resolve to a declaration). */
  @Override
  public PsiReference getReference() {
    return getPrefix() == null ? null : this;
  }

  @Override
  public PsiElement getElement() {
    return this;
  }

  /**
   * The reference range covers just the prefix (text before the ':').
   * Only called for prefixed names (see getReference), so a ':' is present.
   */
  @Override
  public TextRange getRangeInElement() {
    return TextRange.from(0, getText().indexOf(':'));
  }

  /**
   * Resolves the prefix to the matching namespace/datatypes declaration in
   * this file, or null when no declaration matches.
   */
  @Override
  @Nullable
  public PsiElement resolve() {
    final MyResolver resolver = new MyResolver(getPrefix(), getKind());
    getContainingFile().processDeclarations(resolver, ResolveState.initial(), this, this);
    return resolver.getResult();
  }

  // A name inside a datatype pattern needs a "datatypes" declaration;
  // everything else needs a "namespace" declaration.
  private Kind getKind() {
    final IElementType parent = getNode().getTreeParent().getElementType();
    if (parent == RncElementTypes.DATATYPE_PATTERN) {
      return Kind.DATATYPES;
    } else {
      return Kind.NAMESPACE;
    }
  }

  @Override
  @NotNull
  public String getCanonicalText() {
    return getRangeInElement().substring(getText());
  }

  /**
   * Renames the prefix by replacing this node with a freshly built
   * "newPrefix:localPart" node.
   */
  @Override
  public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
    final ASTNode node = getNode();
    final ASTNode child = RenameUtil.createPrefixedNode(getManager(), newElementName, getLocalPart());
    node.getTreeParent().replaceChild(node, child);
    return child.getPsi();
  }

  @Override
  public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isReferenceTo(PsiElement element) {
    return element instanceof RncElement && Comparing.equal(resolve(), element);
  }

  @Override
  @NotNull
  public Object[] getVariants() {
    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  /** "xsd" and "xml" are predefined prefixes; don't flag them as unresolved. */
  @Override
  public boolean isSoft() {
    final String prefix = getPrefix();
    return "xsd".equals(prefix) || "xml".equals(prefix);
  }

  @Override
  @NotNull
  public String getUnresolvedMessagePattern() {
    return "Unresolved namespace prefix ''{0}''";
  }

  @Nullable
  @Override
  public LocalQuickFix[] getQuickFixes() {
    if (getPrefix() != null) {
      return new LocalQuickFix[]{ new CreateDeclFix(this) };
    }
    return LocalQuickFix.EMPTY_ARRAY;
  }

  /**
   * Scope processor that walks the file's declarations looking for the
   * namespace/datatypes declaration whose prefix matches.
   */
  private static class MyResolver extends BaseScopeProcessor {
    private final String myPrefix;
    private final Kind myKind;
    private PsiElement myResult; // first matching declaration, if any

    public MyResolver(String prefix, Kind kind) {
      myPrefix = prefix;
      myKind = kind;
    }

    @Override
    public boolean execute(@NotNull PsiElement element, @NotNull ResolveState substitutor) {
      final ASTNode node = element.getNode();
      if (node == null) return true;

      // Declarations come first in an RNC file; stop as soon as we leave them.
      if (!(element instanceof RncDecl)) {
        return false;
      }
      final IElementType type = node.getElementType();
      if (myKind == Kind.NAMESPACE && type == RncElementTypes.NS_DECL) {
        if (checkDecl(element)) return false; // found it: stop processing
      } else if (myKind == Kind.DATATYPES && type == RncElementTypes.DATATYPES_DECL) {
        if (checkDecl(element)) return false; // found it: stop processing
      }
      return true;
    }

    // Records the declaration and returns true when its prefix matches ours.
    private boolean checkDecl(PsiElement element) {
      if (myPrefix.equals(((RncDecl)element).getPrefix())) {
        myResult = element;
        return true;
      }
      return false;
    }

    public PsiElement getResult() {
      return myResult;
    }
  }

  /**
   * Quick fix that inserts a {@code namespace}/{@code datatypes} declaration
   * for the unresolved prefix and starts a live template so the user can type
   * the URI value in place.
   */
  public static class CreateDeclFix implements LocalQuickFix {
    private final RncNameImpl myReference;

    public CreateDeclFix(RncNameImpl reference) {
      myReference = reference;
    }

    @Override
    @NotNull
    public String getName() {
      return getFamilyName() + " '" + myReference.getPrefix() + "'";
    }

    @Override
    @NotNull
    public String getFamilyName() {
      return "Create " + myReference.getKind().name().toLowerCase() + " declaration";
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final String prefix = myReference.getPrefix();
      // Parse a throwaway file containing the declaration with a placeholder URI ("###").
      final PsiFileFactory factory = PsiFileFactory.getInstance(myReference.getProject());
      final RncFile psiFile = (RncFile)factory.createFileFromText("dummy.rnc", RncFileType.getInstance(), myReference.getKind().name().toLowerCase() + " " + prefix + " = \"###\"");
      final RncFile rncFile = (RncFile)myReference.getContainingFile();
      final RncDecl[] declarations = rncFile.getDeclarations();
      final RncDecl decl = psiFile.getDeclarations()[0];
      final RncDecl e;
      // Insert after the last existing declaration, else before the grammar,
      // else at the end of the file.
      if (declarations.length > 0) {
        e = (RncDecl)rncFile.addAfter(decl, declarations[declarations.length - 1]);
      } else {
        final RncGrammar rncGrammar = rncFile.getGrammar();
        if (rncGrammar != null) {
          e = (RncDecl)rncFile.addBefore(decl, rncGrammar);
        } else {
          e = (RncDecl)rncFile.add(decl);
        }
      }

      final ASTNode blockNode = e.getParent().getNode();
      assert blockNode != null;

      final ASTNode newNode = e.getNode();
      assert newNode != null;

      CodeStyleManager.getInstance(e.getManager().getProject()).reformatNewlyAddedElement(blockNode, newNode);

      // Remove the placeholder literal; the live template below re-creates the
      // quoted value with the caret inside it.
      final PsiElement literal = e.getLastChild();
      assert literal != null;

      final ASTNode literalNode = literal.getNode();
      assert literalNode != null;

      assert literalNode.getElementType() == RncTokenTypes.LITERAL;
      final int offset = literal.getTextRange().getStartOffset();

      literal.delete();

      VirtualFile virtualFile = myReference.getElement().getContainingFile().getVirtualFile();
      if (virtualFile != null) {
        Editor editor = FileEditorManager.getInstance(project).openTextEditor(new OpenFileDescriptor(project, virtualFile, offset), true);
        if (editor != null) {
          RncDecl rncDecl = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(e);

          final TemplateManager manager = TemplateManager.getInstance(project);
          final Template t = manager.createTemplate("", "");

          t.addTextSegment(" \"");
          // Empty-result expression: the "uri" variable starts out blank and
          // waits for user input.
          final Expression expression = new Expression() {
            @Override
            public Result calculateResult(ExpressionContext context) {
              return new TextResult("");
            }

            @Override
            public Result calculateQuickResult(ExpressionContext context) {
              return calculateResult(context);
            }

            @Override
            public LookupItem[] calculateLookupItems(ExpressionContext context) {
              return LookupItem.EMPTY_ARRAY;
            }
          };
          t.addVariable("uri", expression, expression, true);
          t.addTextSegment("\"");
          t.addEndVariable();

          editor.getCaretModel().moveToOffset(rncDecl.getTextRange().getEndOffset());
          manager.startTemplate(editor, t);
        }
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.creadur.whisker.model;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

/**
 * High level description of licensing qualities.
 */
public class Descriptor {

    /** Principle license for main work. */
    private final License primaryLicense;
    /** Optional additional primary copyright notice. */
    private final String primaryCopyrightNotice;
    /** Individual or group with main responsible for main work. */
    private final String primaryOrganisationId;
    /** A NOTICE for the main work, for inclusion alongside the LICENSE. */
    private final String primaryNotice;
    /** License meta-data, indexed by id. */
    private final Map<String, License> licenses;
    /** Organisation meta-data, indexed by id. */
    private final Map<String, Organisation> organisations;
    /** Notice meta-data, indexed by id. */
    private final Map<String, String> notices;
    /** Directories expected to be contained within the release. */
    private final Collection<WithinDirectory> contents;

    /**
     * Constructs a description of the expected licensing qualities of a
     * distribution, with no additional primary copyright notice.
     *
     * @param primaryLicense
     *            not null
     * @param primaryOrganisationId
     *            not null
     * @param primaryNotice
     *            possibly null
     * @param licenses
     *            not null, possibly empty
     * @param notices
     *            not null, possibly empty
     * @param organisations
     *            not null, possibly empty
     * @param contents
     *            not null, possibly empty
     */
    public Descriptor(final License primaryLicense,
            final String primaryOrganisationId,
            final String primaryNotice,
            final Map<String, License> licenses,
            final Map<String, String> notices,
            final Map<String, Organisation> organisations,
            final Collection<WithinDirectory> contents) {
        // Delegates to the full constructor with a null copyright notice.
        this(primaryLicense, null, primaryOrganisationId, primaryNotice,
                licenses, notices, organisations, contents);
    }

    /**
     * Constructs a description of the expected licensing qualities of a
     * distribution, with a primary additional copyright notice.
     *
     * @param primaryLicense
     *            not null
     * @param primaryCopyrightNotice
     *            optional primary copyright notice, possibly null
     *
     * @param primaryOrganisationId
     *            not null
     * @param primaryNotice
     *            possibly null
     * @param licenses
     *            not null, possibly empty
     * @param notices
     *            not null, possibly empty
     * @param organisations
     *            not null, possibly empty
     * @param contents
     *            not null, possibly empty
     */
    public Descriptor(final License primaryLicense,
            final String primaryCopyrightNotice,
            final String primaryOrganisationId,
            final String primaryNotice,
            final Map<String, License> licenses,
            final Map<String, String> notices,
            final Map<String, Organisation> organisations,
            final Collection<WithinDirectory> contents) {
        super();
        // NOTE(review): collections are stored as-is (no defensive copy);
        // callers are expected not to mutate them afterwards.
        this.primaryLicense = primaryLicense;
        this.primaryCopyrightNotice = primaryCopyrightNotice;
        this.primaryOrganisationId = primaryOrganisationId;
        this.primaryNotice = primaryNotice;
        this.licenses = licenses;
        this.notices = notices;
        this.organisations = organisations;
        this.contents = contents;
    }

    /**
     * Gets an additional copyright notice needed
     * for some primary licenses.
     *
     * @return optional primary copyright notice,
     * possibly null
     */
    public String getPrimaryCopyrightNotice() {
        return primaryCopyrightNotice;
    }

    /**
     * Is there a primary copyright notice?
     *
     * @return true if a primary copyright notice
     * has been set, false otherwise
     */
    public boolean isPrimaryCopyrightNotice() {
        return primaryCopyrightNotice != null;
    }

    /**
     * Gets the principle NOTICE for the main work.
     *
     * @return the primaryNotice
     */
    public String getPrimaryNotice() {
        return this.primaryNotice;
    }

    /**
     * Collates NOTICE meta-data for resources.
     *
     * @return not null, possibly empty
     */
    public Map<String, Collection<Resource>> getResourceNotices() {
        // Walk the whole content tree so the collator sees every resource.
        final NoticeCollator collator = new NoticeCollator();
        traverse(collator);
        return collator.resourceNotices(this.notices);
    }

    /**
     * Gets the organisations described.
     *
     * @return organisations indexed by id, not null
     */
    public Map<String, Organisation> getOrganisations() {
        return organisations;
    }

    /**
     * Collates NOTICE meta-data not linked to any resource.
     *
     * @return not null, possibly empty
     */
    public Set<String> getOtherNotices() {
        final NoticeCollator collator = new NoticeCollator();
        traverse(collator);
        return collator.notices(this.notices);
    }

    /**
     * Gets the license with the given id.
     *
     * @param id
     *            not null
     * @return the license with the given id, or null
     */
    public License license(final String id) {
        return this.licenses.get(id);
    }

    /**
     * Gets the principle license under which the work is licensed.
     *
     * @return the principle license, not null
     */
    public License getPrimaryLicense() {
        return this.primaryLicense;
    }

    /**
     * Gets the contents expected in the distribution.
     *
     * @return not null, possibly empty
     */
    public Collection<WithinDirectory> getContents() {
        return this.contents;
    }

    /**
     * Is the given license the principle license for the main work?
     *
     * @param license
     *            not null
     * @return true when the given license is the primary license, not null
     */
    public boolean isPrimary(final License license) {
        return this.primaryLicense.equals(license);
    }

    /**
     * Is the given individual or group the principle organisation with
     * responsibility for the main work.
     *
     * @param byOrganisation
     *            not null
     * @return true when the given organisation is primary
     */
    public boolean isPrimary(final ByOrganisation byOrganisation) {
        return byOrganisation.getId().equals(this.primaryOrganisationId);
    }

    /**
     * Is this collection of resources expected to contain only material
     * licensed under the primary license by the
     * primary organisation with the primary copyright notice?
     *
     * @param contentElement
     *            not null
     * @return true when the contents are all licensed under the primary license
     *         by the primary organisation
     */
    public boolean isOnlyPrimary(final ContentElement contentElement) {
        // Primary-only means: only the primary license, only the primary
        // organisation, and no per-resource copyright notices.
        final NoCopyrightNoticeVerifier verifier = new NoCopyrightNoticeVerifier();
        final LicenseAndOrganisationCollator collator = new LicenseAndOrganisationCollator();
        contentElement.accept(collator);
        contentElement.accept(verifier);
        return collator.isOnlyLicense(getPrimaryLicense())
                && collator.isOnlyOrganisation(this.primaryOrganisationId)
                && !verifier.isCopyrightNoticePresent();
    }

    /**
     * Traverses the content directories.
     *
     * @param visitor
     *            possibly null
     */
    public void traverse(final Visitor visitor) {
        for (final WithinDirectory directory : getContents()) {
            directory.accept(visitor);
        }
    }

    /**
     * Traverses the given directory.
     *
     * @param visitor
     *            possibly null
     * @param directoryName
     *            not null
     */
    public void traverseDirectory(final Visitor visitor,
            final String directoryName) {
        // Only directories whose name matches are visited.
        for (final WithinDirectory directory : getContents()) {
            if (directory.isNamed(directoryName)) {
                directory.accept(visitor);
            }
        }
    }

    /**
     * Is a NOTICE document required?
     *
     * @return true when a NOTICE is required, false otherwise
     */
    public boolean isNoticeRequired() {
        return primaryNoticeExists() || resourceNoticesExist();
    }

    /**
     * Does any resource have a required notice?
     *
     * @return true when at least one required third party notice exists, false
     *         otherwise
     */
    public boolean resourceNoticesExist() {
        return !getResourceNotices().isEmpty();
    }

    /**
     * Does the work described have a primary notice?
     *
     * @return true unless the primary notice is null or whitespace
     */
    public boolean primaryNoticeExists() {
        return (this.primaryNotice != null)
                && !"".equals(this.primaryNotice.trim());
    }

    /**
     * Is this the work of the primary organisation only?
     *
     * @return true when no third party resources are contained, false when
     *         third party resources exist. In particular, true when
     *         contents are empty.
     */
    public boolean isPrimaryOnly() {
        final boolean result;
        if (contents.size() > 0) {
            final LicenseAndOrganisationCollator collator =
                    new LicenseAndOrganisationCollator();
            for (final WithinDirectory directory : contents) {
                directory.accept(collator);
            }
            result = collator.isOnlyOrganisation(primaryOrganisationId);
        } else {
            // Empty contents trivially satisfy "primary only".
            result = true;
        }
        return result;
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/metastore/v1beta/metastore.proto
// NOTE(review): this file is machine-generated. To change it, edit
// metastore.proto and regenerate; do not hand-edit this class.

package com.google.cloud.metastore.v1beta;

/**
 *
 *
 * <pre>
 * Request message for
 * [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1beta.DataprocMetastore.ListMetadataImports].
 * </pre>
 *
 * Protobuf type {@code google.cloud.metastore.v1beta.ListMetadataImportsRequest}
 */
public final class ListMetadataImportsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.metastore.v1beta.ListMetadataImportsRequest)
    ListMetadataImportsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListMetadataImportsRequest.newBuilder() to construct.
  private ListMetadataImportsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: all string fields empty, page_size zero.
  private ListMetadataImportsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
    orderBy_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListMetadataImportsRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Tags 10/16/26/34/42 are the encoded
  // keys for fields 1 (parent), 2 (page_size), 3 (page_token), 4 (filter)
  // and 5 (order_by); anything else is preserved as an unknown field.
  private ListMetadataImportsRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 16:
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 26:
            {
              java.lang.String s = input.readStringRequireUtf8();
              pageToken_ = s;
              break;
            }
          case 34:
            {
              java.lang.String s = input.readStringRequireUtf8();
              filter_ = s;
              break;
            }
          case 42:
            {
              java.lang.String s = input.readStringRequireUtf8();
              orderBy_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_ListMetadataImportsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_ListMetadataImportsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.class,
            com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted on access.
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the service whose metadata imports
   * to list, in the following form:
   * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the service whose metadata imports
   * to list, in the following form:
   * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of imports to return. The response may contain
   * less than the maximum number. If unspecified, no more than 500 imports are
   * returned. The maximum value is 1000; values above 1000 are changed to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  private volatile java.lang.Object pageToken_;
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous
   * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
   * call. Provide this token to retrieve the subsequent page.
   * To retrieve the first page, supply an empty page token.
   * When paginating, other parameters provided to
   * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
   * must match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous
   * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
   * call. Provide this token to retrieve the subsequent page.
   * To retrieve the first page, supply an empty page token.
   * When paginating, other parameters provided to
   * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
   * must match the call that provided the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;
  private volatile java.lang.Object filter_;
  /**
   *
   *
   * <pre>
   * Optional. The filter to apply to list results.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The filter to apply to list results.
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ORDER_BY_FIELD_NUMBER = 5;
  private volatile java.lang.Object orderBy_;
  /**
   *
   *
   * <pre>
   * Optional. Specify the ordering of results as described in [Sorting
   * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
   * If not specified, the results will be sorted in the default order.
   * </pre>
   *
   * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The orderBy.
   */
  @java.lang.Override
  public java.lang.String getOrderBy() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      orderBy_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Specify the ordering of results as described in [Sorting
   * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
   * If not specified, the results will be sorted in the default order.
   * </pre>
   *
   * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for orderBy.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOrderByBytes() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      orderBy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Tri-state memoization of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.metastore.v1beta.ListMetadataImportsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.metastore.v1beta.ListMetadataImportsRequest other =
        (com.google.cloud.metastore.v1beta.ListMetadataImportsRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.metastore.v1beta.ListMetadataImportsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1beta.DataprocMetastore.ListMetadataImports].
   * </pre>
   *
   * Protobuf type {@code google.cloud.metastore.v1beta.ListMetadataImportsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1beta.ListMetadataImportsRequest)
      com.google.cloud.metastore.v1beta.ListMetadataImportsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_ListMetadataImportsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_ListMetadataImportsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.class,
              com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.Builder.class);
    }

    // Construct using com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      pageSize_ = 0;

      pageToken_ = "";

      filter_ = "";

      orderBy_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_ListMetadataImportsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.ListMetadataImportsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.ListMetadataImportsRequest build() {
      com.google.cloud.metastore.v1beta.ListMetadataImportsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.ListMetadataImportsRequest buildPartial() {
      com.google.cloud.metastore.v1beta.ListMetadataImportsRequest result =
          new com.google.cloud.metastore.v1beta.ListMetadataImportsRequest(this);
      result.parent_ = parent_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      result.filter_ = filter_;
      result.orderBy_ = orderBy_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.metastore.v1beta.ListMetadataImportsRequest) {
        return mergeFrom((com.google.cloud.metastore.v1beta.ListMetadataImportsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only non-default values from `other` overwrite
    // this builder's state (proto3 merge semantics).
    public Builder mergeFrom(com.google.cloud.metastore.v1beta.ListMetadataImportsRequest other) {
      if (other
          == com.google.cloud.metastore.v1beta.ListMetadataImportsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.metastore.v1beta.ListMetadataImportsRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.metastore.v1beta.ListMetadataImportsRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The relative resource name of the service whose metadata imports
     * to list, in the following form:
     * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The relative resource name of the service whose metadata imports
     * to list, in the following form:
     * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The relative resource name of the service whose metadata imports
     * to list, in the following form:
     * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      parent_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The relative resource name of the service whose metadata imports
     * to list, in the following form:
     * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The relative resource name of the service whose metadata imports
     * to list, in the following form:
     * `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      parent_ = value;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of imports to return. The response may contain
     * less than the maximum number. If unspecified, no more than 500 imports are
     * returned. The maximum value is 1000; values above 1000 are changed to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of imports to return. The response may contain
     * less than the maximum number. If unspecified, no more than 500 imports are
     * returned. The maximum value is 1000; values above 1000 are changed to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of imports to return. The response may contain
     * less than the maximum number. If unspecified, no more than 500 imports are
     * returned. The maximum value is 1000; values above 1000 are changed to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {

      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. A page token, received from a previous
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * call. Provide this token to retrieve the subsequent page.
     * To retrieve the first page, supply an empty page token.
     * When paginating, other parameters provided to
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * must match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A page token, received from a previous
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * call. Provide this token to retrieve the subsequent page.
     * To retrieve the first page, supply an empty page token.
     * When paginating, other parameters provided to
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * must match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A page token, received from a previous
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * call. Provide this token to retrieve the subsequent page.
     * To retrieve the first page, supply an empty page token.
     * When paginating, other parameters provided to
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * must match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      pageToken_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A page token, received from a previous
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * call. Provide this token to retrieve the subsequent page.
     * To retrieve the first page, supply an empty page token.
     * When paginating, other parameters provided to
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * must match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A page token, received from a previous
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * call. Provide this token to retrieve the subsequent page.
     * To retrieve the first page, supply an empty page token.
     * When paginating, other parameters provided to
     * [DataprocMetastore.ListServices][google.cloud.metastore.v1beta.DataprocMetastore.ListServices]
     * must match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      pageToken_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The filter to apply to list results.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The filter to apply to list results.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The filter to apply to list results.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      filter_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The filter to apply to list results.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The filter to apply to list results.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      filter_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Specify the ordering of results as described in [Sorting
     * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
     * If not specified, the results will be sorted in the default order.
     * </pre>
     *
     * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Specify the ordering of results as described in [Sorting
     * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
     * If not specified, the results will be sorted in the default order.
     * </pre>
     *
     * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Specify the ordering of results as described in [Sorting
     * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
     * If not specified, the results will be sorted in the default order.
     * </pre>
     *
     * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      orderBy_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Specify the ordering of results as described in [Sorting
     * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
     * If not specified, the results will be sorted in the default order.
     * </pre>
     *
     * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Specify the ordering of results as described in [Sorting
     * Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
     * If not specified, the results will be sorted in the default order.
     * </pre>
     *
     * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      orderBy_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1beta.ListMetadataImportsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.metastore.v1beta.ListMetadataImportsRequest)
  private static final com.google.cloud.metastore.v1beta.ListMetadataImportsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.metastore.v1beta.ListMetadataImportsRequest();
  }

  public static com.google.cloud.metastore.v1beta.ListMetadataImportsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless singleton parser; delegates to the wire-format constructor.
  private static final com.google.protobuf.Parser<ListMetadataImportsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListMetadataImportsRequest>() {
        @java.lang.Override
        public ListMetadataImportsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListMetadataImportsRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ListMetadataImportsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListMetadataImportsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.metastore.v1beta.ListMetadataImportsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.vector.accessor.writer;

import java.util.ArrayList;
import java.util.List;

import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.vector.accessor.ColumnReader;
import org.apache.drill.exec.vector.accessor.ColumnWriterIndex;
import org.apache.drill.exec.vector.accessor.writer.AbstractArrayWriter.ArrayObjectWriter;
import org.apache.drill.exec.vector.accessor.writer.dummy.DummyArrayWriter;
import org.apache.drill.exec.vector.complex.AbstractMapVector;
import org.apache.drill.exec.vector.complex.MapVector;
import org.apache.drill.exec.vector.complex.RepeatedMapVector;

/**
 * Writer for a Drill Map type. Maps are actually tuples, just like rows:
 * clients write the map's member columns, not the map itself. Concrete
 * subclasses below handle the four cases: single map, repeated (array) map,
 * and the "dummy" (unprojected) variant of each.
 */
public abstract class MapWriter extends AbstractTupleWriter {

  /**
   * Wrap the outer index to avoid incrementing the array index
   * on the call to <tt>nextElement().</tt> For maps, the increment
   * is done at the map level, not the column level: a map with three
   * members must advance the element index once per map, not three times.
   * All position queries delegate to the wrapped base index; only the
   * element-advance operations are suppressed (deliberate no-ops below).
   */
  private static class MemberWriterIndex implements ColumnWriterIndex {
    private final ColumnWriterIndex baseIndex;

    private MemberWriterIndex(ColumnWriterIndex baseIndex) {
      this.baseIndex = baseIndex;
    }

    @Override public int rowStartIndex() { return baseIndex.rowStartIndex(); }
    @Override public int vectorIndex() { return baseIndex.vectorIndex(); }
    // No-op on purpose: the enclosing array writer advances the index.
    @Override public void nextElement() { }
    // No-op on purpose: see nextElement().
    @Override public void prevElement() { }
    // No-op on purpose: rollover is coordinated by the parent writer.
    @Override public void rollover() { }

    @Override public ColumnWriterIndex outerIndex() {
      return baseIndex.outerIndex();
    }

    @Override
    public String toString() {
      return new StringBuilder()
        .append("[")
        .append(getClass().getSimpleName())
        .append(" baseIndex = ")
        .append(baseIndex.toString())
        .append("]")
        .toString();
    }
  }

  /**
   * Writer for a single (non-array) map. Clients don't really "write" maps;
   * rather, this writer is a holder for the columns within the map, and those
   * columns are what is written.
   */
  protected static class SingleMapWriter extends MapWriter {
    // The underlying map vector; only its value count is maintained here.
    private final MapVector mapVector;

    protected SingleMapWriter(ColumnMetadata schema, MapVector vector,
        List<AbstractObjectWriter> writers) {
      super(schema, writers);
      mapVector = vector;
    }

    @Override
    public void endWrite() {
      super.endWrite();

      // A non repeated map has a field that holds the value count.
      // Update it. (A repeated map uses the offset vector's value count.)
      // Special form of set value count: used only for
      // this class to avoid setting the value count of children.
      // Setting these counts was already done. Doing it again
      // will corrupt nullable vectors because the writers don't
      // set the "lastSet" field of nullable vector accessors,
      // and the initial value of -1 will cause all values to
      // be overwritten.
      mapVector.setMapValueCount(vectorIndex.vectorIndex());
    }

    @Override
    public void preRollover() {
      super.preRollover();
      // On rollover, only the rows up to the start of the in-flight row
      // survive; truncate the map's value count accordingly.
      mapVector.setMapValueCount(vectorIndex.rowStartIndex());
    }

    @Override
    public boolean isProjected() { return true; }
  }

  /**
   * Writer for a an array of maps. A single array index coordinates writes
   * to the constituent member vectors so that, say, the values for (row 10,
   * element 5) all occur to the same position in the columns within the map.
   * Since the map is an array, it has an associated offset vector, which the
   * parent array writer is responsible for maintaining.
   */
  protected static class ArrayMapWriter extends MapWriter {

    protected ArrayMapWriter(ColumnMetadata schema,
        List<AbstractObjectWriter> writers) {
      super(schema, writers);
    }

    @Override
    public void bindIndex(ColumnWriterIndex index) {

      // This is a repeated map, so the provided index is an array element
      // index. Convert this to an index that will not increment the element
      // index on each write so that a map with three members, say, won't
      // increment the index for each member. Rather, the index must be
      // incremented at the array level.
      bindIndex(index, new MemberWriterIndex(index));
    }

    @Override
    public boolean isProjected() { return true; }
  }

  // Stand-in for an unprojected single map: holds the column structure but
  // discards all writes (isProjected() == false, copy() is a no-op).
  protected static class DummyMapWriter extends MapWriter {

    protected DummyMapWriter(ColumnMetadata schema,
        List<AbstractObjectWriter> writers) {
      super(schema, writers);
    }

    @Override
    public boolean isProjected() { return false; }

    // Intentionally empty: nothing to copy into an unprojected column.
    @Override
    public void copy(ColumnReader from) { }
  }

  // Stand-in for an unprojected repeated map; see DummyMapWriter.
  protected static class DummyArrayMapWriter extends MapWriter {

    protected DummyArrayMapWriter(ColumnMetadata schema,
        List<AbstractObjectWriter> writers) {
      super(schema, writers);
    }

    @Override
    public boolean isProjected() { return false; }

    // Intentionally empty: nothing to copy into an unprojected column.
    @Override
    public void copy(ColumnReader from) { }
  }

  // Metadata for the map column itself (the tuple schema passed to the
  // superclass is the map's *member* schema, schema.mapSchema()).
  protected final ColumnMetadata mapColumnSchema;

  protected MapWriter(ColumnMetadata schema, List<AbstractObjectWriter> writers) {
    super(schema.mapSchema(), writers);
    mapColumnSchema = schema;
  }

  /**
   * Build a writer for a single (non-array) map column.
   *
   * @param schema metadata for the map column
   * @param vector backing vector, or null for an unprojected (dummy) map
   * @param writers writers for the map's member columns
   * @return the map wrapped as a tuple object writer
   */
  public static TupleObjectWriter buildMap(ColumnMetadata schema, MapVector vector,
      List<AbstractObjectWriter> writers) {
    MapWriter mapWriter;
    if (vector != null) {

      // Vector is not required for a map writer; the map's columns
      // are written, but not the (non-array) map.
      mapWriter = new SingleMapWriter(schema, vector, writers);
    } else {
      mapWriter = new DummyMapWriter(schema, writers);
    }
    return new TupleObjectWriter(mapWriter);
  }

  /**
   * Build a writer for a repeated-map column: the map writer wrapped in an
   * array writer that owns the offset vector (or dummies of both when the
   * column is unprojected, i.e. mapVector is null).
   */
  public static ArrayObjectWriter buildMapArray(ColumnMetadata schema,
      RepeatedMapVector mapVector,
      List<AbstractObjectWriter> writers) {
    MapWriter mapWriter;
    if (mapVector != null) {
      mapWriter = new ArrayMapWriter(schema, writers);
    } else {
      mapWriter = new DummyArrayMapWriter(schema, writers);
    }
    TupleObjectWriter mapArray = new TupleObjectWriter(mapWriter);
    AbstractArrayWriter arrayWriter;
    if (mapVector != null) {
      arrayWriter = new ObjectArrayWriter(schema,
          mapVector.getOffsetVector(),
          mapArray);
    } else {
      arrayWriter = new DummyArrayWriter(schema, mapArray);
    }
    return new ArrayObjectWriter(arrayWriter);
  }

  /**
   * Dispatch on the column's cardinality: repeated maps get an array writer,
   * single maps a tuple writer. The vector cast matches the branch taken.
   */
  public static AbstractObjectWriter buildMapWriter(ColumnMetadata schema,
      AbstractMapVector vector, List<AbstractObjectWriter> writers) {
    if (schema.isArray()) {
      return MapWriter.buildMapArray(schema,
          (RepeatedMapVector) vector, writers);
    } else {
      return MapWriter.buildMap(schema, (MapVector) vector, writers);
    }
  }

  /**
   * Convenience overload for a map with no members yet; the assert documents
   * that the member schema must be empty when no writers are supplied.
   */
  public static AbstractObjectWriter buildMapWriter(ColumnMetadata schema,
      AbstractMapVector vector) {
    assert schema.mapSchema().size() == 0;
    return buildMapWriter(schema, vector, new ArrayList<AbstractObjectWriter>());
  }

  @Override
  public ColumnMetadata schema() { return mapColumnSchema; }
}
/* * Copyright (c) 2014 Intellibins authors * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of The Intern nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE LISTED COPYRIGHT HOLDERS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

package com.intellibins.glassware;

import com.google.android.glass.media.Sounds;
import com.google.android.glass.view.WindowUtils;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.CameraManager;
import com.github.barcodeeye.image.ImageManager;
import com.github.barcodeeye.migrated.AmbientLightManager;
import com.github.barcodeeye.migrated.BeepManager;
import com.github.barcodeeye.migrated.FinishListener;
import com.github.barcodeeye.migrated.InactivityTimer;
import com.github.barcodeeye.scan.CaptureActivityHandler;
import com.github.barcodeeye.scan.ui.ViewfinderView;

import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;

import java.io.IOException;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;

/**
 * This activity opens the camera and does the actual scanning on a background
 * thread. It draws a viewfinder to help the user place the barcode correctly,
 * shows feedback as the image processing is happening,
 * and then overlays the results when a scan is successful.
 *
 * @author dswitkin@google.com (Daniel Switkin)
 * @author Sean Owen
 */
public final class CaptureActivity extends BaseGlassActivity implements
        SurfaceHolder.Callback {

    // Intent-extra key and the recognized recycling-category values passed
    // to ResultActivity.
    public static final String ITEM_TYPE = "ITEM_TYPE";
    public static final String ITEM_PAPER = "ITEM_PAPER";
    public static final String ITEM_METAL_GLASS_PLASTIC = "ITEM_METAL_GLASS_PLASTIC";
    public static final String ITEM_SPECIAL_WASTE = "ITEM_SPECIAL_WASTE";

    private static final String IMAGE_PREFIX = "BarcodeEye_";

    private static final String TAG = CaptureActivity.class.getSimpleName();

    // Barcode metadata types worth surfacing to the user, per zxing.
    private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES = EnumSet
            .of(ResultMetadataType.ISSUE_NUMBER,
                    ResultMetadataType.SUGGESTED_PRICE,
                    ResultMetadataType.ERROR_CORRECTION_LEVEL,
                    ResultMetadataType.POSSIBLE_COUNTRY);

    private CameraManager mCameraManager;
    private CaptureActivityHandler mHandler;
    // Result decoded before the handler was ready; replayed once it exists.
    private Result mSavedResultToShow;
    private ViewfinderView mViewfinderView;
    private boolean mHasSurface;
    private Map<DecodeHintType, ?> mDecodeHints;
    private InactivityTimer mInactivityTimer;
    private BeepManager mBeepManager;
    private AmbientLightManager mAmbientLightManager;
    private ImageManager mImageManager;

    /** Factory for an intent that launches this activity. */
    public static Intent newIntent(Context context) {
        Intent intent = new Intent(context, CaptureActivity.class);
        return intent;
    }

    /**
     * Superimpose a line for 1D or dots for 2D to highlight the key features of
     * the barcode.
     *
     * @param barcode A bitmap of the captured image.
     * @param scaleFactor amount by which thumbnail was scaled
     * @param rawResult The decoded results which contains the points to draw.
     */
    private static void drawResultPoints(Bitmap barcode, float scaleFactor,
            Result rawResult, int color) {
        ResultPoint[] points = rawResult.getResultPoints();
        if (points != null && points.length > 0) {
            Canvas canvas = new Canvas(barcode);
            Paint paint = new Paint();
            paint.setColor(color);
            if (points.length == 2) {
                // 1D barcode: the two points delimit the scan line.
                paint.setStrokeWidth(4.0f);
                drawLine(canvas, paint, points[0], points[1], scaleFactor);
            } else if (points.length == 4
                    && (rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A || rawResult
                            .getBarcodeFormat() == BarcodeFormat.EAN_13)) {
                // Hacky special case -- draw two lines, for the barcode and metadata
                drawLine(canvas, paint, points[0], points[1], scaleFactor);
                drawLine(canvas, paint, points[2], points[3], scaleFactor);
            } else {
                // 2D barcode: mark each finder/alignment point individually.
                paint.setStrokeWidth(10.0f);
                for (ResultPoint point : points) {
                    if (point != null) {
                        canvas.drawPoint(scaleFactor * point.getX(),
                                scaleFactor * point.getY(), paint);
                    }
                }
            }
        }
    }

    // Draw a line between two result points, scaled from preview coordinates
    // to thumbnail coordinates. Null-safe on either endpoint.
    private static void drawLine(Canvas canvas, Paint paint, ResultPoint a,
            ResultPoint b, float scaleFactor) {
        if (a != null && b != null) {
            canvas.drawLine(scaleFactor * a.getX(), scaleFactor * a.getY(),
                    scaleFactor * b.getX(), scaleFactor * b.getY(), paint);
        }
    }

    public ViewfinderView getViewfinderView() {
        return mViewfinderView;
    }

    public Handler getHandler() {
        return mHandler;
    }

    public CameraManager getCameraManager() {
        return mCameraManager;
    }

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);

        // Enable Glass voice commands before setting the content view.
        getWindow().requestFeature(WindowUtils.FEATURE_VOICE_COMMANDS);
        setContentView(R.layout.activity_capture);

        mImageManager = new ImageManager(this);

        mHasSurface = false;
        mInactivityTimer = new InactivityTimer(this);
        mBeepManager = new BeepManager(this);
        mAmbientLightManager = new AmbientLightManager(this);
        mViewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);

        PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
    }

    @Override
    protected void onResume() {
        super.onResume();

        // CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
        // want to open the camera driver and measure the screen size if we're going to show the help on
        // first launch. That led to bugs where the scanning rectangle was the wrong size and partially
        // off screen.
        mCameraManager = new CameraManager(getApplication());

        mViewfinderView.setCameraManager(mCameraManager);

        mHandler = null;

        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
        SurfaceHolder surfaceHolder = surfaceView.getHolder();
        if (mHasSurface) {
            // The activity was paused but not stopped, so the surface still exists. Therefore
            // surfaceCreated() won't be called, so init the camera here.
            initCamera(surfaceHolder);
        } else {
            // Install the callback and wait for surfaceCreated() to init the camera.
            surfaceHolder.addCallback(this);
        }

        mBeepManager.updatePrefs();
        mAmbientLightManager.start(mCameraManager);
        mInactivityTimer.onResume();
    }

    @Override
    protected void onPause() {
        // Tear down in reverse order of onResume(): stop decoding, stop
        // sensors/timers, then release the camera driver.
        if (mHandler != null) {
            mHandler.quitSynchronously();
            mHandler = null;
        }
        mInactivityTimer.onPause();
        mAmbientLightManager.stop();
        mCameraManager.closeDriver();
        if (!mHasSurface) {
            SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
            SurfaceHolder surfaceHolder = surfaceView.getHolder();
            surfaceHolder.removeCallback(this);
        }
        super.onPause();
    }

    @Override
    protected boolean onTap() {
        // A tap opens the options menu with audible feedback.
        AudioManager audio = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        audio.playSoundEffect(Sounds.TAP);
        openOptionsMenu();
        return super.onTap();
    }

    @Override
    protected void onDestroy() {
        mInactivityTimer.shutdown();
        super.onDestroy();
    }

    // If the decode handler is not yet ready, stash the result; otherwise
    // forward the most recent result to the handler as a decode_succeeded
    // message and clear the stash.
    private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
        // Bitmap isn't used yet -- will be used soon
        if (mHandler == null) {
            mSavedResultToShow = result;
        } else {
            if (result != null) {
                mSavedResultToShow = result;
            }
            if (mSavedResultToShow != null) {
                Message message = Message.obtain(mHandler,
                        R.id.decode_succeeded, mSavedResultToShow);
                mHandler.sendMessage(message);
            }
            mSavedResultToShow = null;
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // NOTE(review): a null holder is only logged here; initCamera() below
        // will then throw IllegalStateException. Mirrors upstream zxing code.
        if (holder == null) {
            Log.e(TAG,
                    "*** WARNING *** surfaceCreated() gave us a null surface!");
        }
        if (!mHasSurface) {
            mHasSurface = true;
            initCamera(holder);
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mHasSurface = false;
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        // No-op: preview size changes are handled by CameraManager.
    }

    /**
     * A valid barcode has been found, so give an indication of success and show
     * the results.
     *
     * @param rawResult The contents of the barcode.
     * @param scaleFactor amount by which thumbnail was scaled
     * @param barcode A greyscale bitmap of the camera data which was decoded.
     */
    public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
        mInactivityTimer.onActivity();

        boolean fromLiveScan = barcode != null;
        if (fromLiveScan) {
            mBeepManager.playBeepSoundAndVibrate();
            drawResultPoints(barcode, scaleFactor, rawResult, getResources()
                    .getColor(R.color.result_points));
        }
        handleDecodeInternally(rawResult, barcode);
    }

    // TODO UI
    // Put up our own UI for how to handle the decoded contents.
    // Maps the scanned text onto one of the known recycling categories by
    // checking whether the category constant *contains* the scanned text.
    // NOTE(review): the containment direction means an empty scan text
    // matches ITEM_METAL_GLASS_PLASTIC (contains("") is always true), and
    // e.g. "paper" matches because "item_paper".contains("paper") — confirm
    // this is the intended barcode vocabulary.
    private void handleDecodeInternally(Result rawResult, Bitmap barcode) {
        String text = rawResult.getText().toLowerCase();
        Log.v(TAG, "handleDecodeInternally text " + text);
        if(ITEM_METAL_GLASS_PLASTIC.toLowerCase().contains(text)) {
            startResultActivity(ITEM_METAL_GLASS_PLASTIC);
        } else if(ITEM_PAPER.toLowerCase().contains(text)) {
            startResultActivity(ITEM_PAPER);
        } else if(ITEM_SPECIAL_WASTE.toLowerCase().contains(text)) {
            startResultActivity(ITEM_SPECIAL_WASTE);
        } else {
            startResultActivity(text);
        }
    }

    // Open the camera driver against the given surface and start the decode
    // handler. Safe to call twice: a second call while open is ignored.
    private void initCamera(SurfaceHolder surfaceHolder) {
        if (surfaceHolder == null) {
            throw new IllegalStateException("No SurfaceHolder provided");
        }
        if (mCameraManager.isOpen()) {
            Log.w(TAG,
                    "initCamera() while already open -- late SurfaceView callback?");
            return;
        }
        try {
            mCameraManager.openDriver(surfaceHolder);
            // Creating the handler starts the preview, which can also throw a RuntimeException.
            if (mHandler == null) {
                mHandler = new CaptureActivityHandler(this, null, mDecodeHints,
                        null, mCameraManager);
            }
            decodeOrStoreSavedBitmap(null, null);
        } catch (IOException e) {
            Log.w(TAG, e);
            displayFrameworkBugMessageAndExit();
        } catch (InterruptedException e) {
            // NOTE(review): logging only via printStackTrace and the interrupt
            // flag is not restored — kept as-is from the original.
            e.printStackTrace();
        }
    }

    /**
     * FIXME: This should be a glass compatible view (Card)
     */
    private void displayFrameworkBugMessageAndExit() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(getString(R.string.app_name));
        builder.setMessage(getString(R.string.msg_camera_framework_bug));
        builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
        builder.setOnCancelListener(new FinishListener(this));
        builder.show();
    }

    // Ask the decode handler to restart the preview after a delay (ms).
    public void restartPreviewAfterDelay(long delayMS) {
        if (mHandler != null) {
            mHandler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
        }
    }

    public void drawViewfinder() {
        mViewfinderView.drawViewfinder();
    }

    @Override
    public boolean onCreatePanelMenu(int featureId, Menu menu) {
        // Same menu serves both voice commands and the touch options panel.
        if (featureId == WindowUtils.FEATURE_VOICE_COMMANDS
                || featureId == Window.FEATURE_OPTIONS_PANEL) {
            getMenuInflater().inflate(R.menu.menu_main, menu);
            return true;
        }
        return super.onCreatePanelMenu(featureId, menu);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onMenuItemSelected(int featureId, MenuItem item) {
        if (featureId == WindowUtils.FEATURE_VOICE_COMMANDS
                || featureId == Window.FEATURE_OPTIONS_PANEL) {
            // plastic/metal/glass all intentionally map to the combined
            // metal-glass-plastic category.
            switch (item.getItemId()) {
                case R.id.plastic_menu_item:
                    startResultActivity(ITEM_METAL_GLASS_PLASTIC);
                    break;
                case R.id.metal_menu_item:
                    startResultActivity(ITEM_METAL_GLASS_PLASTIC);
                    break;
                case R.id.glass_menu_item:
                    startResultActivity(ITEM_METAL_GLASS_PLASTIC);
                    break;
                case R.id.paper_menu_item:
                    startResultActivity(ITEM_PAPER);
                    break;
                case R.id.special_waste_menu_item:
                    startResultActivity(ITEM_SPECIAL_WASTE);
                    break;
                default:
                    return true;
            }
            return true;
        }
        return super.onMenuItemSelected(featureId, item);
    }

    // Launch ResultActivity with the chosen category and finish this activity.
    // Posted to the main looper so it runs after the current event completes.
    private void startResultActivity(final String itemType) {
        new Handler().post(new Runnable() {
            @Override
            public void run() {
                Intent intent = new Intent(CaptureActivity.this,
                        ResultActivity.class);
                intent.putExtra(ITEM_TYPE, itemType);
                startActivity(intent);
                finish();
            }
        });
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.util.mutable;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BytesRef;

/**
 * Simple test of the basic contract of the various {@link MutableValue}
 * implementations.
 *
 * <p>Each per-type test follows the same script: verify the default state,
 * then drive two instances through exists/value changes, checking
 * equality, inequality, ordering, and {@code copy()} at each step.
 * (Fix over previous revision: default-state checks used bare {@code assert}
 * statements, which are silently skipped when the JVM runs without
 * {@code -ea}; they now use JUnit's {@code assertTrue}. The diagnostic
 * message grammar is also corrected.)
 */
public class TestMutableValues extends LuceneTestCase {

  // Shared diagnostic for the default-state preconditions of every test.
  private static final String DEFAULTS_CHANGED =
      "defaults have changed; test utility may no longer be as high";

  public void testStr() {
    MutableValueStr xxx = new MutableValueStr();
    // Guard the assumptions the rest of the test is built on.
    assertTrue(DEFAULTS_CHANGED, xxx.value.equals(new BytesRef()));
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueStr yyy = new MutableValueStr();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value.length = 0;
    xxx.value.copyChars("zzz");
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value.length = 0;
    yyy.value.copyChars("aaa");
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    // "zzz" sorts after "aaa", so xxx > yyy and yyy < xxx.
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);

    // special BytesRef considerations...
    xxx.exists = false;
    xxx.value.length = 0; // but leave bytes alone
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    yyy.value.length = 0; // but leave bytes alone
    assertEquality(xxx, yyy);
  }

  public void testDouble() {
    MutableValueDouble xxx = new MutableValueDouble();
    assertTrue(DEFAULTS_CHANGED, xxx.value == 0.0D);
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueDouble yyy = new MutableValueDouble();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value = 42.0D;
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value = -99.0D;
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);
  }

  public void testInt() {
    MutableValueInt xxx = new MutableValueInt();
    assertTrue(DEFAULTS_CHANGED, xxx.value == 0);
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueInt yyy = new MutableValueInt();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value = 42;
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value = -99;
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);
  }

  public void testFloat() {
    MutableValueFloat xxx = new MutableValueFloat();
    assertTrue(DEFAULTS_CHANGED, xxx.value == 0.0F);
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueFloat yyy = new MutableValueFloat();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value = 42.0F;
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value = -99.0F;
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);
  }

  public void testLong() {
    MutableValueLong xxx = new MutableValueLong();
    assertTrue(DEFAULTS_CHANGED, xxx.value == 0L);
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueLong yyy = new MutableValueLong();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value = 42L;
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value = -99L;
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);
  }

  public void testBool() {
    MutableValueBool xxx = new MutableValueBool();
    assertTrue(DEFAULTS_CHANGED, xxx.value == false);
    assertTrue(DEFAULTS_CHANGED, xxx.exists);
    assertSanity(xxx);

    MutableValueBool yyy = new MutableValueBool();
    assertSanity(yyy);
    assertEquality(xxx, yyy);

    xxx.exists = false;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.exists = false;
    assertEquality(xxx, yyy);

    xxx.value = true;
    xxx.exists = true;
    assertSanity(xxx);
    assertInEquality(xxx, yyy);

    yyy.value = false;
    yyy.exists = true;
    assertSanity(yyy);
    assertInEquality(xxx, yyy);
    // true sorts after false.
    assertTrue(0 < xxx.compareTo(yyy));
    assertTrue(yyy.compareTo(xxx) < 0);

    xxx.copy(yyy);
    assertSanity(xxx);
    assertEquality(xxx, yyy);
  }

  /** A value must equal itself and any duplicate of itself. */
  private void assertSanity(MutableValue x) {
    assertEquality(x, x);
    MutableValue y = x.duplicate();
    assertEquality(x, y);
  }

  /** Asserts the full symmetric equality contract between two values. */
  private void assertEquality(MutableValue x, MutableValue y) {
    assertEquals(x.hashCode(), y.hashCode());

    assertEquals(x, y);
    assertEquals(y, x);

    assertTrue(x.equalsSameType(y));
    assertTrue(y.equalsSameType(x));

    assertEquals(0, x.compareTo(y));
    assertEquals(0, y.compareTo(x));

    assertEquals(0, x.compareSameType(y));
    assertEquals(0, y.compareSameType(x));
  }

  /** Asserts the full symmetric inequality contract between two values. */
  private void assertInEquality(MutableValue x, MutableValue y) {
    assertFalse(x.equals(y));
    assertFalse(y.equals(x));

    assertFalse(x.equalsSameType(y));
    assertFalse(y.equalsSameType(x));

    assertFalse(0 == x.compareTo(y));
    assertFalse(0 == y.compareTo(x));
  }
}