src_fm_fc_ms_ff
stringlengths 43
86.8k
| target
stringlengths 20
276k
|
|---|---|
HealthStatsMetrics extends SystemMetrics<HealthStatsMetrics> { @VisibleForTesting static <K, V> ArrayMap<K, V> opArrayMaps(int op, ArrayMap<K, V> a, @Nullable ArrayMap<K, V> b) { int aSize = a.size(); ArrayMap<K, V> output = new ArrayMap<>(); for (int i = 0; i < aSize; i++) { K key = a.keyAt(i); V bValue = b == null ? null : b.get(key); output.put(key, bValue == null ? a.valueAt(i) : (V) opValues(op, a.valueAt(i), bValue)); } if (op == OP_SUM) { int bSize = b == null ? 0 : b.size(); for (int i = 0; i < bSize; i++) { K key = b.keyAt(i); if (a.get(key) == null) { output.put(key, b.valueAt(i)); } } } return output; } HealthStatsMetrics(); HealthStatsMetrics(HealthStats healthStats); HealthStatsMetrics(HealthStatsMetrics metrics); @Override HealthStatsMetrics sum(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics diff(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics set(HealthStatsMetrics b); HealthStatsMetrics set(HealthStats healthStats); @Override String toString(); static String getKeyName(int key); JSONObject toJSONObject(); @Override boolean equals(Object o); @Override int hashCode(); public String dataType; final SparseArray<Long> measurement; final SparseArray<TimerMetrics> timer; final SparseArray<ArrayMap<String, Long>> measurements; final SparseArray<ArrayMap<String, TimerMetrics>> timers; final SparseArray<ArrayMap<String, HealthStatsMetrics>> stats; }
|
@Test public void testSumArrayMaps() { ArrayMap<String, Long> a = new ArrayMap<>(); a.put("a", 1L); a.put("c", 2L); ArrayMap<String, Long> b = new ArrayMap<>(); b.put("b", 1L); b.put("c", 3L); ArrayMap<String, Long> sum = HealthStatsMetrics.opArrayMaps(OP_SUM, a, b); assertThat(sum.get("a")).isEqualTo(1); assertThat(sum.get("b")).isEqualTo(1); assertThat(sum.get("c")).isEqualTo(5); assertThat(sum.size()).isEqualTo(3); }
@Test public void testDiffArrayMaps() { ArrayMap<String, Long> a = new ArrayMap<>(); a.put("a", 1L); a.put("c", 2L); ArrayMap<String, Long> b = new ArrayMap<>(); b.put("b", 1L); b.put("c", 3L); ArrayMap<String, Long> sum = HealthStatsMetrics.opArrayMaps(OP_DIFF, a, b); assertThat(sum.get("a")).isEqualTo(1); assertThat(sum.get("c")).isEqualTo(-1); assertThat(sum.size()).isEqualTo(2); }
|
HealthStatsMetrics extends SystemMetrics<HealthStatsMetrics> { @VisibleForTesting static <K> SparseArray<K> op(int op, SparseArray<K> a, SparseArray<K> b, SparseArray<K> output) { output.clear(); for (int i = 0; i < a.size(); i++) { int aKey = a.keyAt(i); output.put(aKey, (K) opValues(op, a.valueAt(i), b.get(aKey))); } if (op == OP_SUM) { for (int i = 0; i < b.size(); i++) { int bKey = b.keyAt(i); if (a.get(bKey) == null) { output.put(bKey, b.valueAt(i)); } } } return output; } HealthStatsMetrics(); HealthStatsMetrics(HealthStats healthStats); HealthStatsMetrics(HealthStatsMetrics metrics); @Override HealthStatsMetrics sum(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics diff(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics set(HealthStatsMetrics b); HealthStatsMetrics set(HealthStats healthStats); @Override String toString(); static String getKeyName(int key); JSONObject toJSONObject(); @Override boolean equals(Object o); @Override int hashCode(); public String dataType; final SparseArray<Long> measurement; final SparseArray<TimerMetrics> timer; final SparseArray<ArrayMap<String, Long>> measurements; final SparseArray<ArrayMap<String, TimerMetrics>> timers; final SparseArray<ArrayMap<String, HealthStatsMetrics>> stats; }
|
@Test public void testSumSparseArrays() { SparseArray<Long> a = new SparseArray<>(); a.put(10, 10L); a.put(30, 30L); SparseArray<Long> b = new SparseArray<>(); b.put(10, 10L); b.put(20, 20L); SparseArray<Long> sum = new SparseArray<>(); HealthStatsMetrics.op(OP_SUM, a, b, sum); assertThat(sum.get(10)).isEqualTo(20); assertThat(sum.get(20)).isEqualTo(20); assertThat(sum.get(30)).isEqualTo(30); assertThat(sum.size()).isEqualTo(3); }
@Test public void testDiffSparseArrays() { SparseArray<Long> a = new SparseArray<>(); a.put(10, 10L); a.put(30, 30L); SparseArray<Long> b = new SparseArray<>(); b.put(10, 10L); b.put(20, 20L); SparseArray<Long> sum = new SparseArray<>(); HealthStatsMetrics.op(OP_DIFF, a, b, sum); assertThat(sum.get(10)).isEqualTo(0); assertThat(sum.get(30)).isEqualTo(30); assertThat(sum.size()).isEqualTo(2); }
|
HealthStatsMetrics extends SystemMetrics<HealthStatsMetrics> { @Override public HealthStatsMetrics sum( @Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output) { if (output == null) { output = new HealthStatsMetrics(); } output.dataType = dataType; if (b == null) { output.set(this); } else if (!strEquals(b.dataType, dataType)) { throw new IllegalArgumentException( "Attempting to add different types of HealthStatMetrics: " + dataType + " and " + b.dataType); } else { op(OP_SUM, measurement, b.measurement, output.measurement); op(OP_SUM, measurements, b.measurements, output.measurements); op(OP_SUM, timer, b.timer, output.timer); op(OP_SUM, timers, b.timers, output.timers); op(OP_SUM, stats, b.stats, output.stats); } return output; } HealthStatsMetrics(); HealthStatsMetrics(HealthStats healthStats); HealthStatsMetrics(HealthStatsMetrics metrics); @Override HealthStatsMetrics sum(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics diff(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics set(HealthStatsMetrics b); HealthStatsMetrics set(HealthStats healthStats); @Override String toString(); static String getKeyName(int key); JSONObject toJSONObject(); @Override boolean equals(Object o); @Override int hashCode(); public String dataType; final SparseArray<Long> measurement; final SparseArray<TimerMetrics> timer; final SparseArray<ArrayMap<String, Long>> measurements; final SparseArray<ArrayMap<String, TimerMetrics>> timers; final SparseArray<ArrayMap<String, HealthStatsMetrics>> stats; }
|
@Test public void testSum() { HealthStatsMetrics a = createTestMetrics(); HealthStatsMetrics b = createTestMetrics(); HealthStatsMetrics sum = a.sum(b, null); HealthStatsMetrics expectedSum = new HealthStatsMetrics(); expectedSum.dataType = TEST_DATATYPE; expectedSum.measurement.put(123, 2000L); expectedSum.measurements.put(234, new ArrayMap<String, Long>()); expectedSum.measurements.get(234).put("measurements", 4000L); expectedSum.timer.put(345, new HealthStatsMetrics.TimerMetrics(10, 4000)); ArrayMap<String, HealthStatsMetrics.TimerMetrics> timersValues = new ArrayMap<>(); timersValues.put("timers", new HealthStatsMetrics.TimerMetrics(12, 6000)); expectedSum.timers.put(456, timersValues); ArrayMap<String, HealthStatsMetrics> value = new ArrayMap<>(); value.put("stats", new HealthStatsMetrics(expectedSum)); expectedSum.stats.put(1234, value); assertThat(sum).isEqualTo(expectedSum); }
|
HealthStatsMetrics extends SystemMetrics<HealthStatsMetrics> { @Override public HealthStatsMetrics diff( @Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output) { if (output == null) { output = new HealthStatsMetrics(); } output.dataType = dataType; if (b == null || compareSnapshotAge(this, b) < 0 ) { output.set(this); } else if (!strEquals(b.dataType, dataType)) { throw new IllegalArgumentException( "Attempting to subtract different types of HealthStatMetrics: " + dataType + " and " + b.dataType); } else { op(OP_DIFF, measurement, b.measurement, output.measurement); op(OP_DIFF, measurements, b.measurements, output.measurements); op(OP_DIFF, timer, b.timer, output.timer); op(OP_DIFF, timers, b.timers, output.timers); op(OP_DIFF, stats, b.stats, output.stats); } return output; } HealthStatsMetrics(); HealthStatsMetrics(HealthStats healthStats); HealthStatsMetrics(HealthStatsMetrics metrics); @Override HealthStatsMetrics sum(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics diff(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics set(HealthStatsMetrics b); HealthStatsMetrics set(HealthStats healthStats); @Override String toString(); static String getKeyName(int key); JSONObject toJSONObject(); @Override boolean equals(Object o); @Override int hashCode(); public String dataType; final SparseArray<Long> measurement; final SparseArray<TimerMetrics> timer; final SparseArray<ArrayMap<String, Long>> measurements; final SparseArray<ArrayMap<String, TimerMetrics>> timers; final SparseArray<ArrayMap<String, HealthStatsMetrics>> stats; }
|
@Test public void testDiff() { HealthStatsMetrics a = createTestMetrics(); HealthStatsMetrics b = createTestMetrics(); HealthStatsMetrics diff = a.diff(b, null); HealthStatsMetrics expectedDiff = new HealthStatsMetrics(); expectedDiff.dataType = TEST_DATATYPE; expectedDiff.measurement.put(123, 0L); expectedDiff.measurements.put(234, new ArrayMap<String, Long>()); expectedDiff.measurements.get(234).put("measurements", 0L); expectedDiff.timer.put(345, new HealthStatsMetrics.TimerMetrics(0, 0)); ArrayMap<String, HealthStatsMetrics.TimerMetrics> timersValues = new ArrayMap<>(); timersValues.put("timers", new HealthStatsMetrics.TimerMetrics(0, 0)); expectedDiff.timers.put(456, timersValues); ArrayMap<String, HealthStatsMetrics> value = new ArrayMap<>(); value.put("stats", new HealthStatsMetrics(expectedDiff)); expectedDiff.stats.put(1234, value); assertThat(diff).isEqualTo(expectedDiff); }
@Test public void testDiffWithReset() { HealthStatsMetrics a = createTestMetrics(); a.measurement.put(UidHealthStats.MEASUREMENT_REALTIME_BATTERY_MS, 100L); HealthStatsMetrics b = createTestMetrics(); b.measurement.put(UidHealthStats.MEASUREMENT_REALTIME_BATTERY_MS, 200L); HealthStatsMetrics output = a.diff(b, null); HealthStatsMetrics expectedOutput = createTestMetrics(); expectedOutput.measurement.put(UidHealthStats.MEASUREMENT_REALTIME_BATTERY_MS, 100L); assertThat(output).isEqualTo(expectedOutput); }
|
HealthStatsMetrics extends SystemMetrics<HealthStatsMetrics> { public JSONObject toJSONObject() throws JSONException { JSONObject output = new JSONObject(); output.put("type", dataType); addMeasurement(output); addTimer(output); addMeasurements(output); addTimers(output); addStats(output); return output; } HealthStatsMetrics(); HealthStatsMetrics(HealthStats healthStats); HealthStatsMetrics(HealthStatsMetrics metrics); @Override HealthStatsMetrics sum(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics diff(
@Nullable HealthStatsMetrics b, @Nullable HealthStatsMetrics output); @Override HealthStatsMetrics set(HealthStatsMetrics b); HealthStatsMetrics set(HealthStats healthStats); @Override String toString(); static String getKeyName(int key); JSONObject toJSONObject(); @Override boolean equals(Object o); @Override int hashCode(); public String dataType; final SparseArray<Long> measurement; final SparseArray<TimerMetrics> timer; final SparseArray<ArrayMap<String, Long>> measurements; final SparseArray<ArrayMap<String, TimerMetrics>> timers; final SparseArray<ArrayMap<String, HealthStatsMetrics>> stats; }
|
@Test public void datatypeToJSON() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.dataType = TEST_DATATYPE; JSONObject json = metrics.toJSONObject(); assertThat(json.getString("type")).isEqualTo(TEST_DATATYPE); }
@Test public void measurementToJSON() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.measurement.put(234, 345L); JSONObject json = metrics.toJSONObject(); assertThat(json.getJSONObject("measurement").getLong("234")).isEqualTo(345L); }
@Test public void timerToJSON() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timer.put(123, new HealthStatsMetrics.TimerMetrics(1, 11)); JSONObject json = metrics.toJSONObject(); assertThat(json.getJSONObject("timer")).isNotNull(); assertThat(json.getJSONObject("timer").getJSONObject("123").getInt("count")).isEqualTo(1); assertThat(json.getJSONObject("timer").getJSONObject("123").getLong("time_ms")).isEqualTo(11L); }
@Test public void measurementsToJSON() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.measurements.put(234, new ArrayMap<String, Long>()); metrics.measurements.get(234).put("abcd", 2000L); JSONObject json = metrics.toJSONObject(); assertThat(json.getJSONObject("measurements").getJSONObject("234").getLong("abcd")) .isEqualTo(2000L); }
@Test public void timersToJSON() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timers.put(345, new ArrayMap<String, HealthStatsMetrics.TimerMetrics>()); metrics.timers.get(345).put("val", new HealthStatsMetrics.TimerMetrics(23, 24)); JSONObject json = metrics.toJSONObject(); assertThat( json.getJSONObject("timers").getJSONObject("345").getJSONObject("val").getInt("count")) .isEqualTo(23); assertThat( json.getJSONObject("timers") .getJSONObject("345") .getJSONObject("val") .getInt("time_ms")) .isEqualTo(24); }
@Test public void jsonConversionSkipsEmptyContainers() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
@Test public void jsonConversionSkipsZeroMeasurement() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.measurement.put(123, 0L); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
@Test public void jsonConversionSkipsZeroTimer() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timer.put(123, new HealthStatsMetrics.TimerMetrics(0, 0)); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
@Test public void jsonConversionDoesNotSkipPartialZeroTimer() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timer.put(123, new HealthStatsMetrics.TimerMetrics(0, 2)); metrics.timer.put(234, new HealthStatsMetrics.TimerMetrics(2, 0)); assertThat(metrics.toJSONObject().getJSONObject("timer").length()).isEqualTo(2); }
@Test public void jsonConversionSkipsZeroMeasurements() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.measurements.put(234, new ArrayMap<String, Long>()); metrics.measurements.get(234).put("abcd", 0L); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
@Test public void jsonConversionSkipsZeroTimers() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timers.put(345, new ArrayMap<String, HealthStatsMetrics.TimerMetrics>()); metrics.timers.get(345).put("val", new HealthStatsMetrics.TimerMetrics(0, 0)); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
@Test public void jsonConversionDoesNotSkipPartialZeroTimers() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.timers.put(345, new ArrayMap<String, HealthStatsMetrics.TimerMetrics>()); metrics.timers.get(345).put("val", new HealthStatsMetrics.TimerMetrics(0, 10)); metrics.timers.get(345).put("val2", new HealthStatsMetrics.TimerMetrics(20, 0)); assertThat(metrics.toJSONObject().getJSONObject("timers").getJSONObject("345").length()) .isEqualTo(2); }
@Test public void jsonConversionSkipsEmptyHealthStats() throws Exception { HealthStatsMetrics metrics = new HealthStatsMetrics(); metrics.stats.put(123, new ArrayMap<String, HealthStatsMetrics>()); HealthStatsMetrics inner = new HealthStatsMetrics(); metrics.stats.get(123).put("abc", inner); assertThat(metrics.toJSONObject().length()).isEqualTo(0); }
|
SensorMetrics extends SystemMetrics<SensorMetrics> { @Override public SensorMetrics set(SensorMetrics b) { total.set(b.total); if (isAttributionEnabled && b.isAttributionEnabled) { sensorConsumption.clear(); for (int i = 0, l = b.sensorConsumption.size(); i < l; i++) { sensorConsumption.put(b.sensorConsumption.keyAt(i), b.sensorConsumption.valueAt(i)); } } return this; } SensorMetrics(); SensorMetrics(boolean isAttributionEnabled); @Override SensorMetrics sum(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics diff(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics set(SensorMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); @Nullable JSONObject attributionToJSONObject(); public boolean isAttributionEnabled; final Consumption total; final SparseArray<Consumption> sensorConsumption; }
|
@Test public void testSet() { SensorMetrics metrics = new SensorMetrics(true); metrics.set(createAttributedMetrics()); assertThat(metrics).isEqualTo(createAttributedMetrics()); }
@Test public void testUnattributedSet() { SensorMetrics metrics = new SensorMetrics(); metrics.set(createAttributedMetrics()); SensorMetrics comparisonMetrics = createAttributedMetrics(); comparisonMetrics.isAttributionEnabled = false; comparisonMetrics.sensorConsumption.clear(); assertThat(metrics).isEqualTo(comparisonMetrics); }
|
TimeMetricsCollector extends SystemMetricsCollector<TimeMetrics> { @Override @ThreadSafe(enableChecks = false) public boolean getSnapshot(TimeMetrics snapshot) { checkNotNull(snapshot, "Null value passed to getSnapshot!"); snapshot.realtimeMs = SystemClock.elapsedRealtime(); snapshot.uptimeMs = SystemClock.uptimeMillis(); return true; } @Override @ThreadSafe(enableChecks = false) boolean getSnapshot(TimeMetrics snapshot); @Override TimeMetrics createMetrics(); }
|
@Test public void testTimes() { ShadowSystemClock.setUptimeMillis(1234); ShadowSystemClock.setElapsedRealtime(9876); TimeMetrics snapshot = new TimeMetrics(); TimeMetricsCollector collector = new TimeMetricsCollector(); collector.getSnapshot(snapshot); assertThat(snapshot.uptimeMs).isEqualTo(1234); assertThat(snapshot.realtimeMs).isEqualTo(9876); }
|
BluetoothMetrics extends SystemMetrics<BluetoothMetrics> { @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } BluetoothMetrics that = (BluetoothMetrics) o; if (bleScanCount != that.bleScanCount || bleScanDurationMs != that.bleScanDurationMs || bleOpportunisticScanCount != that.bleOpportunisticScanCount || bleOpportunisticScanDurationMs != that.bleOpportunisticScanDurationMs) { return false; } return true; } @Override BluetoothMetrics sum(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics diff(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics set(BluetoothMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); public int bleScanCount; public long bleScanDurationMs; public int bleOpportunisticScanCount; public long bleOpportunisticScanDurationMs; }
|
@Test public void testEquals() { BluetoothMetrics metricsA = new BluetoothMetrics(); metricsA.bleScanDurationMs = 1000; metricsA.bleScanCount = 2; metricsA.bleOpportunisticScanDurationMs = 4000; metricsA.bleOpportunisticScanCount = 8; BluetoothMetrics metricsB = new BluetoothMetrics(); metricsB.bleScanDurationMs = 1000; metricsB.bleScanCount = 2; metricsB.bleOpportunisticScanDurationMs = 4000; metricsB.bleOpportunisticScanCount = 8; assertThat(new BluetoothMetrics()).isEqualTo(new BluetoothMetrics()); assertThat(metricsA).isEqualTo(metricsB); }
|
BluetoothMetrics extends SystemMetrics<BluetoothMetrics> { @Override public BluetoothMetrics set(BluetoothMetrics b) { bleScanCount = b.bleScanCount; bleScanDurationMs = b.bleScanDurationMs; bleOpportunisticScanCount = b.bleOpportunisticScanCount; bleOpportunisticScanDurationMs = b.bleOpportunisticScanDurationMs; return this; } @Override BluetoothMetrics sum(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics diff(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics set(BluetoothMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); public int bleScanCount; public long bleScanDurationMs; public int bleOpportunisticScanCount; public long bleOpportunisticScanDurationMs; }
|
@Test public void testSet() { BluetoothMetrics metrics = new BluetoothMetrics(); metrics.bleScanDurationMs = 1000; metrics.bleScanCount = 10; metrics.bleOpportunisticScanDurationMs = 5000; metrics.bleOpportunisticScanCount = 3; BluetoothMetrics alternate = new BluetoothMetrics(); alternate.set(metrics); assertThat(alternate).isEqualTo(metrics); }
|
BluetoothMetrics extends SystemMetrics<BluetoothMetrics> { @Override public BluetoothMetrics diff(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output) { if (output == null) { output = new BluetoothMetrics(); } if (b == null) { output.set(this); } else { output.bleScanCount = bleScanCount - b.bleScanCount; output.bleScanDurationMs = bleScanDurationMs - b.bleScanDurationMs; output.bleOpportunisticScanCount = bleOpportunisticScanCount - b.bleOpportunisticScanCount; output.bleOpportunisticScanDurationMs = bleOpportunisticScanDurationMs - b.bleOpportunisticScanDurationMs; } return output; } @Override BluetoothMetrics sum(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics diff(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics set(BluetoothMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); public int bleScanCount; public long bleScanDurationMs; public int bleOpportunisticScanCount; public long bleOpportunisticScanDurationMs; }
|
@Test public void testDiff() { BluetoothMetrics metrics = new BluetoothMetrics(); metrics.bleScanDurationMs = 1000; metrics.bleScanCount = 10; metrics.bleOpportunisticScanDurationMs = 5000; metrics.bleOpportunisticScanCount = 3; BluetoothMetrics olderMetrics = new BluetoothMetrics(); olderMetrics.bleScanDurationMs = 800; olderMetrics.bleScanCount = 7; olderMetrics.bleOpportunisticScanDurationMs = 2000; olderMetrics.bleOpportunisticScanCount = 1; BluetoothMetrics deltaMetrics = new BluetoothMetrics(); deltaMetrics = metrics.diff(olderMetrics, deltaMetrics); assertThat(deltaMetrics.bleScanCount).isEqualTo(3); assertThat(deltaMetrics.bleScanDurationMs).isEqualTo(200); assertThat(deltaMetrics.bleOpportunisticScanCount).isEqualTo(2); assertThat(deltaMetrics.bleOpportunisticScanDurationMs).isEqualTo(3000); }
|
BluetoothMetrics extends SystemMetrics<BluetoothMetrics> { @Override public BluetoothMetrics sum(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output) { if (output == null) { output = new BluetoothMetrics(); } if (b == null) { output.set(this); } else { output.bleScanCount = bleScanCount + b.bleScanCount; output.bleScanDurationMs = bleScanDurationMs + b.bleScanDurationMs; output.bleOpportunisticScanCount = bleOpportunisticScanCount + b.bleOpportunisticScanCount; output.bleOpportunisticScanDurationMs = bleOpportunisticScanDurationMs + b.bleOpportunisticScanDurationMs; } return output; } @Override BluetoothMetrics sum(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics diff(@Nullable BluetoothMetrics b, @Nullable BluetoothMetrics output); @Override BluetoothMetrics set(BluetoothMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); public int bleScanCount; public long bleScanDurationMs; public int bleOpportunisticScanCount; public long bleOpportunisticScanDurationMs; }
|
@Test public void testSum() { BluetoothMetrics metricsA = new BluetoothMetrics(); metricsA.bleScanDurationMs = 1000; metricsA.bleScanCount = 10; metricsA.bleOpportunisticScanDurationMs = 4000; metricsA.bleOpportunisticScanCount = 1; BluetoothMetrics metricsB = new BluetoothMetrics(); metricsB.bleScanDurationMs = 2000; metricsB.bleScanCount = 20; metricsB.bleOpportunisticScanDurationMs = 8000; metricsB.bleOpportunisticScanCount = 2; BluetoothMetrics output = new BluetoothMetrics(); metricsA.sum(metricsB, output); assertThat(output.bleScanCount).isEqualTo(30); assertThat(output.bleScanDurationMs).isEqualTo(3000); assertThat(output.bleOpportunisticScanCount).isEqualTo(3); assertThat(output.bleOpportunisticScanDurationMs).isEqualTo(12000); }
|
CpuFrequencyMetrics extends SystemMetrics<CpuFrequencyMetrics> { @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CpuFrequencyMetrics that = (CpuFrequencyMetrics) o; if (timeInStateS.length != that.timeInStateS.length) { return false; } for (int i = 0, size = timeInStateS.length; i < size; i++) { if (!sparseIntArrayEquals(timeInStateS[i], that.timeInStateS[i])) { return false; } } return true; } CpuFrequencyMetrics(); @Override CpuFrequencyMetrics sum(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics diff(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics set(CpuFrequencyMetrics b); @Override boolean equals(Object o); static boolean sparseIntArrayEquals(SparseIntArray a, SparseIntArray b); @Override int hashCode(); @Override String toString(); @Nullable JSONObject toJSONObject(); final SparseIntArray[] timeInStateS; }
|
@Test public void testEquals() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); metricsA.timeInStateS[0].put(200, 2000); metricsA.timeInStateS[0].put(100, 1000); metricsA.timeInStateS[1].put(300, 3000); CpuFrequencyMetrics metricsB = new CpuFrequencyMetrics(); metricsB.timeInStateS[0].put(100, 1000); metricsB.timeInStateS[0].put(200, 2000); metricsB.timeInStateS[1].put(300, 3000); assertThat(metricsA).isEqualTo(metricsB); }
|
CpuFrequencyMetrics extends SystemMetrics<CpuFrequencyMetrics> { @Override public CpuFrequencyMetrics sum( @Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output) { if (output == null) { output = new CpuFrequencyMetrics(); } if (b == null) { output.set(this); } else { for (int i = 0; i < timeInStateS.length; i++) { SparseIntArray aCore = timeInStateS[i]; SparseIntArray bCore = b.timeInStateS[i]; SparseIntArray outputCore = output.timeInStateS[i]; for (int j = 0; j < aCore.size(); j++) { int frequency = aCore.keyAt(j); outputCore.put(frequency, aCore.valueAt(j) + bCore.get(frequency, 0)); } for (int j = 0; j < bCore.size(); j++) { int frequency = bCore.keyAt(j); if (aCore.indexOfKey(frequency) < 0) { outputCore.put(frequency, bCore.valueAt(j)); } } } } return output; } CpuFrequencyMetrics(); @Override CpuFrequencyMetrics sum(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics diff(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics set(CpuFrequencyMetrics b); @Override boolean equals(Object o); static boolean sparseIntArrayEquals(SparseIntArray a, SparseIntArray b); @Override int hashCode(); @Override String toString(); @Nullable JSONObject toJSONObject(); final SparseIntArray[] timeInStateS; }
|
@Test public void testSum() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); metricsA.timeInStateS[0].put(100, 1); metricsA.timeInStateS[0].put(200, 2); metricsA.timeInStateS[1].put(1000, 1); CpuFrequencyMetrics metricsB = new CpuFrequencyMetrics(); metricsB.timeInStateS[0].put(200, 5); metricsB.timeInStateS[0].put(300, 3); metricsB.timeInStateS[2].put(2000, 2); CpuFrequencyMetrics output = new CpuFrequencyMetrics(); metricsA.sum(metricsB, output); assertThat(output.timeInStateS[0].size()).isEqualTo(3); assertThat(output.timeInStateS[0].get(100)).isEqualTo(1); assertThat(output.timeInStateS[0].get(200)).isEqualTo(7); assertThat(output.timeInStateS[0].get(300)).isEqualTo(3); assertThat(output.timeInStateS[1].size()).isEqualTo(1); assertThat(output.timeInStateS[1].get(1000)).isEqualTo(1); assertThat(output.timeInStateS[2].size()).isEqualTo(1); assertThat(output.timeInStateS[2].get(2000)).isEqualTo(2); assertThat(output.timeInStateS[3].size()).isEqualTo(0); }
|
CpuFrequencyMetrics extends SystemMetrics<CpuFrequencyMetrics> { @Override public CpuFrequencyMetrics diff( @Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output) { if (output == null) { output = new CpuFrequencyMetrics(); } if (b == null) { output.set(this); } else { for (int i = 0; i < timeInStateS.length; i++) { SparseIntArray aCore = timeInStateS[i]; SparseIntArray bCore = b.timeInStateS[i]; SparseIntArray outputCore = output.timeInStateS[i]; boolean hasCoreReset = false; for (int j = 0, size = aCore.size(); j < size && !hasCoreReset; j++) { int frequency = aCore.keyAt(j); int difference = aCore.valueAt(j) - bCore.get(frequency, 0); if (difference < 0) { hasCoreReset = true; break; } outputCore.put(frequency, difference); } if (hasCoreReset) { copyArrayInto(aCore, outputCore); } } } return output; } CpuFrequencyMetrics(); @Override CpuFrequencyMetrics sum(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics diff(
@Nullable CpuFrequencyMetrics b, @Nullable CpuFrequencyMetrics output); @Override CpuFrequencyMetrics set(CpuFrequencyMetrics b); @Override boolean equals(Object o); static boolean sparseIntArrayEquals(SparseIntArray a, SparseIntArray b); @Override int hashCode(); @Override String toString(); @Nullable JSONObject toJSONObject(); final SparseIntArray[] timeInStateS; }
|
@Test public void testDiff() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); metricsA.timeInStateS[0].put(100, 100); metricsA.timeInStateS[0].put(200, 200); metricsA.timeInStateS[1].put(300, 300); metricsA.timeInStateS[2].put(400, 400); CpuFrequencyMetrics metricsB = new CpuFrequencyMetrics(); metricsB.timeInStateS[0].put(100, 20); metricsB.timeInStateS[1].put(300, 40); CpuFrequencyMetrics output = new CpuFrequencyMetrics(); metricsA.diff(metricsB, output); assertThat(output.timeInStateS[0].size()).isEqualTo(2); assertThat(output.timeInStateS[0].get(100)).isEqualTo(80); assertThat(output.timeInStateS[0].get(200)).isEqualTo(200); assertThat(output.timeInStateS[1].size()).isEqualTo(1); assertThat(output.timeInStateS[1].get(300)).isEqualTo(260); assertThat(output.timeInStateS[2].size()).isEqualTo(1); assertThat(output.timeInStateS[2].get(400)).isEqualTo(400); assertThat(output.timeInStateS[3].size()).isEqualTo(0); }
@Test public void testDiffWithCoreReset() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); metricsA.timeInStateS[0].put(100, 100); metricsA.timeInStateS[1].put(200, 50); metricsA.timeInStateS[1].put(250, 100); CpuFrequencyMetrics metricsB = new CpuFrequencyMetrics(); metricsB.timeInStateS[0].put(100, 50); metricsB.timeInStateS[1].put(200, 200); metricsB.timeInStateS[1].put(250, 75); metricsB.timeInStateS[2].put(300, 300); CpuFrequencyMetrics output = new CpuFrequencyMetrics(); metricsA.diff(metricsB, output); assertThat(output.timeInStateS[0].size()).isEqualTo(1); assertThat(output.timeInStateS[0].get(100)).isEqualTo(50); assertThat(output.timeInStateS[1].size()).isEqualTo(2); assertThat(output.timeInStateS[1].get(200)).isEqualTo(50); assertThat(output.timeInStateS[1].get(250)).isEqualTo(100); assertThat(output.timeInStateS[2].size()).isEqualTo(0); assertThat(output.timeInStateS[3].size()).isEqualTo(0); }
@Test public void testNullOutput() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); CpuFrequencyMetrics metricsB = new CpuFrequencyMetrics(); assertThat(metricsA.diff(metricsB)).isNotNull(); }
@Test public void testNullSubtrahend() { CpuFrequencyMetrics metricsA = new CpuFrequencyMetrics(); metricsA.timeInStateS[0].put(100, 200); metricsA.timeInStateS[1].put(200, 300); CpuFrequencyMetrics output = new CpuFrequencyMetrics(); metricsA.diff(null, output); assertThat(metricsA).isEqualTo(output); }
|
SensorMetrics extends SystemMetrics<SensorMetrics> { @Override public SensorMetrics sum(@Nullable SensorMetrics b, @Nullable SensorMetrics output) { if (output == null) { output = new SensorMetrics(isAttributionEnabled); } if (b == null) { output.set(this); } else { total.sum(b.total, output.total); if (output.isAttributionEnabled) { op(+1, sensorConsumption, b.sensorConsumption, output.sensorConsumption); } } return output; } SensorMetrics(); SensorMetrics(boolean isAttributionEnabled); @Override SensorMetrics sum(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics diff(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics set(SensorMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); @Nullable JSONObject attributionToJSONObject(); public boolean isAttributionEnabled; final Consumption total; final SparseArray<Consumption> sensorConsumption; }
|
@Test public void testAttributedSum() { SensorMetrics a = createAttributedMetrics(); SensorMetrics b = createAttributedMetrics(); SensorMetrics result = a.sum(b); assertThat(result.total.powerMah).isEqualTo(a.total.powerMah * 2); assertThat(result.total.wakeUpTimeMs).isEqualTo(a.total.wakeUpTimeMs * 2); assertThat(result.total.activeTimeMs).isEqualTo(a.total.activeTimeMs * 2); for (int i = 0, l = result.sensorConsumption.size(); i < l; i++) { int key = result.sensorConsumption.keyAt(i); SensorMetrics.Consumption value = result.sensorConsumption.valueAt(i); assertThat(value).isEqualTo(a.sensorConsumption.get(key).sum(b.sensorConsumption.get(key))); } }
|
CpuMetricsCollector extends SystemMetricsCollector<CpuMetrics> { @Override @ThreadSafe(enableChecks = false) public boolean getSnapshot(CpuMetrics snapshot) { checkNotNull(snapshot, "Null value passed to getSnapshot!"); try { ProcFileReader reader = mProcFileReader.get(); if (reader == null) { reader = new ProcFileReader(getPath()); mProcFileReader.set(reader); } reader.reset(); if (!reader.isValid()) { return false; } int index = 0; while (index < PROC_USER_TIME_FIELD) { reader.skipSpaces(); index++; } snapshot.userTimeS = readField(reader); snapshot.systemTimeS = readField(reader); snapshot.childUserTimeS = readField(reader); snapshot.childSystemTimeS = readField(reader); } catch (ProcFileReader.ParseException pe) { SystemMetricsLogger.wtf(TAG, "Unable to parse CPU time field", pe); return false; } if (mLastSnapshot.get() == null) { mLastSnapshot.set(new CpuMetrics()); } CpuMetrics lastSnapshot = mLastSnapshot.get(); if (Double.compare(snapshot.userTimeS, lastSnapshot.userTimeS) < 0 || Double.compare(snapshot.systemTimeS, lastSnapshot.systemTimeS) < 0 || Double.compare(snapshot.childUserTimeS, lastSnapshot.childUserTimeS) < 0 || Double.compare(snapshot.childSystemTimeS, lastSnapshot.childSystemTimeS) < 0) { SystemMetricsLogger.wtf( TAG, "Cpu Time Decreased from " + lastSnapshot.toString() + " to " + snapshot.toString()); return false; } lastSnapshot.set(snapshot); return true; } CpuMetricsCollector(); @Override @ThreadSafe(enableChecks = false) boolean getSnapshot(CpuMetrics snapshot); @Override CpuMetrics createMetrics(); }
|
@Test public void testBrokenFile() throws Exception { TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(createFile("I am a weird android manufacturer")); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isFalse(); }
@Test public void testNegativeFields() throws Exception { StringBuilder testStringBuilder = new StringBuilder(); for (int i = 0; i < 20; i++) { testStringBuilder.append(-i * 100).append(' '); } TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(createFile(testStringBuilder.toString())); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isFalse(); }
@Test public void testErrorOnDecreasing() throws Exception { SystemMetricsLogger.Delegate logger = mock(SystemMetricsLogger.Delegate.class); SystemMetricsLogger.setDelegate(logger); StringBuilder initialEntry = new StringBuilder(); for (int i = 0; i < 20; i++) { initialEntry.append(i * 2000).append(' '); } String path = createFile(initialEntry.toString()); TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(path); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isTrue(); verify(logger, never()).wtf(anyString(), anyString(), (Throwable) any()); StringBuilder secondEntry = new StringBuilder(); for (int i = 0; i < 20; i++) { secondEntry.append(i * 1000).append(' '); } overwriteFile(new File(path), secondEntry.toString()); assertThat(collector.getSnapshot(snapshot)).isFalse(); verify(logger, times(1)).wtf(anyString(), anyString(), (Throwable) any()); }
@Test public void testRealProcfile() throws Exception { String stat = "21031 (facebook.katana) S 354 354 0 0 -1 1077952832 227718 1446 318 0 9852 889 6 11 20 0 133 0 502496 2050461696 70553 4294967295 1 1 0 0 0 0 4608 0 1166120188 4294967295 0 0 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0"; TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(createFile(stat)); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isTrue(); assertThat(snapshot.userTimeS).isEqualTo(9852.0 / 100); assertThat(snapshot.systemTimeS).isEqualTo(889.0 / 100); assertThat(snapshot.childUserTimeS).isEqualTo(6.0 / 100); assertThat(snapshot.childSystemTimeS).isEqualTo(11.0 / 100); }
@Test public void testSaneProcFile() throws Exception { StringBuilder testStringBuilder = new StringBuilder(); for (int i = 0; i < 20; i++) { testStringBuilder.append(i * 100).append(' '); } TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(createFile(testStringBuilder.toString())); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isTrue(); assertThat(snapshot.userTimeS).isEqualTo(13); assertThat(snapshot.systemTimeS).isEqualTo(14); assertThat(snapshot.childUserTimeS).isEqualTo(15); assertThat(snapshot.childSystemTimeS).isEqualTo(16); }
@Test public void testUnreadableProcFile() throws Exception { TestableCpuMetricsCollector collector = new TestableCpuMetricsCollector().setPath(""); CpuMetrics snapshot = new CpuMetrics(); assertThat(collector.getSnapshot(snapshot)).isFalse(); }
|
WakeLockMetrics extends SystemMetrics<WakeLockMetrics> { @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } WakeLockMetrics that = (WakeLockMetrics) o; if (isAttributionEnabled != that.isAttributionEnabled || heldTimeMs != that.heldTimeMs || acquiredCount != that.acquiredCount) { return false; } return Utilities.simpleArrayMapEquals(tagTimeMs, that.tagTimeMs); } WakeLockMetrics(); WakeLockMetrics(boolean isAttributionEnabled); @Override WakeLockMetrics sum(@Nullable WakeLockMetrics b, @Nullable WakeLockMetrics output); @Override WakeLockMetrics diff(@Nullable WakeLockMetrics b, @Nullable WakeLockMetrics output); @Override WakeLockMetrics set(WakeLockMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); @Nullable JSONObject attributionToJSONObject(); public boolean isAttributionEnabled; final SimpleArrayMap<String, Long> tagTimeMs; public long heldTimeMs; public long acquiredCount; }
|
@Test public void testEquals() { assertThat(new WakeLockMetrics()).isEqualTo(new WakeLockMetrics()); assertThat(createInitializedMetrics()).isEqualTo(createInitializedMetrics()); }
|
SensorMetrics extends SystemMetrics<SensorMetrics> { @Override public SensorMetrics diff(@Nullable SensorMetrics b, @Nullable SensorMetrics output) { if (output == null) { output = new SensorMetrics(isAttributionEnabled); } if (b == null) { output.set(this); } else { total.diff(b.total, output.total); if (output.isAttributionEnabled) { op(-1, sensorConsumption, b.sensorConsumption, output.sensorConsumption); } } return output; } SensorMetrics(); SensorMetrics(boolean isAttributionEnabled); @Override SensorMetrics sum(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics diff(@Nullable SensorMetrics b, @Nullable SensorMetrics output); @Override SensorMetrics set(SensorMetrics b); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); @Nullable JSONObject attributionToJSONObject(); public boolean isAttributionEnabled; final Consumption total; final SparseArray<Consumption> sensorConsumption; }
|
@Test public void testAttributedDiff() { SensorMetrics a = createAttributedMetrics(); SensorMetrics b = createAttributedMetrics(); SensorMetrics result = a.diff(b); assertThat(result).isEqualTo(new SensorMetrics(true)); }
|
UndertowEndpointManager implements WebSocketConnectionCallback, XEndpointManager<UndertowEndpoint> { UndertowEndpoint createEndpoint(WebSocketChannel channel) { final UndertowEndpoint endpoint = new UndertowEndpoint(this, channel); try { channel.setOption(Options.TCP_NODELAY, NODELAY); } catch (IOException e) { throw new OptionAssignmentException("Error setting option", e); } channel.getReceiveSetter().set(endpoint); channel.resumeReceives(); scanner.addEndpoint(endpoint); listener.onConnect(endpoint); if (idleTimeoutMillis != 0) { channel.setIdleTimeout(idleTimeoutMillis); } return endpoint; } UndertowEndpointManager(XEndpointScanner<UndertowEndpoint> scanner, int idleTimeoutMillis, XEndpointConfig<?> config,
XEndpointListener<? super UndertowEndpoint> listener); @Override void onConnect(WebSocketHttpExchange exchange, WebSocketChannel channel); @Override Collection<UndertowEndpoint> getEndpoints(); }
|
@Test(expected=OptionAssignmentException.class) public void testCreateEndpointWithError() throws IOException { @SuppressWarnings("unchecked") final XEndpointListener<UndertowEndpoint> listener = mock(XEndpointListener.class); final UndertowEndpointManager mgr = new UndertowEndpointManager(null, 0, new DerivedEndpointConfig(), listener); final WebSocketChannel channel = Mockito.mock(WebSocketChannel.class); when(channel.setOption(any(), any())).thenThrow(new IOException("Boom")); mgr.createEndpoint(channel); }
|
UndertowEndpoint extends AbstractReceiveListener implements XEndpoint { WebSocketCallback<Void> wrapCallback(XSendCallback callback) { return new WebSocketCallback<Void>() { private final AtomicBoolean onceOnly = new AtomicBoolean(); @Override public void complete(WebSocketChannel channel, Void context) { if (onceOnly.compareAndSet(false, true)) { backlog.decrementAndGet(); if (callback != null) callback.onComplete(UndertowEndpoint.this); } } @Override public void onError(WebSocketChannel channel, Void context, Throwable cause) { if (onceOnly.compareAndSet(false, true)) { backlog.decrementAndGet(); if (callback != null) callback.onError(UndertowEndpoint.this, cause); } } }; } UndertowEndpoint(UndertowEndpointManager manager, WebSocketChannel channel); @Override @SuppressWarnings("unchecked") T getContext(); @Override void setContext(Object context); @Override void send(String payload, XSendCallback callback); @Override void send(ByteBuffer payload, XSendCallback callback); WebSocketChannel getChannel(); @Override void flush(); @Override void sendPing(); @Override void close(); @Override void terminate(); @Override InetSocketAddress getRemoteAddress(); @Override long getBacklog(); @Override boolean isOpen(); @Override long getLastActivityTime(); @Override String toString(); }
|
@Test public void testCallbackOnceOnlyComplete() { createEndpointManager(); final XSendCallback callback = mock(XSendCallback.class); final WebSocketCallback<Void> wsCallback = endpoint.wrapCallback(callback); wsCallback.complete(channel, null); verify(callback, times(1)).onComplete(eq(endpoint)); wsCallback.complete(channel, null); verify(callback, times(1)).onComplete(eq(endpoint)); }
|
PomHelper { public static boolean updatePomVersions(List<PomUpdateStatus> pomsToChange, List<DependencyVersionChange> changes) throws IOException { Map<String, String> propertyChanges = new TreeMap<>(); for (PomUpdateStatus status : pomsToChange) { status.updateVersions(changes, propertyChanges); } if (!propertyChanges.isEmpty()) { for (PomUpdateStatus status : pomsToChange) { status.updateProperties(propertyChanges); } } boolean answer = false; for (PomUpdateStatus status : pomsToChange) { if (status.saveIfChanged()) { answer = true; } } return answer; } static boolean updatePomVersionsInPoms(File dir, List<DependencyVersionChange> changes); static boolean updatePomVersions(List<PomUpdateStatus> pomsToChange, List<DependencyVersionChange> changes); static boolean updatePluginVersion(Document doc, DependencyVersionChange change, Map<String, String> propertyChanges, boolean lazyAdd); static boolean updateDependencyVersion(Document doc, DependencyVersionChange change, Map<String, String> propertyChanges); static boolean updateProperties(Document doc, Map<String, String> propertyChanges); }
|
@Test public void testVersionReplacement() throws Exception { File outDir = Tests.copyPackageSources(getClass()); LOG.info("Updating poms in " + outDir); File[] files = outDir.listFiles(); assertNotNull("No output files!", files); assertTrue("No output files!", files.length > 0); List<PomUpdateStatus> pomsToChange = new ArrayList<>(); for (File file : files) { try { PomUpdateStatus pomUpdateStatus = PomUpdateStatus.createPomUpdateStatus(file); pomUpdateStatus.setRootPom(true); pomsToChange.add(pomUpdateStatus); } catch (Exception e) { fail("Failed to parse " + file, e); } } Map<String, String> propertyVersions = new HashMap<>(); propertyVersions.put("assertj.version", assertJVersion); propertyVersions.put("fabric8.maven.plugin.version", fmpVersion); propertyVersions.put("fabric8.version", fabric8Version); propertyVersions.put("spring-boot.version", springBootVersion); List<DependencyVersionChange> changes = new ArrayList<>(); changes.add(new MavenDependencyVersionChange("io.fabric8:fabric8-maven-plugin", fmpVersion, MavenScopes.PLUGIN, true, ElementProcessors.createFabric8MavenPluginElementProcessor())); changes.add(new DependencyVersionChange(Kind.MAVEN, "org.assertj:assertj-core", assertJVersion, MavenScopes.ARTIFACT)); changes.add(new DependencyVersionChange(Kind.MAVEN, "io.fabric8:fabric8-project-bom-with-platform-deps", fabric8Version, MavenScopes.ARTIFACT)); changes.add(new DependencyVersionChange(Kind.MAVEN, "org.springframework.boot:spring-boot-dependencies", springBootVersion, MavenScopes.ARTIFACT)); PomHelper.updatePomVersions(pomsToChange, changes); for (File file : files) { Document doc; try { doc = parseXmlFile(file); } catch (Exception e) { fail("Failed to parse " + file + " due to " + e, e); continue; } assertPropertiesValid(file, doc, propertyVersions); assertChangesValid(file, doc, changes); } }
|
BorderingDistanceMetric implements SpatialDistanceMetric { @Override public double distance(Geometry g1, Geometry g2) { if (adjacencyList.isEmpty()) { throw new UnsupportedOperationException(); } int srcId = getContainingGeometry(g1); int destId = getContainingGeometry(g2); if (srcId < 0) { throw new IllegalArgumentException("No containing geometry for source geometry"); } if (destId < 0) { throw new IllegalArgumentException("No containing geometry for destination geometry"); } if (srcId == destId) { return 0; } TIntSet seen = new TIntHashSet(); TIntLinkedList queue = new TIntLinkedList(); for (int id: adjacencyList.get(srcId).toArray()) { if (id == destId) { return 1; } queue.add(id); seen.add(id); } for (int level = 2; level <= maxSteps; level++) { int nodes = queue.size(); for (int i = 0; i < nodes; i++) { int id = queue.removeAt(0); if (!adjacencyList.containsKey(id)) { continue; } for (int id2 : adjacencyList.get(id).toArray()) { if (id2 == destId) { return level; } if (!seen.contains(id2)) { queue.add(id2); seen.add(id2); } } } } return Double.POSITIVE_INFINITY; } BorderingDistanceMetric(SpatialDataDao dao, String layer); @Override void setValidConcepts(TIntSet concepts); @Override void enableCache(boolean enable); @Override String getName(); @Override double distance(Geometry g1, Geometry g2); @Override float[][] distance(List<Geometry> rowGeometries, List<Geometry> colGeometries); @Override float[][] distance(List<Geometry> geometries); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors, double maxDistance); void setBufferWidth(double bufferWidth); void setMaxSteps(int maxSteps); void setForceContains(boolean forceContains); }
|
@Test public void testSimple() throws DaoException { assertEquals(0, metric.distance(g("Minnesota"), g("Minnesota")), 0.01); assertEquals(1, metric.distance(g("Wisconsin"), g("Minnesota")), 0.01); assertEquals(1, metric.distance(g("North Dakota"), g("Minnesota")), 0.01); assertEquals(1, metric.distance(g("South Dakota"), g("Minnesota")), 0.01); assertEquals(1, metric.distance(g("Iowa"), g("Minnesota")), 0.01); assertEquals(2, metric.distance(g("Illinois"), g("Minnesota")), 0.01); assertEquals(4, metric.distance(g("Texas"), g("Minnesota")), 0.01); assertEquals(0, metric.distance(g("Minnesota").getCentroid(), g("Minnesota")), 0.01); assertEquals(0, metric.distance(g("Minnesota"), g("Minnesota").getCentroid()), 0.01); assertEquals(4, metric.distance(g("Minnesota").getCentroid(), g("Texas").getCentroid()), 0.01); }
|
Configurator implements Cloneable { public <T> T get(Class<T> klass, String name) throws ConfigurationException { return get(klass, name, null); } Configurator(Configuration conf); Configuration getConf(); T get(Class<T> klass, String name); T get(Class<T> klass, String name, String runtimeKey, String runtimeValue); T get(Class<T> klass, String name, Map<String, String> runtimeParams); String resolveComponentName(Class klass, String name); Config getConfig(Class klass, String name); T construct(Class<T> klass, String name, Config conf, Map<String, String> runtimeParams); T get(Class<T> klass); void close(); static final int MAX_FILE_SIZE; }
|
@Test public void testRuntimeParams() throws ConfigurationException, IOException { Configurator conf = new Configurator(new Configuration()); Integer i1 = conf.get(Integer.class, "foo"); assertEquals(i1, 42); Integer i2 = conf.get(Integer.class, "foo"); assertEquals(i1, i2); Map<String, String> args3 = new HashMap<String, String>(); args3.put("overrideConstant", "423523524"); Integer i3 = conf.get(Integer.class, "foo", args3); assertEquals(i3, 423523524); Map<String, String> args4 = new HashMap<String, String>(); args4.put("overrideConstant", "423523236"); Integer i4 = conf.get(Integer.class, "foo", args4); assertEquals(i4, 423523236); assertEquals(new Integer(423523524), i3); assertNotSame(new Integer(423523524), i3); Integer i5 = conf.get(Integer.class, "foo", args3); assertEquals(i3, i5); assertSame(i3, i5); }
@Test public void testSimple() throws ConfigurationException { Configurator conf = new Configurator(new Configuration()); Integer i = conf.get(Integer.class, "foo"); assertEquals(i, 42); Integer j = conf.get(Integer.class, "bar"); assertEquals(j, 23); Integer k = conf.get(Integer.class, "baz"); assertEquals(k, 0); Integer l = conf.get(Integer.class, "biff"); assertEquals(l, 1); }
@Test public void testNonExistentJar() throws ConfigurationException { String separator = System.getProperty("path.separator"); System.setProperty("java.class.path", System.getProperty("java.class.path") + separator + "foobarbaz.jar"); Configurator conf = new Configurator(new Configuration()); Integer i = conf.get(Integer.class, "foo"); assertEquals(i, 42); Integer j = conf.get(Integer.class, "bar"); assertEquals(j, 23); Integer k = conf.get(Integer.class, "baz"); assertEquals(k, 0); Integer l = conf.get(Integer.class, "biff"); assertEquals(l, 1); }
|
SparseMatrix implements Matrix<SparseMatrixRow> { @Override public Iterator<SparseMatrixRow> iterator() { return new SparseMatrixIterator(); } SparseMatrix(File path); long lastModified(); @Override SparseMatrixRow getRow(int rowId); @Override int[] getRowIds(); @Override int getNumRows(); ValueConf getValueConf(); @Override Iterator<SparseMatrixRow> iterator(); void close(); @Override File getPath(); static final Logger LOG; static final int DEFAULT_HEADER_SIZE; static final int FILE_HEADER; }
|
@Test public void testWrite() throws IOException { File tmp = File.createTempFile("matrix", null); SparseMatrixWriter.write(tmp, srcRows.iterator()); }
@Test public void testReadWrite() throws IOException { File tmp = File.createTempFile("matrix", null); SparseMatrixWriter.write(tmp, srcRows.iterator()); Matrix m1 = new SparseMatrix(tmp); Matrix m2 = new SparseMatrix(tmp); }
@Test public void testTranspose() throws IOException { for (int numOpenPages: new int[] { 1, Integer.MAX_VALUE}) { File tmp1 = File.createTempFile("matrix", null); File tmp2 = File.createTempFile("matrix", null); File tmp3 = File.createTempFile("matrix", null); SparseMatrixWriter.write(tmp1, srcRows.iterator()); SparseMatrix m = new SparseMatrix(tmp1); verifyIsSourceMatrix(m); new SparseMatrixTransposer(m, tmp2, 1).transpose(); SparseMatrix m2 = new SparseMatrix(tmp2); new SparseMatrixTransposer(m2, tmp3, 1).transpose(); Matrix m3 = new SparseMatrix(tmp3); verifyIsSourceMatrixUnordered(m3, .001); } }
@Test public void testRows() throws IOException { for (int numOpenPages: new int[] { 1, Integer.MAX_VALUE}) { File tmp = File.createTempFile("matrix", null); SparseMatrixWriter.write(tmp, srcRows.iterator()); Matrix m = new SparseMatrix(tmp); verifyIsSourceMatrix(m); } }
|
DenseMatrix implements Matrix<DenseMatrixRow> { @Override public Iterator<DenseMatrixRow> iterator() { return new DenseMatrixIterator(); } DenseMatrix(File path); @Override DenseMatrixRow getRow(int rowId); @Override int[] getRowIds(); int[] getColIds(); @Override int getNumRows(); ValueConf getValueConf(); @Override Iterator<DenseMatrixRow> iterator(); @Override File getPath(); @Override void close(); static final Logger LOG; static final int FILE_HEADER; static final int DEFAULT_HEADER_SIZE; }
|
@Test public void testWrite() throws IOException { File tmp = File.createTempFile("matrix", null); DenseMatrixWriter.write(tmp, srcRows.iterator()); }
@Test public void testReadWrite() throws IOException { File tmp = File.createTempFile("matrix", null); DenseMatrixWriter.write(tmp, srcRows.iterator()); DenseMatrix m1 = new DenseMatrix(tmp); DenseMatrix m2 = new DenseMatrix(tmp); }
@Test public void testTranspose() throws IOException { for (int numOpenPages: new int[] { 1, Integer.MAX_VALUE}) { File tmp1 = File.createTempFile("matrix", null); File tmp2 = File.createTempFile("matrix", null); File tmp3 = File.createTempFile("matrix", null); DenseMatrixWriter.write(tmp1, srcRows.iterator()); DenseMatrix m = new DenseMatrix(tmp1); verifyIsSourceMatrix(m); } }
@Test public void testRows() throws IOException { for (int numOpenPages: new int[] { 1, Integer.MAX_VALUE}) { File tmp = File.createTempFile("matrix", null); DenseMatrixWriter.write(tmp, srcRows.iterator()); DenseMatrix m = new DenseMatrix(tmp); verifyIsSourceMatrix(m); } }
|
PageViewUtils { public static SortedSet<DateTime> timestampsInInterval(DateTime start, DateTime end) { if (start.isAfter(end)) { throw new IllegalArgumentException(); } DateTime current = new DateTime( start.year().get(), start.monthOfYear().get(), start.dayOfMonth().get(), start.hourOfDay().get(), 0); if (current.isBefore(start)) { current = current.plusHours(1); } SortedSet<DateTime> result = new TreeSet<DateTime>(); while (!current.isAfter(end)) { result.add(current); current = current.plusHours(1); } return result; } static SortedSet<DateTime> timestampsInInterval(DateTime start, DateTime end); }
|
@Test public void testTstampsInRange() { long now = System.currentTimeMillis(); Random random = new Random(); for (int i = 0; i < 1000; i++) { long tstamp = (long) (random.nextDouble() * now); DateTime beg = new DateTime(tstamp); DateTime end = beg.plusHours(1); SortedSet<DateTime> tstamps = PageViewUtils.timestampsInInterval(beg, end); assertEquals(tstamps.size(), 1); DateTime dt = tstamps.first(); assertTrue(beg.isBefore(dt)); assertTrue(end.isAfter(dt)); } }
|
BorderingDistanceMetric implements SpatialDistanceMetric { @Override public List<Neighbor> getNeighbors(Geometry g, int maxNeighbors) { return getNeighbors(g, maxNeighbors, Double.MAX_VALUE); } BorderingDistanceMetric(SpatialDataDao dao, String layer); @Override void setValidConcepts(TIntSet concepts); @Override void enableCache(boolean enable); @Override String getName(); @Override double distance(Geometry g1, Geometry g2); @Override float[][] distance(List<Geometry> rowGeometries, List<Geometry> colGeometries); @Override float[][] distance(List<Geometry> geometries); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors, double maxDistance); void setBufferWidth(double bufferWidth); void setMaxSteps(int maxSteps); void setForceContains(boolean forceContains); }
|
@Test public void testKnn() throws DaoException { List<SpatialDistanceMetric.Neighbor> neighbors = metric.getNeighbors(g("Minnesota"), 100); for (int i = 0; i < neighbors.size(); i++) { SpatialDistanceMetric.Neighbor n = neighbors.get(i); String name = n(n.conceptId); if (i == 0) { assertEquals(name, "Minnesota"); } else if (i <= 4) { assertTrue(Arrays.asList("North Dakota", "Iowa", "Wisconsin", "South Dakota").contains(name)); } } SpatialDistanceMetric.Neighbor last = neighbors.get(neighbors.size() - 1); assertEquals(8.0, last.distance, 0.01); assertEquals("Maine", n(last.conceptId)); }
|
Language implements Comparable<Language>, Serializable { public static Language getByLangCode(String langCode) { langCode = langCode.replace('_', '-').toLowerCase(); if (WIKIDATA.getLangCode().equals(langCode)) { return WIKIDATA; } for (Language lang : LANGUAGES) { if (lang.langCode.equalsIgnoreCase(langCode)) { return lang; } } throw new IllegalArgumentException("unknown langCode: '" + langCode + "'"); } private Language(short id, String langCode, String enLangName, String nativeName); short getId(); String getLangCode(); String getEnLangName(); String getNativeName(); Locale getLocale(); static Language getByLangCode(String langCode); static Language getByLangCodeLenient(String langCode); static Language getByFullLangName(String language); static boolean hasLangCode(String langCode); static Language getById(int id); String getDomain(); LanguageInfo getLanguageInfo(); @Override int compareTo(Language language); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static final String LANGUAGE_TSV; static Language[] LANGUAGES; static Language WIKIDATA; static final Language EN; static final Language DE; static final Language FR; static final Language NL; static final Language IT; static final Language PL; static final Language ES; static final Language RU; static final Language JA; static final Language PT; static final Language ZH; static final Language SV; static final Language VI; static final Language UK; static final Language CA; static final Language NO; static final Language FI; static final Language CS; static final Language HU; static final Language KO; static final Language FA; static final Language ID; static final Language TR; static final Language AR; static final Language RO; static final Language SK; static final Language EO; static final Language DA; static final Language SR; static final Language LT; static final Language MS; static final Language HE; static final Language EU; static final Language SL; static 
final Language BG; static final Language KK; static final Language VO; static final Language HR; static final Language WAR; static final Language HI; static final Language ET; static final Language GL; static final Language AZ; static final Language NN; static final Language SIMPLE; static final Language LA; static final Language EL; static final Language TH; static final Language NEW; static final Language ROA_RUP; static final Language OC; static final Language SH; static final Language KA; static final Language MK; static final Language TL; static final Language HT; static final Language PMS; static final Language TE; static final Language TA; static final Language BE_X_OLD; static final Language BE; static final Language BR; static final Language CEB; static final Language LV; static final Language SQ; static final Language JV; static final Language MG; static final Language CY; static final Language LB; static final Language MR; static final Language IS; static final Language BS; static final Language YO; static final Language AN; static final Language LMO; static final Language HY; static final Language FY; static final Language BPY; static final Language ML; static final Language PNB; static final Language SW; static final Language BN; static final Language IO; static final Language AF; static final Language GU; static final Language ZH_YUE; static final Language NE; static final Language NDS; static final Language UR; static final Language KU; static final Language UZ; static final Language AST; static final Language SCN; static final Language SU; static final Language QU; static final Language DIQ; static final Language BA; static final Language TT; static final Language MY; static final Language GA; static final Language CV; static final Language IA; static final Language NAP; static final Language BAT_SMG; static final Language MAP_BMS; static final Language WA; static final Language ALS; static final Language KN; static final Language AM; static final 
Language GD; static final Language BUG; static final Language TG; static final Language ZH_MIN_NAN; static final Language YI; static final Language VEC; static final Language SCO; static final Language HIF; static final Language ROA_TARA; static final Language OS; static final Language ARZ; static final Language NAH; static final Language MZN; static final Language SAH; static final Language KY; static final Language MN; static final Language SA; static final Language PAM; static final Language HSB; static final Language LI; static final Language MI; static final Language SI; static final Language CO; static final Language CKB; static final Language GAN; static final Language GLK; static final Language BO; static final Language FO; static final Language BAR; static final Language BCL; static final Language ILO; static final Language MRJ; static final Language SE; static final Language FIU_VRO; static final Language NDS_NL; static final Language TK; static final Language VLS; static final Language PS; static final Language GV; static final Language RUE; static final Language DV; static final Language NRM; static final Language PAG; static final Language PA; static final Language KOI; static final Language RM; static final Language KM; static final Language KV; static final Language UDM; static final Language CSB; static final Language MHR; static final Language FUR; static final Language MT; static final Language ZEA; static final Language WUU; static final Language LIJ; static final Language UG; static final Language LAD; static final Language PI; static final Language XMF; static final Language SC; static final Language BH; static final Language ZH_CLASSICAL; static final Language OR; static final Language NOV; static final Language KSH; static final Language ANG; static final Language SO; static final Language KW; static final Language STQ; static final Language NV; static final Language HAK; static final Language FRR; static final Language AY; static final 
Language FRP; static final Language EXT; static final Language SZL; static final Language PCD; static final Language IE; static final Language GAG; static final Language HAW; static final Language XAL; static final Language LN; static final Language RW; static final Language PDC; static final Language PFL; static final Language VEP; static final Language KRC; static final Language CRH; static final Language EML; static final Language GN; static final Language ACE; static final Language TO; static final Language CE; static final Language KL; static final Language ARC; static final Language MYV; static final Language DSB; static final Language AS; static final Language BJN; static final Language PAP; static final Language TPI; static final Language LBE; static final Language MDF; static final Language WO; static final Language JBO; static final Language KAB; static final Language SN; static final Language AV; static final Language CBK_ZAM; static final Language TY; static final Language SRN; static final Language KBD; static final Language LO; static final Language LEZ; static final Language AB; static final Language MWL; static final Language LTG; static final Language NA; static final Language IG; static final Language KG; static final Language TET; static final Language ZA; static final Language KAA; static final Language NSO; static final Language ZU; static final Language RMY; static final Language CU; static final Language TN; static final Language CHR; static final Language CHY; static final Language GOT; static final Language SM; static final Language BI; static final Language MO; static final Language BM; static final Language IU; static final Language PIH; static final Language IK; static final Language SS; static final Language SD; static final Language PNT; static final Language CDO; static final Language EE; static final Language HA; static final Language TI; static final Language BXR; static final Language TS; static final Language OM; static final 
Language KS; static final Language KI; static final Language VE; static final Language SG; static final Language RN; static final Language CR; static final Language DZ; static final Language LG; static final Language AK; static final Language FF; static final Language TUM; static final Language FJ; static final Language ST; static final Language TW; static final Language XH; static final Language CH; static final Language NY; static final Language NG; static final Language II; static final Language CHO; static final Language MH; static final Language AA; static final Language KJ; static final Language HO; static final Language MUS; static final Language KR; static final Language HZ; }
|
// Unknown language codes must fail fast with IllegalArgumentException rather
// than returning null ("zz" is not a registered language code here).
@Test(expected=IllegalArgumentException.class) public void testNonexistentByLangCode() { Language.getByLangCode("zz"); }
|
// Registry of supported language editions. The focal method getById() maps a
// 1-based language id into the LANGUAGES array (id N -> LANGUAGES[N-1]) and
// throws IllegalArgumentException for ids outside [1, LANGUAGES.length]
// (exercised by testNonexistentById). Everything after the focal method is a
// signature-only summary of the rest of the class.
Language implements Comparable<Language>, Serializable { public static Language getById(int id) { if (0 < id && id <= LANGUAGES.length) { return LANGUAGES[id-1]; } else { throw new IllegalArgumentException("unknown language id: '" + id + "'"); } } private Language(short id, String langCode, String enLangName, String nativeName); short getId(); String getLangCode(); String getEnLangName(); String getNativeName(); Locale getLocale(); static Language getByLangCode(String langCode); static Language getByLangCodeLenient(String langCode); static Language getByFullLangName(String language); static boolean hasLangCode(String langCode); static Language getById(int id); String getDomain(); LanguageInfo getLanguageInfo(); @Override int compareTo(Language language); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static final String LANGUAGE_TSV; static Language[] LANGUAGES; static Language WIKIDATA; static final Language EN; static final Language DE; static final Language FR; static final Language NL; static final Language IT; static final Language PL; static final Language ES; static final Language RU; static final Language JA; static final Language PT; static final Language ZH; static final Language SV; static final Language VI; static final Language UK; static final Language CA; static final Language NO; static final Language FI; static final Language CS; static final Language HU; static final Language KO; static final Language FA; static final Language ID; static final Language TR; static final Language AR; static final Language RO; static final Language SK; static final Language EO; static final Language DA; static final Language SR; static final Language LT; static final Language MS; static final Language HE; static final Language EU; static final Language SL; static final Language BG; static final Language KK; static final Language VO; static final Language HR; static final Language WAR; static final Language HI; static final 
Language ET; static final Language GL; static final Language AZ; static final Language NN; static final Language SIMPLE; static final Language LA; static final Language EL; static final Language TH; static final Language NEW; static final Language ROA_RUP; static final Language OC; static final Language SH; static final Language KA; static final Language MK; static final Language TL; static final Language HT; static final Language PMS; static final Language TE; static final Language TA; static final Language BE_X_OLD; static final Language BE; static final Language BR; static final Language CEB; static final Language LV; static final Language SQ; static final Language JV; static final Language MG; static final Language CY; static final Language LB; static final Language MR; static final Language IS; static final Language BS; static final Language YO; static final Language AN; static final Language LMO; static final Language HY; static final Language FY; static final Language BPY; static final Language ML; static final Language PNB; static final Language SW; static final Language BN; static final Language IO; static final Language AF; static final Language GU; static final Language ZH_YUE; static final Language NE; static final Language NDS; static final Language UR; static final Language KU; static final Language UZ; static final Language AST; static final Language SCN; static final Language SU; static final Language QU; static final Language DIQ; static final Language BA; static final Language TT; static final Language MY; static final Language GA; static final Language CV; static final Language IA; static final Language NAP; static final Language BAT_SMG; static final Language MAP_BMS; static final Language WA; static final Language ALS; static final Language KN; static final Language AM; static final Language GD; static final Language BUG; static final Language TG; static final Language ZH_MIN_NAN; static final Language YI; static final Language VEC; static 
final Language SCO; static final Language HIF; static final Language ROA_TARA; static final Language OS; static final Language ARZ; static final Language NAH; static final Language MZN; static final Language SAH; static final Language KY; static final Language MN; static final Language SA; static final Language PAM; static final Language HSB; static final Language LI; static final Language MI; static final Language SI; static final Language CO; static final Language CKB; static final Language GAN; static final Language GLK; static final Language BO; static final Language FO; static final Language BAR; static final Language BCL; static final Language ILO; static final Language MRJ; static final Language SE; static final Language FIU_VRO; static final Language NDS_NL; static final Language TK; static final Language VLS; static final Language PS; static final Language GV; static final Language RUE; static final Language DV; static final Language NRM; static final Language PAG; static final Language PA; static final Language KOI; static final Language RM; static final Language KM; static final Language KV; static final Language UDM; static final Language CSB; static final Language MHR; static final Language FUR; static final Language MT; static final Language ZEA; static final Language WUU; static final Language LIJ; static final Language UG; static final Language LAD; static final Language PI; static final Language XMF; static final Language SC; static final Language BH; static final Language ZH_CLASSICAL; static final Language OR; static final Language NOV; static final Language KSH; static final Language ANG; static final Language SO; static final Language KW; static final Language STQ; static final Language NV; static final Language HAK; static final Language FRR; static final Language AY; static final Language FRP; static final Language EXT; static final Language SZL; static final Language PCD; static final Language IE; static final Language GAG; static final 
Language HAW; static final Language XAL; static final Language LN; static final Language RW; static final Language PDC; static final Language PFL; static final Language VEP; static final Language KRC; static final Language CRH; static final Language EML; static final Language GN; static final Language ACE; static final Language TO; static final Language CE; static final Language KL; static final Language ARC; static final Language MYV; static final Language DSB; static final Language AS; static final Language BJN; static final Language PAP; static final Language TPI; static final Language LBE; static final Language MDF; static final Language WO; static final Language JBO; static final Language KAB; static final Language SN; static final Language AV; static final Language CBK_ZAM; static final Language TY; static final Language SRN; static final Language KBD; static final Language LO; static final Language LEZ; static final Language AB; static final Language MWL; static final Language LTG; static final Language NA; static final Language IG; static final Language KG; static final Language TET; static final Language ZA; static final Language KAA; static final Language NSO; static final Language ZU; static final Language RMY; static final Language CU; static final Language TN; static final Language CHR; static final Language CHY; static final Language GOT; static final Language SM; static final Language BI; static final Language MO; static final Language BM; static final Language IU; static final Language PIH; static final Language IK; static final Language SS; static final Language SD; static final Language PNT; static final Language CDO; static final Language EE; static final Language HA; static final Language TI; static final Language BXR; static final Language TS; static final Language OM; static final Language KS; static final Language KI; static final Language VE; static final Language SG; static final Language RN; static final Language CR; static final Language 
DZ; static final Language LG; static final Language AK; static final Language FF; static final Language TUM; static final Language FJ; static final Language ST; static final Language TW; static final Language XH; static final Language CH; static final Language NY; static final Language NG; static final Language II; static final Language CHO; static final Language MH; static final Language AA; static final Language KJ; static final Language HO; static final Language MUS; static final Language KR; static final Language HZ; }
|
// Ids are 1-based, so -1 is out of range and must raise IllegalArgumentException.
@Test(expected=IllegalArgumentException.class) public void testNonexistentById() { Language.getById(-1); }
|
// A wiki page title bound to language metadata. The focal (String, LanguageInfo)
// constructor delegates to the three-argument form with isCanonical = false,
// i.e. the text is treated as not-yet-canonicalized. Remaining members are a
// signature-only summary.
Title implements Externalizable { public Title(String text, LanguageInfo language) { this(text, false, language); } Title(String text, LanguageInfo language); Title(String text, boolean isCanonical, LanguageInfo lang); Title(String title, Language language); String getCanonicalTitle(); LanguageInfo getLanguageInfo(); Language getLanguage(); NameSpace getNamespace(); String getNamespaceString(); String getTitleStringWithoutNamespace(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object o); Title toUpperCase(); String toUrl(); long longHashCode(); static long longHashCode(Language l, String title, NameSpace ns); static long longHashCode(int langId, String title, int nsArbitraryId); String[] getNameAndDisambiguator(); static String canonicalize(String title, LanguageInfo lang); @Override void readExternal(ObjectInput in); @Override void writeExternal(ObjectOutput out); }
|
// Verifies Title's namespace parsing for article titles containing colons,
// explicit Talk:/Category: prefixes, and an empty title remainder.
// Defect fixed: the original used bare `assert` statements, which are no-ops
// unless the JVM runs with -ea — the checks were silently skipped under a
// default JUnit run. Converted to JUnit assertions (also gives real messages).
@Test
public void testTitle() {
    LanguageInfo lang = LanguageInfo.getByLangCode("en");

    // A colon inside an ordinary title is not a namespace separator.
    Title pokemon = new Title("Pokemon: The Movie", lang);
    assertNull(pokemon.getNamespaceString());
    assertEquals(NameSpace.ARTICLE, pokemon.getNamespace());
    assertEquals("Pokemon: The Movie", pokemon.getTitleStringWithoutNamespace());

    Title axelson = new Title("Ax:son Johnson family", lang);
    assertNull(axelson.getNamespaceString());
    assertEquals(NameSpace.ARTICLE, axelson.getNamespace());
    assertEquals("Ax:son Johnson family", axelson.getTitleStringWithoutNamespace());

    // An explicit, recognized namespace prefix is detected and stripped.
    Title pokemonTalk = new Title("Talk:Pokemon: The Movie", lang);
    assertEquals("Talk", pokemonTalk.getNamespaceString());
    assertEquals(NameSpace.TALK, pokemonTalk.getNamespace());
    assertEquals("Pokemon: The Movie", pokemonTalk.getTitleStringWithoutNamespace());

    // A namespace prefix followed only by whitespace yields an empty title body.
    Title badCategory = new Title("Category: ", lang);
    assertEquals("Category", badCategory.getNamespaceString());
    assertEquals(NameSpace.CATEGORY, badCategory.getNamespace());
    assertEquals("", badCategory.getTitleStringWithoutNamespace());
}
|
// Parses one Wikidata JSON dump record into a WikidataEntity via the Wikidata
// Toolkit's Jackson document model. Aliases, descriptions and labels are copied
// only for languages accepted by validLanguage(); statements are extracted only
// when the document is a JacksonItemDocument. IOExceptions from Jackson are
// logged (with the offending JSON) and rethrown as WpParseException.
WikidataParser { public WikidataEntity parse(String json) throws WpParseException { JacksonTermedStatementDocument mwDoc; try { mwDoc = mapper.readValue(json, JacksonTermedStatementDocument.class); } catch (IOException e) { LOG.info("Error parsing: " + json); throw new WpParseException(e); } WikidataEntity record = new WikidataEntity(mwDoc.getEntityId().getId()); for (List<MonolingualTextValue> vlist : mwDoc.getAliases().values()) { if (vlist.isEmpty()) continue; if (!validLanguage(vlist.get(0).getLanguageCode())) continue; Language lang = Language.getByLangCodeLenient(vlist.get(0).getLanguageCode()); record.getAliases().put(lang, new ArrayList<String>()); for (MonolingualTextValue v : vlist) { record.getAliases().get(lang).add(v.getText()); } } for (MonolingualTextValue v : mwDoc.getDescriptions().values()) { if (validLanguage(v.getLanguageCode())) { Language lang = Language.getByLangCodeLenient(v.getLanguageCode()); record.getDescriptions().put(lang, v.getText()); } } for (MonolingualTextValue v : mwDoc.getLabels().values()) { if (validLanguage(v.getLanguageCode())) { Language lang = Language.getByLangCodeLenient(v.getLanguageCode()); record.getLabels().put(lang, v.getText()); } } if (mwDoc instanceof JacksonItemDocument) { for (List<JacksonStatement> statements : ((JacksonItemDocument)mwDoc).getJsonClaims().values()) { for (JacksonStatement s : statements) { record.getStatements().add(parseOneClaim(record, s)); } } } return record; } WikidataParser(); WikidataParser(LanguageSet langs); WikidataEntity parse(String json); WikidataValue snakToValue(final String type, Value snak); WikidataValue jsonToValue(String type, JsonElement element); static Object gsonToPrimitive(JsonElement element); }
|
// Parses the bundled /testPage.json fixture and spot-checks the resulting
// entity's type, id, a label, a description, and the first statement.
// Defect fixed: every assertEquals had its arguments reversed — JUnit's
// contract is assertEquals(expected, actual); the reversed order produces
// misleading failure messages ("expected <actual> but was <expected>").
@Test
public void testWikidataRawRecord() throws IOException, WpParseException {
    String json = WpIOUtils.resourceToString("/testPage.json");
    WikidataParser parser = new WikidataParser();
    WikidataEntity entity = parser.parse(json);

    assertEquals(WikidataEntity.Type.ITEM, entity.getType());
    assertEquals(157, entity.getId());
    assertEquals("Fran\u00e7ois Hollande", entity.getLabels().get(Language.ES));
    assertEquals("24th President of the French Republic", entity.getDescriptions().get(Language.EN));

    WikidataStatement stm = entity.getStatements().get(0);
    assertEquals(40, stm.getProperty().getId());
    assertEquals(WikidataValue.Type.ITEM, stm.getValue().getType());
    assertEquals(16783695, stm.getValue().getItemValue());
}
|
// VectorSimilarity implementation based on the Google similarity measure.
// The focal constructor records numPages — presumably the total corpus size
// used as the normalizing total (cf. SimUtils.googleSimilarity's numTotal
// parameter) — TODO confirm against the full class.
GoogleSimilarity implements VectorSimilarity { public GoogleSimilarity(int numPages) { this.numPages = numPages; } GoogleSimilarity(int numPages); @Override synchronized void setMatrices(SparseMatrix features, SparseMatrix transpose, File dataDir); @Override double similarity(TIntFloatMap vector1, TIntFloatMap vector2); @Override double similarity(MatrixRow a, MatrixRow b); @Override SRResultList mostSimilar(TIntFloatMap query, int maxResults, TIntSet validIds); @Override double getMinValue(); @Override double getMaxValue(); }
|
// Compares the locally computed Google similarity of the two fixture rows
// against SimUtils.googleSimilarity invoked with the rows' known statistics
// (|A| = 6, |B| = 5, |A ∩ B| = 3).
@Test
public void testUtils() {
    TIntFloatMap vectorA = getMap(ROW1_IDS, ROW1_VALS);
    TIntFloatMap vectorB = getMap(ROW2_IDS, ROW2_VALS);
    double reference = googleSimilarity(vectorA, vectorB);
    double computed = SimUtils.googleSimilarity(6, 5, 3, NUM_PAGES);
    assertEquals(reference, computed, 0.0001);
}
|
// VectorSimilarity implementation computing cosine similarity; the focal
// MatrixRow overload simply delegates to SimUtils.cosineSimilarity.
CosineSimilarity implements VectorSimilarity { @Override public double similarity(MatrixRow a, MatrixRow b) { return SimUtils.cosineSimilarity(a, b); } @Override synchronized void setMatrices(SparseMatrix features, SparseMatrix transpose, File dataDir); @Override double similarity(MatrixRow a, MatrixRow b); @Override double similarity(TIntFloatMap vector1, TIntFloatMap vector2); @Override SRResultList mostSimilar(TIntFloatMap query, int maxResults, TIntSet validIds); @Override double getMinValue(); @Override double getMaxValue(); }
|
// The map-based similarity overload must match the reference computation and,
// since cosine similarity is symmetric, give the same value in both argument
// orders.
@Test
public void testMap() {
    TIntFloatMap vectorA = getMap(ROW1_IDS, ROW1_VALS);
    TIntFloatMap vectorB = getMap(ROW2_IDS, ROW2_VALS);
    double reference = cosineSimilarity(vectorA, vectorB);
    assertEquals(reference, new CosineSimilarity().similarity(vectorA, vectorB), 0.0001);
    assertEquals(reference, new CosineSimilarity().similarity(vectorB, vectorA), 0.0001);
}
// Row-based variant: the MatrixRow overload must agree with the map-based
// reference computation, in both argument orders (cosine is symmetric).
@Test
public void testRows() {
    TIntFloatMap mapA = getMap(ROW1_IDS, ROW1_VALS);
    TIntFloatMap mapB = getMap(ROW2_IDS, ROW2_VALS);
    SparseMatrixRow rowA = getRow(ROW1_IDS, ROW1_VALS);
    SparseMatrixRow rowB = getRow(ROW2_IDS, ROW2_VALS);
    double reference = cosineSimilarity(mapA, mapB);
    assertEquals(reference, new CosineSimilarity().similarity(rowA, rowB), 0.0001);
    assertEquals(reference, new CosineSimilarity().similarity(rowB, rowA), 0.0001);
}
// Micro-benchmark, excluded from normal runs via @Ignore: times 100 x 10,000
// cosine-similarity calls over row pairs sharing 10 random overlapping ids and
// prints the elapsed milliseconds plus the mean similarity (the sum keeps the
// JIT from eliminating the calls). Left byte-identical: statement order and
// loop structure are the thing being measured.
@Ignore @Test public void benchmark() { int numOuter = 100; int numInner = 10000; long before = System.currentTimeMillis(); double sum = 0; Random random = new Random(); for (int i = 0; i < numOuter; i++) { int overlap[] = new int[10]; for (int j = 0; j < overlap.length; j++) { overlap[j] = random.nextInt(Integer.MAX_VALUE / 10); } SparseMatrixRow row1 = makeRow(100, overlap); SparseMatrixRow row2 = makeRow(100, overlap); CosineSimilarity sim = new CosineSimilarity(); for (int j = 0; j < numInner; j++) { sum += sim.similarity(row1, row2); } } long after = System.currentTimeMillis(); System.out.println("elapsed is " + (after - before) + " sim is " + (sum / (numOuter * numInner))); }
|
// Focal method: cosine similarity between two sparse Trove vectors,
//   cos(X, Y) = (X . Y) / sqrt((X . X) * (Y . Y)).
// One pass over X accumulates X.X and (for ids present in both maps) X.Y; a
// second pass over Y's values accumulates Y.Y. Returns 0.0 when either vector
// has zero norm — the product test `xDotX * yDotY != 0` guards the division.
SimUtils { public static double cosineSimilarity(TIntDoubleMap X, TIntDoubleMap Y) { double xDotX = 0.0; double yDotY = 0.0; double xDotY = 0.0; for (int id : X.keys()) { double x = X.get(id); xDotX += x * x; if (Y.containsKey(id)) { xDotY += x * Y.get(id); } } for (double y : Y.values()) { yDotY += y * y; } return xDotX * yDotY != 0 ? xDotY / Math.sqrt(xDotX * yDotY): 0.0; } static double cosineSimilarity(TIntDoubleMap X, TIntDoubleMap Y); static double cosineSimilarity(TIntFloatMap X, TIntFloatMap Y); static double cosineSimilarity(MatrixRow a, MatrixRow b); static double googleSimilarity(int sizeA, int sizeB, int intersection, int numTotal); static TIntDoubleMap normalizeVector(TIntDoubleMap X); static TIntFloatMap normalizeVector(TIntFloatMap X); static Map sortByValue(TIntDoubleHashMap unsortMap); static WikiBrainScoreDoc[] pruneSimilar(WikiBrainScoreDoc[] wikibrainScoreDocs); static double cosineSimilarity(float[] X, float[] Y); }
|
// Two boundary properties of cosine similarity: self-similarity is exactly 1
// and similarity against the zero vector is exactly 0.
@Test
public void testCosineSimilarity() {
    TIntDoubleHashMap zeros = zeroVector(keyList1);
    TIntDoubleHashMap vector = testVector(keyList2, 0);
    TIntDoubleHashMap otherVector = testVector(keyList2, 1);
    assertEquals("Cosine similarity between a vector and itself must be 1",
            1.0, SimUtils.cosineSimilarity(vector, vector), 0.0);
    assertEquals("Cosine similarity between a vector and zero vector must be 0",
            0.0, SimUtils.cosineSimilarity(vector, zeros), 0.0);
}
|
// Focal method: L2-normalizes a sparse vector into a NEW TIntDoubleHashMap
// (each value divided by the Euclidean norm).
// CAVEAT: when the vector is all zeros (sumSquares == 0) the ORIGINAL map X is
// returned rather than a copy — callers may end up aliasing their input, and
// mutating the result would then mutate the argument.
SimUtils { public static TIntDoubleMap normalizeVector(TIntDoubleMap X) { TIntDoubleHashMap Y = new TIntDoubleHashMap(); double sumSquares = 0.0; for (double x : X.values()) { sumSquares += x * x; } if (sumSquares != 0.0) { double norm = Math.sqrt(sumSquares); for (int id : X.keys()) { Y.put(id, X.get(id) / norm); } return Y; } return X; } static double cosineSimilarity(TIntDoubleMap X, TIntDoubleMap Y); static double cosineSimilarity(TIntFloatMap X, TIntFloatMap Y); static double cosineSimilarity(MatrixRow a, MatrixRow b); static double googleSimilarity(int sizeA, int sizeB, int intersection, int numTotal); static TIntDoubleMap normalizeVector(TIntDoubleMap X); static TIntFloatMap normalizeVector(TIntFloatMap X); static Map sortByValue(TIntDoubleHashMap unsortMap); static WikiBrainScoreDoc[] pruneSimilar(WikiBrainScoreDoc[] wikibrainScoreDocs); static double cosineSimilarity(float[] X, float[] Y); }
|
// Checks SimUtils.normalizeVector(): a zero vector must be returned unchanged
// and non-zero vectors must be scaled to unit (L2) length.
// Defect fixed: testVector2Normalized was computed but never asserted on; the
// unit-length check now covers both normalized vectors via a small helper.
@Test
public void testNormalizeVector() {
    TIntDoubleHashMap zeroVector1 = zeroVector(keyList1);
    TIntDoubleHashMap testVector1 = testVector(keyList2, 0);
    TIntDoubleHashMap testVector2 = testVector(keyList2, 1);
    TIntDoubleMap zeroVector1Normalized = SimUtils.normalizeVector(zeroVector1);
    TIntDoubleMap testVector1Normalized = SimUtils.normalizeVector(testVector1);
    TIntDoubleMap testVector2Normalized = SimUtils.normalizeVector(testVector2);
    for (int keyNum : zeroVector1.keys()) {
        assertEquals("Every score in the zero vector remains the same after normalization",
                zeroVector1.get(keyNum), zeroVector1Normalized.get(keyNum), 0.0);
    }
    assertEquals("Normalized vector has length of 1",
            1.0, squaredNorm(testVector1Normalized), 0.00001);
    assertEquals("Normalized vector has length of 1",
            1.0, squaredNorm(testVector2Normalized), 0.00001);
}

// Sum of squared components; equals 1.0 (within tolerance) for a unit vector.
private static double squaredNorm(TIntDoubleMap vector) {
    double sum = 0.0;
    for (double value : vector.values()) {
        sum += value * value;
    }
    return sum;
}
|
// Focal method: graph distance between two geometries via breadth-first search
// over the adjacency list. The frontier is seeded with the numNeighbors closest
// indexed points to g1; the target is the single closest indexed point to g2.
// Returns the BFS level (1..maxDistance) at which the target is reached, or
// +infinity if unreachable within maxDistance steps.
// NOTE(review): the `g1 == g2` zero-distance check runs AFTER the early return
// for maxSteps == 0 / empty index result, so distance(g, g) yields +infinity
// when maxDistance is 0 — confirm this is intended.
GraphDistanceMetric implements SpatialDistanceMetric { @Override public double distance(Geometry g1, Geometry g2) { if (adjacencyList.isEmpty()) { throw new UnsupportedOperationException(); } List<ClosestPointIndex.Result> closest = index.query(g2, 1); int maxSteps = maxDistance; if (maxSteps == 0 || closest.isEmpty()) { return Double.POSITIVE_INFINITY; } if (g1 == g2 || g1.equals(g2)) { return 0; } int targetId = closest.get(0).id; TIntSet seen = new TIntHashSet(); TIntLinkedList queue = new TIntLinkedList(); for (ClosestPointIndex.Result n : index.query(g1, numNeighbors)) { if (n.id== targetId) { return 1; } queue.add(n.id); seen.add(n.id); } for (int level = 2; level <= maxSteps; level++) { int nodes = queue.size(); for (int i = 0; i < nodes; i++) { int id = queue.removeAt(0); if (!adjacencyList.containsKey(id)) { continue; } for (int id2 : adjacencyList.get(id).toArray()) { if (id2 == targetId) { return level; } if (!seen.contains(id2)) { queue.add(id2); seen.add(id2); } } } } return Double.POSITIVE_INFINITY; } GraphDistanceMetric(SpatialDataDao dao, ClosestPointIndex index); GraphDistanceMetric(SpatialDataDao dao, SphericalDistanceMetric spherical); GraphDistanceMetric(SpatialDataDao dao); void setNumNeighbors(int numNeighbors); void setMaxDistance(int maxDistance); void setValidNodes(TIntSet nodes); @Override void setValidConcepts(TIntSet concepts); @Override void enableCache(boolean enable); @Override String getName(); @Override double distance(Geometry g1, Geometry g2); @Override float[][] distance(List<Geometry> rowGeometries, List<Geometry> colGeometries); @Override float[][] distance(List<Geometry> geometries); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors); @Override List<Neighbor> getNeighbors(Geometry g, int maxNeighbors, double maxDistance); void setDirected(boolean directed); }
|
// Spot-checks graph distances from lattice[2][0]: the four nearest same-column
// nodes are one hop away, itself is zero. Then dumps a 20-column distance grid
// to stderr for manual inspection.
@Test
public void testLattice() throws DaoException {
    GraphDistanceMetric metric = getLatticeMetric();
    assertEquals(1.0, metric.distance(lattice[2][0], lattice[0][0]), 0.01);
    assertEquals(1.0, metric.distance(lattice[2][0], lattice[1][0]), 0.01);
    assertEquals(0.0, metric.distance(lattice[2][0], lattice[2][0]), 0.01);
    assertEquals(1.0, metric.distance(lattice[2][0], lattice[3][0]), 0.01);
    assertEquals(1.0, metric.distance(lattice[2][0], lattice[4][0]), 0.01);
    for (int row = 0; row < LATTICE_ROWS; row++) {
        StringBuilder line = new StringBuilder();
        for (int col = 0; col < 20; col++) {
            int hops = (int) Math.round(metric.distance(lattice[2][0], lattice[row][col]));
            line.append(' ').append(hops);
        }
        System.err.println(line);
    }
}
|
// Focal method: sorts a Trove int->double map by value in DESCENDING order,
// returning a LinkedHashMap whose iteration order reflects the sort. The
// implementation copies the Trove map into a boxed HashMap, sorts its entry
// list with Collections.reverseOrder over a by-value comparator, then rebuilds
// an insertion-ordered map. An empty input short-circuits to an empty HashMap.
// NOTE(review): raw types throughout (Map, List<Map.Entry>, Comparator) — the
// declared return type is the raw Map; callers perform unchecked assignments.
SimUtils { public static Map sortByValue(TIntDoubleHashMap unsortMap) { if (unsortMap.isEmpty()) { return new HashMap(); } HashMap<Integer, Double> tempMap = new HashMap<Integer, Double>(); TIntDoubleIterator iterator = unsortMap.iterator(); for ( int i = unsortMap.size(); i-- > 0; ) { iterator.advance(); tempMap.put( iterator.key(), iterator.value() ); } List<Map.Entry> list = new LinkedList<Map.Entry>(tempMap.entrySet()); Collections.sort(list, Collections.reverseOrder(new Comparator() { public int compare(Object o1, Object o2) { return ((Comparable) ((Map.Entry) (o1)).getValue()) .compareTo(((Map.Entry) (o2)).getValue()); } })); Map sortedMap = new LinkedHashMap(); for (Iterator it = list.iterator(); it.hasNext();) { Map.Entry entry = (Map.Entry) it.next(); sortedMap.put(entry.getKey(), entry.getValue()); } return sortedMap; } static double cosineSimilarity(TIntDoubleMap X, TIntDoubleMap Y); static double cosineSimilarity(TIntFloatMap X, TIntFloatMap Y); static double cosineSimilarity(MatrixRow a, MatrixRow b); static double googleSimilarity(int sizeA, int sizeB, int intersection, int numTotal); static TIntDoubleMap normalizeVector(TIntDoubleMap X); static TIntFloatMap normalizeVector(TIntFloatMap X); static Map sortByValue(TIntDoubleHashMap unsortMap); static WikiBrainScoreDoc[] pruneSimilar(WikiBrainScoreDoc[] wikibrainScoreDocs); static double cosineSimilarity(float[] X, float[] Y); }
|
// Exercises SimUtils.sortByValue() on random data: the result must have the
// same number of entries and iterate in descending value order.
// Defects fixed: (1) the original declared `Double previous = null;` and then
// ended — the intended ordering check was never written; (2) random.nextInt()
// can (rarely) produce duplicate keys, making the size assertion flaky — the
// fill loop now runs until the map really holds testMapSize entries.
@Test
public void testSortByValue() {
    int testMapSize = 1000;
    Random random = new Random(System.currentTimeMillis());
    TIntDoubleHashMap testMap = new TIntDoubleHashMap();
    while (testMap.size() < testMapSize) {
        testMap.put(random.nextInt(), random.nextDouble());
    }
    Map<Integer, Double> sortedMap = SimUtils.sortByValue(testMap);
    Assert.assertEquals(testMapSize, sortedMap.size());
    // sortByValue() sorts by value descending; verify the iteration order.
    Double previous = null;
    for (double value : sortedMap.values()) {
        if (previous != null) {
            Assert.assertTrue("values must be in descending order", previous >= value);
        }
        previous = value;
    }
}
|
// Focal method: loads dataset metadata from the bundled RESOURCE_DATASET_INFO
// resource, converting any IOException into the DAO layer's checked
// DaoException. Delegates the actual parsing to readInfos(BufferedReader).
DatasetDao { public static Collection<Info> readInfos() throws DaoException { try { return readInfos(WpIOUtils.openResource(RESOURCE_DATASET_INFO)); } catch (IOException e) { throw new DaoException(e); } } DatasetDao(); DatasetDao(Collection<Info> info); void setNormalize(boolean normalize); List<Dataset> getAllInLanguage(Language lang); Dataset read(Language language, File path); Dataset get(Language language, String name); boolean isGroup(String name); List<Dataset> getGroup(Language language, String name); List<Dataset> getDatasetOrGroup(Language language, String name); Info getInfo(String name); void setDisambiguator(Disambiguator dab); void setResolvePhrases(boolean resolvePhrases); void setGroups(Map<String, List<String>> groups); void write(Dataset dataset, File path); static Collection<Info> readInfos(); static Collection<Info> readInfos(BufferedReader reader); static final String RESOURCE_DATSET; static final String RESOURCE_DATASET_INFO; }
|
// The bundled dataset-info resource currently describes 18 datasets; this
// count must be updated whenever a dataset is added to or removed from it.
@Test public void testInfos() throws DaoException { Collection<DatasetDao.Info> infos = DatasetDao.readInfos(); assertEquals(18, infos.size()); }
|
// Focal method: resolves a dataset by name. Group names expand recursively into
// a composite Dataset; plain names are looked up in the bundled resources.
// Throws DaoException for path-like names (use read() for files), unknown
// names, names whose dataset does not support the requested language, and
// wrapped IOExceptions.
// NOTE(review): the call `read(name, language, ...)` takes three arguments but
// the only read() listed in this summary is read(Language, File) — this
// signature dump is presumably incomplete; confirm against the full class.
DatasetDao { public Dataset get(Language language, String name) throws DaoException { if (groups.containsKey(name)) { List<Dataset> members = new ArrayList<Dataset>(); for (String n : groups.get(name)) { members.add(get(language, n)); } return new Dataset(name, members); } if (name.contains("/") || name.contains("\\")) { throw new DaoException("get() reads a dataset by name for a jar. Try read() instead?"); } Info info = getInfo(name); if (info == null) { throw new DaoException("no dataset with name '" + name + "'"); } if (!info.languages.containsLanguage(language)) { throw new DaoException("dataset '" + name + "' does not support language " + language); } try { return read(name, language, WpIOUtils.openResource(RESOURCE_DATSET + "/" + name)); } catch (IOException e) { throw new DaoException(e); } } DatasetDao(); DatasetDao(Collection<Info> info); void setNormalize(boolean normalize); List<Dataset> getAllInLanguage(Language lang); Dataset read(Language language, File path); Dataset get(Language language, String name); boolean isGroup(String name); List<Dataset> getGroup(Language language, String name); List<Dataset> getDatasetOrGroup(Language language, String name); Info getInfo(String name); void setDisambiguator(Disambiguator dab); void setResolvePhrases(boolean resolvePhrases); void setGroups(Map<String, List<String>> groups); void write(Dataset dataset, File path); static Collection<Info> readInfos(); static Collection<Info> readInfos(BufferedReader reader); static final String RESOURCE_DATSET; static final String RESOURCE_DATASET_INFO; }
|
// Reads the bundled wordsim353 dataset and checks its size, language, and the
// known similarity of the (morality, marriage) pair.
// Defects fixed: assertEquals(sim, 0.354145342886, ...) had expected/actual
// reversed (JUnit's contract is expected first), and assertTrue(!isNaN(sim))
// is clearer as assertFalse with a message explaining the failure.
@Test
public void testDaoRead() throws DaoException {
    DatasetDao dao = new DatasetDao();
    Dataset ds = dao.get(Language.getByLangCode("en"), "wordsim353.txt");
    assertEquals(353, ds.getData().size());
    assertEquals("en", ds.getLanguage().getLangCode());
    double sim = Double.NaN;
    for (KnownSim ks : ds.getData()) {
        if (ks.phrase1.equals("morality") && ks.phrase2.equals("marriage")) {
            sim = ks.similarity;
        }
    }
    assertFalse("pair (morality, marriage) not found in dataset", Double.isNaN(sim));
    assertEquals(0.354145342886, sim, 0.000001);
}
|
// Focal method: normalized discounted cumulative gain of the guessed ranking
// against the known most-similar list. Numerator: sum over observations of
// actualScore / ln(rank + 1), with rank 1 using divisor 1 (natural log).
// Denominator (ideal DCG): the same observed ranks re-paired with the observed
// scores sorted descending — the best achievable assignment of those scores to
// those ranks. Returns 0.0 when there are no observations.
// (The method name "NDGC" appears to be a transposition of the standard term
// NDCG; kept as-is since it is part of the public interface.)
MostSimilarGuess { public double getNDGC() { if (observations.isEmpty()) { return 0.0; } TIntDoubleMap actual = new TIntDoubleHashMap(); for (KnownSim ks : known.getMostSimilar()) { actual.put(ks.wpId2, ks.similarity); } int ranks[] = new int[observations.size()]; double scores[] = new double[observations.size()]; double s = 0.0; for (int i = 0; i < observations.size(); i++) { Observation o = observations.get(i); double k = (o.rank == 1) ? 1 : Math.log(o.rank + 1); s += actual.get(o.id) / k; scores[i] = actual.get(o.id); ranks[i] = o.rank; } Arrays.sort(ranks); Arrays.sort(scores); ArrayUtils.reverse(scores); double t = 0; for (int i = 0; i < scores.length; i++) { double k = (ranks[i] == 1) ? 1 : Math.log(ranks[i] + 1); t += scores[i] / k; } return s / t; } MostSimilarGuess(KnownMostSim known, String str); MostSimilarGuess(KnownMostSim known, SRResultList guess); String toString(); List<Observation> getObservations(); int getLength(); KnownMostSim getKnown(); double getNDGC(); double getPenalizedNDGC(); PrecisionRecallAccumulator getPrecisionRecall(int n, double threshold); }
|
// Mirrors getNDGC() by hand: observed ranks are 1, 2, 4 and 7 with actual
// scores 0.80, 0.00, 0.95 and 0.91. Rank 1 uses divisor 1, other ranks use
// ln(rank + 1). The denominator re-pairs the same ranks with the scores sorted
// descending (the "ideal" assignment), matching the method's implementation.
@Test public void testNdgc() { double ndgc = ( (0.80 + 0.00 / Math.log(2+1) + 0.95 / Math.log(4+1) + 0.91 / Math.log(7+1)) / (0.95 + 0.91 / Math.log(2+1) + 0.80 / Math.log(4+1) + 0.00 / Math.log(7+1))); assertEquals(ndgc, guess.getNDGC(), 0.001); }
|
// Focal method: builds a precision/recall accumulator for the top-n guesses.
// Every known similarity is first fed to observe() (establishing the relevant
// set relative to the threshold); then each guessed observation with rank <= n
// is reported as retrieved with its actual similarity. Iteration breaks at the
// first rank > n, so observations are assumed sorted by rank — confirm against
// the constructors.
MostSimilarGuess { public PrecisionRecallAccumulator getPrecisionRecall(int n, double threshold) { PrecisionRecallAccumulator pr = new PrecisionRecallAccumulator(n, threshold); TIntDoubleMap actual = new TIntDoubleHashMap(); for (KnownSim ks : known.getMostSimilar()) { pr.observe(ks.similarity); actual.put(ks.wpId2, ks.similarity); } for (Observation o : observations) { if (o.rank > n) { break; } pr.observeRetrieved(actual.get(o.id)); } return pr; } MostSimilarGuess(KnownMostSim known, String str); MostSimilarGuess(KnownMostSim known, SRResultList guess); String toString(); List<Observation> getObservations(); int getLength(); KnownMostSim getKnown(); double getNDGC(); double getPenalizedNDGC(); PrecisionRecallAccumulator getPrecisionRecall(int n, double threshold); }
|
// Precision/recall of the fixture guess at cutoffs n = 1, 2 and 5 with
// relevance threshold 0.7.
// Defect fixed: assertEquals(pr.getN(), 1) had expected/actual reversed —
// JUnit's contract is assertEquals(expected, actual).
@Test
public void testPrecisionRecall() {
    PrecisionRecallAccumulator pr = guess.getPrecisionRecall(1, 0.7);
    assertEquals(1, pr.getN());
    assertEquals(1.0, pr.getPrecision(), 0.001);
    assertEquals(0.333333, pr.getRecall(), 0.001);

    pr = guess.getPrecisionRecall(2, 0.7);
    assertEquals(0.5, pr.getPrecision(), 0.001);
    assertEquals(0.333333, pr.getRecall(), 0.001);

    pr = guess.getPrecisionRecall(5, 0.7);
    assertEquals(0.6666, pr.getPrecision(), 0.001);
    assertEquals(0.6666, pr.getRecall(), 0.001);
}
|
// Focal method: downloads `url` to `file`, retrying up to maxAttempts times on
// DownloadIOCodeError with linearly increasing backoff (backoffTime * attempt),
// then polling the monitor every sleepTime ms until the transfer finishes.
// NOTE(review): returns null when all attempts fail — callers must null-check.
// NOTE(review): the boolean result of file.getParentFile().mkdirs() is ignored;
// a failed directory creation surfaces later as a download error.
FileDownloader { public File download(URL url, File file) throws InterruptedException { LOG.info("beginning download of " + url + " to " + file); for (int i=1; i <= maxAttempts; i++) { try { AtomicBoolean stop = new AtomicBoolean(false); DownloadInfo info = new DownloadInfo(url); DownloadMonitor monitor = new DownloadMonitor(info); info.extract(stop, monitor); file.getParentFile().mkdirs(); WGet wget = new WGet(info, file); wget.download(stop, monitor); LOG.info("Download complete: " + file.getAbsolutePath()); while (!monitor.isFinished()) { Thread.sleep(sleepTime); } return file; } catch (DownloadIOCodeError e) { if (i < maxAttempts) { LOG.info("Failed to download " + url + ". Reconnecting in " + (i * backoffTime / 1000) + " seconds (HTTP " + e.getCode() + "-Error " + url + ")"); Thread.sleep(backoffTime * i); } else { LOG.warn("Failed to download " + file + " (HTTP " + e.getCode() + "-Error " + url + ")"); } } } return null; } FileDownloader(); File download(URL url, File file); void setSleepTime(int sleepTime); void setMaxAttempts(int maxAttempts); void setDisplayInfo(int displayInfo); void setBackoffTime(int backoffTime); static final Logger LOG; }
|
// NOTE(review): the URL string literal below was truncated/garbled during
// extraction — `new URL("http: ` is missing the remainder of the URL and its
// closing quote, so this method does not compile as shown. The assertions
// suggest the target was a robots.txt file (the first line must start with
// "User-agent:"); recover the original URL from version control before fixing.
// Intended behavior: download the URL with FileDownloader, then download the
// same URL with FileUtils.copyURLToFile, and assert both copies are identical.
@Test public void testDownloader() throws IOException, InterruptedException { URL url = new URL("http: File tmp1 = File.createTempFile("downloader-test", ".txt"); FileDownloader downloader = new FileDownloader(); downloader.download(url, tmp1); assertTrue(tmp1.isFile()); List<String> lines = FileUtils.readLines(tmp1); assert(lines.size() > 10); assertTrue(lines.get(0).startsWith("User-agent:")); File tmp2 = File.createTempFile("downloader-test", ".txt"); FileUtils.copyURLToFile(url, tmp2); assertTrue(tmp2.isFile()); assertTrue(FileUtils.readFileToString(tmp2).startsWith("User-agent:")); assertEquals(FileUtils.readFileToString(tmp1), FileUtils.readFileToString(tmp2)); }
// NOTE(review): same extraction damage as testDownloader above — the URL
// string literal is truncated (`new URL("http: `), so this method does not
// compile as shown; recover the original URL from version control.
// Intended behavior: download to a temp file whose original path was deleted,
// then verify the downloaded file can be moved with FileUtils.moveFile.
@Test public void testDownloaderMove() throws IOException, InterruptedException { URL url = new URL("http: File tmp1 = File.createTempFile("downloader-test", ".txt"); File tmp3 = File.createTempFile("downloader-test", ".txt"); tmp1.delete(); tmp3.delete(); tmp3.deleteOnExit(); FileDownloader downloader = new FileDownloader(); downloader.download(url, tmp3); assertTrue(tmp3.isFile()); FileUtils.moveFile(tmp3, tmp1); }
|
// Focal method: deserializes a byte array back into an object via Java native
// serialization (ObjectInputStream over a ByteArrayInputStream).
// SECURITY: never call this on untrusted bytes — deserialization of
// attacker-controlled data can lead to remote code execution via gadget
// chains. Intended only for round-tripping objects produced by objectToBytes()
// within this codebase. (The streams are not closed, which is harmless for
// in-memory byte-array streams.)
WpIOUtils { public static Serializable bytesToObject(byte input[]) throws IOException, ClassNotFoundException { return (Serializable) new ObjectInputStream( new ByteArrayInputStream(input)) .readObject(); } static void mkdirsQuietly(File dir); static void writeObjectToFile(File file, Object o); static Object readObjectFromFile(File file); static Serializable bytesToObject(byte input[]); static long getLastModifiedfromDir(File dir); static byte[] objectToBytes(Serializable o); static BufferedReader openBufferedReader(File path); static BufferedReader openResource(String path); static String resourceToString(String path); static Reader openReader(File path); static InputStream openInputStream(File path); static BufferedWriter openWriter(File path); static BufferedWriter openWriterForAppend(File path); static BufferedWriter openBZ2Writer(File path); static File createTempDirectory(String name, boolean deleteOnExit); static File createTempDirectory(String name); static String getRelativePath(File base, File path); static BufferedWriter openWriter(String path); }
|
@Test public void testSerialization() throws IOException, ClassNotFoundException { String hex = "aced0005757200025b460b9c818922e00c4202000078700000055b402a056a3ff3a176400797813ffba962401a234e3ff8e2093fef799d40369d60403ef48e4011aa844001c25240321b0a3fdd8e863ff8e599400d8398400a180240032f43402841f23fe5936d4016e1014010ffae3ff5bd3f401b645c3fe661fb40289c6e400222cd3fdb229f3ff38df440182ae73ff973e83ff7e56c400a930b401b6270400b62a640025eb640030c99402bc7dd3fdfe7324071865a4010f3b93fe8e3d73fe0c1263fe18afe401c3d57400060cb3feb44fe40043148401e28e93ffdbf613feff52c3ff6d1203ff1c4af40238aa040040f774020ba83401bbe9f402365b940025ddf3fe43cf93ff2f7224043759a4005515a400af950400a8d924007ddac4010aa633fe9ce4e40530708401cd9b23ff4e705400c5f79400ac2b43ff02c303fe6ec13400d94c93febd5d2407482714011f04b403c1cce3fe1049b3fff660040138d563ffbd5b5402733ba401bd7a84032a12b3fe0b0563fee4352403900a1400dcda24021c85040017d2940567ea84008c8ed40091f3c40224cf44020d33240026222400b10013ff90993402893803ffe70c7403ae100401e6b75402af9d5401725d93ff7305a4008be08400f9f9f3ff711544073c95e3fdeafd03fe8c6b640375da73ff7b5833fdb78a4402897db400e4ae8403ab57540072abf3fdd57e14017fa19400b0c52402dc64f4029cfab40012c0640093ba53ffe34fe400c63d8400578b73ffa48214009e58f401c88553fde5ed140126d7e400f18ea3fffff59401995574044359a3fec16fe4014c431404c3e5a3fe77c2640009e754000db9a400854ab408a636a3fe3f32a3fea55ff3feaa28c400e4a0140413e203ffd6f233fe6e7103ff7974e400265423ff3e9e6400af414405e2623401406ad401ca15c4028c8a0401df7d74040444f40132f51401d527c401346ee400c23f84003271740181aa73ff28ffd4006007d3fe5b0aa401725c6401ac8a44077ae433fdd5f3740129fad405e641d4007b84a3ffcdbd83ffeb2573ffecf0f4011df0b40046b21405e493340433b5b3fe9a2603fe29f0640320087401284d43fe2ea92404ac60b3ffe645c402908a43fe447e040148597401374d84005b3f3401087f4401377d6405d6e063feedb9b3fdf23143fdb6a934005c344401cfd5b3ff04071402d01733fe1230e3ffb02f9400e6bd53ff1140d3fe320013ff40799400570133ff3a70f3ff4e6ba3ff0faf540761d7e402270284021d0103ffe9835403d90c140079b2e4019b6c1402242ec3fe0b5673ffa51933ffc07bc4002241d3
ff1457b3ff7b1f93ff3e0104034197140200f973fe7bc1c400ef6cb4007fce13fe9baff4010e7133ffdb7993fe2e9c83ffa80f74006053c3fefff81400e7a524019f97e4037a2253fefb1a43fdc09263fed01fd3fea81163ff110c83fefc6e9400026573fecb9de4002b6aa4014b4a3403e57233fedd6753ffd69d840552d8a400aeef640258f6440200475403708d1400c0bae401609913fe0f5ea3fea2fca3ff49f2c3ff8181a400cb18b3feaa4444018099d4017d1f7400f5f3a401276de400c69ac400a0d1e402898df402ef0994028b1014019a137405b10ef403b77d53ffe26d4407a25c44014411c401ee565400311de3fe60c3740459b023ff4155b4029e116401b764b3fe806ba401a03294008d86a3ff039ba4008c2f7404196a0401278ab4019b6123fe44abc40193063400e265d3ff8c9e44007855e3ffb103d400a80df3ff2da46401d2d9d401eb29640149ed23ffff437404978953fe23c1a3ffc2d7c4007039a3fe8fe343fde275d4025a94c3ff250a740380bd23fe78d4b3ff947e240141791401eb6644010bff940201adb400b48de403dd4f04044eb81401301bc3ffc4d0b4007628f40094511402a08c53ffaac8140009beb402ab27f3fee3e32401ac1693fe0ffd240054466402bf1ac40099ae33fdaa2e6400b3c5b4024f31c3fe7cbbc3fe7e7a53ffcd8a04007580b401c803b400cb4e54000fc353ff4b0754002def24008aebe3fec815340763b37402949294014960d40084915400de3b8401f22483fe8a6383ff7aa70400a2f2140349de63ffabcde3fde5eea4021ae50400a6699400e81a5402b91ef3ff999473feaeff04009e6894026038d404610a63ff7a35840395771401a4bf53fdbdfec3ff6d3f63ff7fddf4026774f3ffc0f8d402229863fe2eede4010c8b84000bf5e405285a94009334b40122b9640314e773ffede483ffbce5540170f933fefa0c54008315b3fea83ff3fece355400726063fea837740021a563fed73314037ef5e400885b34011b5da3ff367683ff5eece400308903ff3b76e40044b763ff7267e401b2cb04006223b3fea3a6a402aa353401ecbe5404da7ff3ff365c43feb4d5d3ff231cc401b8d923fee4cf33fea6dd24000d32c400b2c044009e47e3ff93be04042e2fc3fe262633fea52a1400b599f401e08be3ff435474016b20840195ac84004bc4e3ff089343fffc7b13ff45df84009ce603ff33d8d3ffdb87340089d024005af3d402df1683ff38f03402185e93fe2d7bf400eb51240022e21400ed7dc4002894d4026ba24400b814f400d5b893fe94d3a401258fc3fed53693fde3baa3ff8bbc4404074b73ff67058400d24d13fe32ef53fe851b24000b7403fec66c040028ff13fdf2b823febd9f53ff825953ff6f0d94
01232433fe72ac3403fb5f54056d1cb3fe2e6c13ff442824002af3a4009c3974017c04e3fdbc20d3fdfd0d44032bfcf3fed2b014052b8cb401b78f03fe52a03401f3edf4016a86640007e4d4005a3f83ffb5f194012399040233dd04042cf0c3feec6083fdc8ec440170f094002ea1a3fec31d23fec4b424018b5f14001529e3fe84c4c4002f7384029e86f400a8291401e52194001174b3ff559bf401237c03ffef05f3ffd85ae4060161a4019e443400460f840616ba43fe98a4d3fff88533ff812c23ff4fbd14020cc80403df0ec3fe6a5ca4006800f400107ad402fcbbe400ca1da4016ad504005cb753fee90054009006d3ff93220401e43b0404dba2f3fe7ad2a4007a135400992533fda8eab402259244020984a3fe9b87c3ff808103fe32962402911b5400e88923fdfa7bb401b42264005609b4043dd9740152ed33fe7ffcf40135462400ff284400bb5c3402de0f7400496823fe0da953ff2841b3ff519653ffe342a40316c66400c2ff2400e783e3ffa037b402180493fe801483ffd40df3ff8b5bf4016f99d3fdea7843febb1b84069cd66402e3d763feb15f43fe545d940556af73ff2e04240338d063fe96f8d40029d9d400618503ffb88fb401575633ff97bab4018698d3ff842ea3fefc919400b977b40085b0b4004b82d401d6b38400b84643feeb9043fedfc0f402086af3ff78db840010eef3fe8d434402719874056a6133fff62233ff2adce3ff24f7240098fd1404b367840020e233ff8c575401d24414057855d400b4c7d3fe929253ffcc811400c851f3ffedc5d3fe0d07d401cc4153fe7169c3ff1a12840073fc34005f55e3feeafcb40032bed401d4b2c401fc6c53fee25be40162dcb400c9b5e4013f3444010c5303ff13b8e401f7a2d403354c2401100743ff8946f3fdbcbd24044c4353fe2d7ab3fe567c04004be853fe52cfa402501dd400b23c8402d0f334012f6b940026b723fefb4a0402c11d73ffccfe63ff57b533fffab3d4032de784004a9274003ea1f40044843402b81a84005a6c0402c62173fd8594c3ffe6c0640000a193ffd729b3ffdfceb403aa6d54006e9843fef42eb40146488402339e0401d27f33ff5efd74010547a402a306f403ee39a400c0be240592d59400b2c664035accd4024063b3ff1abe84000ec0f4000afe74000c87d3ff89ef33fe17bea400817954003723e40317b76402054534004abbc4003e3a340179fb8401387904040341a401fbd883fe12f16400d2f71400764e9407d0fae403430263fec712c400683b1400841bb3ff15c363fdeb8ea3fea0d633ff160d13fe5207c402bfda73ff3c2e74002164040020b183fe5f49a3fee32ac3fe753b24012eedd400403243ff7cfd2401bfba140171140400214c7402799f13
fe063ae4016c49240022b723feac651400ef3e7402324a64025460d4009ccfc3fe2eb103ff702853fe40bcf400aef9a403d29904008b35c4007510e3fef0c8e3fff35363fe0416a4012c5873ff74a9e3fe6646a40111491400a704d4035d63a3fe026574007174640077ef93ff50c414006891c402174b940224930402e9c913fe85c23400858774033b79c400fc1ec3ff3d87d4043c7b7401dde7c400daf014009cc19401db18e401797d24005d3213fe871ec40287c6a400fcbca401265d0400851514005abb7400899e04062b47c3ff0940c401378323ff5c0b440413c5c40070d90402a1e624000a09140013474400e60e83fff33ac40051e1e3ff71ee04041d855406233ff401bcd273ffc9c4e401b05f34005d7844025f7184006bb56400b84d84005a36b3fe724713fe7e971401ab66a3fdf924d4012b45d3ff0b2633fe2c229400eb53c4016fab73ff50ec84032834e3fd8e6aa4002d8174006c2b840106804402589cb40638cf1401a22aa400ec74a3fe44e983fdea4874015526340219e9540147cfa4017529f400813cf403c7b0d401a8016406535c9401b28f140033a1a400412f2401719f04083c59b3ffa3e194036f7a83ff8198b401801f93ff5fa90400b6c963fedb76840726ee34022f1ad400da65b3fe2dd123ff8fdd4401766453fe122be40287dde3ff6f9423ff042803ff831a8403569133fe0c88a3fdcc72440073e3840020c804045cbd54002db29401e9c5f3fdde764404d756b3ff13b7a40080e98400d3613401ab8cc4011fdac40354a473fe5dcc73ff9a0494011ba7e3fecfd48400451d63feeafd24016b7bb40173d403fef1cd03ffaa2a63fe80a4940208aa43fef495b400e7764400023a3401c337f4019bcb3404a9f143ffae4b93fef4bbe4013129d400bc573401400c93ff1be47400a46704000474f404d859e40099f84400215f9401a14623ff464e24002822140020f2f4033fb074015c4643fe2c0844007865d3fe87a3a401443113fee0214403281d43febdf22400686593ffad58e3fe3ea7e3ffc7eac4022d2093ffe5e4b401c285040309a7640042e914016435a401500a0400787aa400eaaeb402675f03fec1468402735484074e013408516774018123d400ff4123ff5de923ffbb9774011d99b3ffbfb483fea40794048e54d40024e523fd995c9407787454004ee83402c1f083ff10320401d10ce3ffcf3333fe97e6a3fea52033fed704e3ff1fb943fffcf303ff2bbc24031c3b33ff961b740475331403519a03fe8bd7b40124e2b40169b334011d3a2406ff1104018f36d400296d23fd6dfdf3fed95ca3fef6bfe40379e6d3fe8cac93fe152604002a3e8401fc5363ff634e74000061e4003c1f5402f52134025cbb34011be47400d38f24
032c5a24003a3e13fdfa4e63fe24050403561e03ffb5b40401bb7dc4001e28c3ff6c84c40206aa33fea4f9f3fe94d9040156458406ca2303fdfae54400215cc4006480f4004668a3fec13cb4003fb133ffeccd54003a1d1402017d7403e2082408cc321400e264a3fd959173fe5524d402dd809401376ce3fd97e0d400d942d401912593fdfba614038bf203ffe0024403cbc1c4005d8e63fe747003fe4d2c13ffeefe940266cdf3ff19bb93fefb62240064e5a3ff2cf28403182563fee7b8d4012700440052e5d404053d540105c1d401700d94027a89e400e8614400af2334007890f4014cb223ff8f26a405dbaa74001c71a401e78473fe97ff24002d2843ff7ee944008f54a4001ebe54000e91f40025147400cf32c400171a94000c2623fd7f89740253f92401ea35a40108d804001a7294007d51e3fe449ad4001f99e3fe32cc7403044ac3ffc5c73400d4232400a2e9f401efd7e403aa838402c8614400d744b4079c0094034df2d3fe4890c3fe24e58404b5a093fe632c43fe99e003fee49853fd96b463ff445ae3ffbaae8400a69a63ff84156401f385a400a4a0d4017e8c5402708ef4009225e400126ae3fe8bd7a3fed78c43fe935a53fefc2513fd954ca40095a3c3fff408f3fe9c8304004b9503feebe813fefe6304021b16c3fff0030401e2908400850cf4011e0684001be0b40111ca43fddd9cd3fe99a8b3fe468d73fe7a1ba3fe866483ff1d3cf4029d442401bb1963fdcb78c3ff1c1213fe824ec4011be5740013e0240034e1c401318674014cbbd401be9db3ff58de1402622734028668f4018b78f4009d77f401c7d3f406fe5cb4079cd243ff30417400b34e83fe50e743fe5407c4022509f40175e19402430834024a4073fe041893ffcec273ff8c6ae402e7ce03fe313c9401923633fee42753fffc6334055997e4021d36b3fedc27d405069934003c4cc401cf0a33fe220034001f5183fe37d8b40064c4540204d5f40129c534013ece3400662bd40047cbf3ff6d9a44011f2a0400091024005c4dc3ff15cb13fde86ef3ff440dc3ffe40953fed28ee3feeb0373fe0ae7b4002fd5f3ffedd8c402f37b93fea94b04020b8c23ff42f60405f05d74035a400406780ca402213a63fe598383ff5150c405d29ca3ff357693fe712d740202fcb401e65243fe4546c4013e5753fe266374012716f3fec440c400fb1504016eb023fec02ea3ff218ab3ff700fb3feae7854003c2734016b2683fef8f5d402022513ffc23d73ff37fe93ffe0ba73fe506623fdb303d4031a39d3ffe23fc400537893ffc888d403a67d03ff7c6514045884f401a3922401c5a443fe3ef20400418cb3ff06f7a3ffb5733401a3a01400a0a133fe129383fd5c683400e17e64012e94d4029f3913
ff507ef3ffcc9a13fe1aa11400156be402015cf401293d2403388c84037a3143ffc3ce0401917633feccb17400a26d33ff8bf003fe274914007fdae4007dece4009ee544020e584402c77ff3fe67772400759bd400c43903ff2f8113ffa38c73ff05ddd3ff4d6ba40186783400635854000028e3feec1e2405462cb3fea7602400ba81c3ffeef87401c7b4b400bd2ca401139443ff4ef3e400c79af401919743fe8184b40027447409314f3403d5aec403032d43ff254024032d8eb4006380040282e984004c8364005d319403be909402bacc33fde28c43fe2ffa6400113eb3fea4f48400d66eb3fe48f8b3ff3b4c64013b3ea40065e95400240233fd2f22a40a4aa9e4011978740149efd3ff5e4573ff5f9f84000cd9f40088978400dd5e5401ebbc53ff4c3c540184ede3ff3c88240155d4b3fe11a53400908c2401a5fbb3ff3cd933fe082053fe439e03ffd5c353fd84f0440307ea83ff5ee0b40108cf0400139a7401428d040135e053ff6f0274009099140035b9d4013ac253ffe022840392771400fd5f840264d5840125757401e5ab53ffc7d99402bdda7400f07ba3fca60f4402c65ac3feb32da400018944014068c3fe5dfb5400ed275400dcd7b401ae088400721db40011fc94002f5da400217e34004230b400ff0b04010e37f3ffd41be40086e5d400dd4d23fea925640340aac40225779401aeadb40005c28400025f9402be9a63fe39c3f400184ce3ff30fbf400114e2400d019f40129f563ff7e7ac3fe3df8e401168253ffb33bb"; byte[] bytes = DatatypeConverter.parseHexBinary(hex); float f[] = (float[]) WpIOUtils.bytesToObject(bytes); System.out.println("f.length is " + f.length); for (int i = 0; i < 100; i++) { System.out.println("byte " + i + " is " + bytes[i]); } }
|
JvmUtils { public synchronized static String getFullClassName(String shortName) { if (NAME_TO_CLASS != null) { return NAME_TO_CLASS.get(shortName); } NAME_TO_CLASS = new HashMap<String, String>(); for (File file : getClassPathAsList()) { if (file.length() > MAX_FILE_SIZE) { LOG.debug("skipping looking for providers in large file " + file); continue; } ClassFinder finder = new ClassFinder(); finder.add(file); ClassFilter filter = new AndClassFilter( new RegexClassFilter(WIKIBRAIN_CLASS_PATTERN.pattern()), new NotClassFilter(new RegexClassFilter(WIKIBRAIN_CLASS_BLACKLIST.pattern())) ); Collection<ClassInfo> foundClasses = new ArrayList<ClassInfo>(); finder.findClasses(foundClasses,filter); for (ClassInfo info : foundClasses) { String tokens[] = info.getClassName().split("[.]"); if (tokens.length == 0) { continue; } String n = tokens[tokens.length - 1]; if (!NAME_TO_CLASS.containsKey(n)) { NAME_TO_CLASS.put(n, info.getClassName()); } } } LOG.info("found " + NAME_TO_CLASS.size() + " classes when constructing short to full class name mapping"); return NAME_TO_CLASS.get(shortName); } static String getClassPath(); synchronized static List<File> getClassPathAsList(); static Process launch(Class klass, String args[]); static Process launch(Class klass, String args[], OutputStream out, OutputStream err, String heapSize); static void setWikiBrainClassPattern(Pattern pattern, Pattern blacklist); static Class classForShortName(String shortName); synchronized static String getFullClassName(String shortName); static final int MAX_FILE_SIZE; }
|
@Test public void testFullClassName() { assertEquals("org.wikibrain.utils.JvmUtils", JvmUtils.getFullClassName("JvmUtils")); assertNull(JvmUtils.getFullClassName("Foozkjasdf")); }
|
JvmUtils { public static Class classForShortName(String shortName) { String fullName = getFullClassName(shortName); if (fullName == null) { return null; } try { return Class.forName(fullName); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } static String getClassPath(); synchronized static List<File> getClassPathAsList(); static Process launch(Class klass, String args[]); static Process launch(Class klass, String args[], OutputStream out, OutputStream err, String heapSize); static void setWikiBrainClassPattern(Pattern pattern, Pattern blacklist); static Class classForShortName(String shortName); synchronized static String getFullClassName(String shortName); static final int MAX_FILE_SIZE; }
|
@Test public void testClassForShortName() { assertEquals(JvmUtils.class, JvmUtils.classForShortName("JvmUtils")); assertNull(JvmUtils.classForShortName("Foozkjasdf")); }
|
MetricsService { public Map<String, String> getMetrics(ProcessGroupStatus status, boolean appendPgId) { final Map<String, String> metrics = new HashMap<>(); metrics.put(appendPgId(MetricNames.FLOW_FILES_RECEIVED, status, appendPgId), String.valueOf(status.getFlowFilesReceived())); metrics.put(appendPgId(MetricNames.BYTES_RECEIVED, status, appendPgId), String.valueOf(status.getBytesReceived())); metrics.put(appendPgId(MetricNames.FLOW_FILES_SENT, status, appendPgId), String.valueOf(status.getFlowFilesSent())); metrics.put(appendPgId(MetricNames.BYTES_SENT, status, appendPgId), String.valueOf(status.getBytesSent())); metrics.put(appendPgId(MetricNames.FLOW_FILES_QUEUED, status, appendPgId), String.valueOf(status.getQueuedCount())); metrics.put(appendPgId(MetricNames.BYTES_QUEUED, status, appendPgId), String.valueOf(status.getQueuedContentSize())); metrics.put(appendPgId(MetricNames.BYTES_READ, status, appendPgId), String.valueOf(status.getBytesRead())); metrics.put(appendPgId(MetricNames.BYTES_WRITTEN, status, appendPgId), String.valueOf(status.getBytesWritten())); metrics.put(appendPgId(MetricNames.ACTIVE_THREADS, status, appendPgId), String.valueOf(status.getActiveThreadCount())); final long durationNanos = calculateProcessingNanos(status); metrics.put(appendPgId(MetricNames.TOTAL_TASK_DURATION_NANOS, status, appendPgId), String.valueOf(durationNanos)); final long durationSeconds = TimeUnit.SECONDS.convert(durationNanos, TimeUnit.NANOSECONDS); metrics.put(appendPgId(MetricNames.TOTAL_TASK_DURATION_SECONDS, status, appendPgId), String.valueOf(durationSeconds)); return metrics; } Map<String, String> getMetrics(ProcessGroupStatus status, boolean appendPgId); Map<String, String> getMetrics(VirtualMachineMetrics virtualMachineMetrics); }
|
@Test public void testGetProcessGroupStatusMetrics() { ProcessGroupStatus status = new ProcessGroupStatus(); status.setId("1234"); status.setFlowFilesReceived(5); status.setBytesReceived(10000); status.setFlowFilesSent(10); status.setBytesSent(20000); status.setQueuedCount(100); status.setQueuedContentSize(1024L); status.setBytesRead(60000L); status.setBytesWritten(80000L); status.setActiveThreadCount(5); ProcessorStatus procStatus = new ProcessorStatus(); procStatus.setProcessingNanos(123456789); Collection<ProcessorStatus> processorStatuses = new ArrayList<>(); processorStatuses.add(procStatus); status.setProcessorStatus(processorStatuses); ProcessGroupStatus groupStatus = new ProcessGroupStatus(); groupStatus.setProcessorStatus(processorStatuses); Collection<ProcessGroupStatus> groupStatuses = new ArrayList<>(); groupStatuses.add(groupStatus); status.setProcessGroupStatus(groupStatuses); final MetricsService service = new MetricsService(); final Map<String, String> metrics = service.getMetrics(status, false); Assert.assertTrue(metrics.containsKey(MetricNames.FLOW_FILES_RECEIVED)); Assert.assertTrue(metrics.containsKey(MetricNames.BYTES_RECEIVED)); Assert.assertTrue(metrics.containsKey(MetricNames.FLOW_FILES_SENT)); Assert.assertTrue(metrics.containsKey(MetricNames.BYTES_SENT)); Assert.assertTrue(metrics.containsKey(MetricNames.FLOW_FILES_QUEUED)); Assert.assertTrue(metrics.containsKey(MetricNames.BYTES_QUEUED)); Assert.assertTrue(metrics.containsKey(MetricNames.BYTES_READ)); Assert.assertTrue(metrics.containsKey(MetricNames.BYTES_WRITTEN)); Assert.assertTrue(metrics.containsKey(MetricNames.ACTIVE_THREADS)); Assert.assertTrue(metrics.containsKey(MetricNames.TOTAL_TASK_DURATION_SECONDS)); Assert.assertTrue(metrics.containsKey(MetricNames.TOTAL_TASK_DURATION_NANOS)); }
@Test public void testGetProcessGroupStatusMetricsWithID() { ProcessGroupStatus status = new ProcessGroupStatus(); String id = "1234"; status.setId(id); status.setFlowFilesReceived(5); status.setBytesReceived(10000); status.setFlowFilesSent(10); status.setBytesSent(20000); status.setQueuedCount(100); status.setQueuedContentSize(1024L); status.setBytesRead(60000L); status.setBytesWritten(80000L); status.setActiveThreadCount(5); ProcessorStatus procStatus = new ProcessorStatus(); procStatus.setProcessingNanos(123456789); Collection<ProcessorStatus> processorStatuses = new ArrayList<>(); processorStatuses.add(procStatus); status.setProcessorStatus(processorStatuses); ProcessGroupStatus groupStatus = new ProcessGroupStatus(); groupStatus.setProcessorStatus(processorStatuses); Collection<ProcessGroupStatus> groupStatuses = new ArrayList<>(); groupStatuses.add(groupStatus); status.setProcessGroupStatus(groupStatuses); final MetricsService service = new MetricsService(); final Map<String, String> metrics = service.getMetrics(status, true); Assert.assertTrue(metrics.containsKey(MetricNames.FLOW_FILES_RECEIVED + MetricNames.METRIC_NAME_SEPARATOR + id)); }
@Test public void testGetVirtualMachineMetrics() { final VirtualMachineMetrics virtualMachineMetrics = VirtualMachineMetrics.getInstance(); final MetricsService service = new MetricsService(); final Map<String, String> metrics = service.getMetrics(virtualMachineMetrics); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_UPTIME)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_HEAP_USED)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_HEAP_USAGE)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_NON_HEAP_USAGE)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_THREAD_STATES_RUNNABLE)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_THREAD_STATES_BLOCKED)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_THREAD_STATES_TIMED_WAITING)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_THREAD_STATES_TERMINATED)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_THREAD_COUNT)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_DAEMON_THREAD_COUNT)); Assert.assertTrue(metrics.containsKey(MetricNames.JVM_FILE_DESCRIPTOR_USAGE)); }
|
PrometheusReportingTask extends AbstractReportingTask { @Override public void onTrigger(final ReportingContext context) { final String metricsCollectorUrl = context.getProperty(METRICS_COLLECTOR_URL) .evaluateAttributeExpressions().getValue() .replace("http: final String applicationId = context.getProperty(APPLICATION_ID).evaluateAttributeExpressions().getValue(); final String jobName = context.getProperty(JOB_NAME).getValue(); final String instance = context.getProperty(INSTANCE_ID).evaluateAttributeExpressions().getValue(); final Map<String,String> groupingKey = Collections.singletonMap("instance", instance); final PushGateway pushGateway = new PushGateway(metricsCollectorUrl); if(context.getProperty(USE_AUTHENTICATION).asBoolean()){ final String username = context.getProperty(AUTH_USERNAME).getValue(); final String password = context.getProperty(AUTH_PASSWORD).getValue(); pushGateway.setConnectionFactory(new BasicAuthHttpConnectionFactory(username, password)); } try { if (context.getProperty(SEND_JVM_METRICS).asBoolean()) { pushGateway.pushAdd(PrometheusMetricsFactory.createJvmMetrics(VirtualMachineMetrics.getInstance()), JVM_JOB_NAME, groupingKey); } } catch (IOException e) { getLogger().error("Failed pushing JVM-metrics to Prometheus PushGateway due to {}; routing to failure", e); } for (ProcessGroupStatus status : searchProcessGroups(context, context.getProperty(PROCESS_GROUP_IDS))) { try { pushGateway.pushAdd(PrometheusMetricsFactory.createNifiMetrics(status, applicationId), jobName, groupingKey); } catch (IOException e) { getLogger().error("Failed pushing Nifi-metrics to Prometheus PushGateway due to {}; routing to failure", e); } } } @Override void onTrigger(final ReportingContext context); }
|
@Test public void testOnTrigger() throws InitializationException { final String metricsUrl = "http: final String applicationId = "nifi"; final String hostName = "localhost"; final String jobName = "nifi_reporting_job"; final boolean jvmMetrics = true; final boolean authentication = false; final Client client = Mockito.mock(Client.class); final WebTarget target = Mockito.mock(WebTarget.class); final Invocation.Builder builder = Mockito.mock(Invocation.Builder.class); final Response response = Mockito.mock(Response.class); Mockito.when(response.getStatus()).thenReturn(200); Mockito.when(client.target(metricsUrl)).thenReturn(target); Mockito.when(target.request()).thenReturn(builder); Mockito.when(builder.post(Matchers.any(Entity.class))).thenReturn(response); final ComponentLog logger = Mockito.mock(ComponentLog.class); final ReportingInitializationContext initContext = Mockito.mock(ReportingInitializationContext.class); Mockito.when(initContext.getIdentifier()).thenReturn(UUID.randomUUID().toString()); Mockito.when(initContext.getLogger()).thenReturn(logger); final ReportingContext context = Mockito.mock(ReportingContext.class); Mockito.when(context.getProperty(PrometheusReportingTask.METRICS_COLLECTOR_URL)) .thenReturn(new MockPropertyValue(metricsUrl)); Mockito.when(context.getProperty(PrometheusReportingTask.APPLICATION_ID)) .thenReturn(new MockPropertyValue(applicationId)); Mockito.when(context.getProperty(PrometheusReportingTask.INSTANCE_ID)) .thenReturn(new MockPropertyValue(hostName)); Mockito.when(context.getProperty(PrometheusReportingTask.PROCESS_GROUP_IDS)) .thenReturn(new MockPropertyValue("1234")); Mockito.when(context.getProperty(PrometheusReportingTask.JOB_NAME)) .thenReturn(new MockPropertyValue(jobName)); Mockito.when(context.getProperty(PrometheusReportingTask.SEND_JVM_METRICS)) .thenReturn(new MockPropertyValue(Boolean.toString(jvmMetrics))); Mockito.when(context.getProperty(PrometheusReportingTask.USE_AUTHENTICATION)) .thenReturn(new 
MockPropertyValue(Boolean.toString(authentication))); final EventAccess eventAccess = Mockito.mock(EventAccess.class); Mockito.when(context.getEventAccess()).thenReturn(eventAccess); Mockito.when(eventAccess.getControllerStatus()).thenReturn(status); Mockito.when(eventAccess.getGroupStatus("1234")).thenReturn(status); final PrometheusReportingTask task = new PrometheusReportingTask(); task.initialize(initContext); task.onTrigger(context); }
|
KafkaSinkFactory implements SinkFactory { @Override public String name() { return "kafka"; } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config config); }
|
@Test public void testName() { assertThat(factory.name()).isEqualTo("kafka"); }
|
ReflectionHelper { public static void invokeTransformationMethod(Object mediator, Method method) { method = ReflectionHelper.makeAccessibleIfNot(method); List<Object> values = getParameterFromTransformationMethod(method); try { Class<?> returnType = method.getReturnType(); Outbound outbound = method.getAnnotation(Outbound.class); if (returnType.equals(Void.TYPE)) { method.invoke(mediator, values.toArray()); } else { if (outbound == null) { throw new IllegalStateException("The method " + method.getName() + " from " + mediator.getClass() + " needs to be annotated with @Outbound indicating the sink"); } else { Sink<Object> sink = getSinkOrFail(outbound.value()); Flowable<Object> flowable; if (Publisher.class.isAssignableFrom(returnType)) { flowable = Flowable.fromPublisher( (Publisher) method.invoke(mediator, values.toArray())); } else { throw new IllegalStateException("The method " + method.getName() + " from " + mediator.getClass() + " does not return a valid type"); } Type type = method.getGenericReturnType(); if (type instanceof ParameterizedType) { Type enclosed = ((ParameterizedType) type).getActualTypeArguments()[0]; if (!enclosed.getTypeName().startsWith(Message.class.getName())) { flowable.flatMapCompletable(sink::dispatch) .doOnError(Throwable::printStackTrace) .subscribe(); } else { flowable .flatMapCompletable(d -> sink.dispatch((Message) d)) .doOnError(Throwable::printStackTrace) .subscribe(); } } else { flowable.flatMapCompletable(sink::dispatch) .doOnError(Throwable::printStackTrace) .subscribe(); } } } } catch (Exception e) { throw new IllegalStateException("Unable to invoke " + method.getName() + " from " + mediator.getClass() .getName(), e); } } private ReflectionHelper(); static void set(Object mediator, Field field, Object source); static void invokeFunction(Object mediator, Method method); static void invokeTransformationMethod(Object mediator, Method method); static Object getSourceToInject(Class<?> clazz, Type type, Source<Object> source); 
static void inject(Object mediator); }
|
@Test(expected = IllegalArgumentException.class) public void testTransformationWithNotAnnotatedParameter() throws NoSuchMethodException { InvalidBecauseOfBadParams test = new InvalidBecauseOfBadParams(); Method method = test.getClass().getMethod("trans", Source.class); ReflectionHelper.invokeTransformationMethod(test, method); }
|
ReflectionHelper { static Sink<Object> getSinkOrFail(String name) { Sink<Object> sink = FluidRegistry.sink(Objects.requireNonNull(name)); if (sink == null) { throw new IllegalArgumentException("Unable to find the sink " + name); } return sink; } private ReflectionHelper(); static void set(Object mediator, Field field, Object source); static void invokeFunction(Object mediator, Method method); static void invokeTransformationMethod(Object mediator, Method method); static Object getSourceToInject(Class<?> clazz, Type type, Source<Object> source); static void inject(Object mediator); }
|
@Test public void testGettingAMissingSink() { FluidRegistry.register("my-sink", Sink.list()); Sink<Object> sink = ReflectionHelper.getSinkOrFail("my-sink"); assertThat(sink).isNotNull(); try { ReflectionHelper.getSinkOrFail("missing"); fail("The sink should be missing"); } catch (IllegalArgumentException e) { } }
|
ReflectionHelper { static Source<Object> getSourceOrFail(String name) { Source<Object> src = FluidRegistry.source(Objects.requireNonNull(name)); if (src == null) { throw new IllegalArgumentException("Unable to find the source " + name); } return src; } private ReflectionHelper(); static void set(Object mediator, Field field, Object source); static void invokeFunction(Object mediator, Method method); static void invokeTransformationMethod(Object mediator, Method method); static Object getSourceToInject(Class<?> clazz, Type type, Source<Object> source); static void inject(Object mediator); }
|
@Test public void testGettingAMissingSource() { FluidRegistry.register("my-source", Source.empty()); Source<Object> source = ReflectionHelper.getSourceOrFail("my-source"); assertThat(source).isNotNull(); try { ReflectionHelper.getSourceOrFail("missing"); fail("The source should be missing"); } catch (IllegalArgumentException e) { } }
|
FluidRegistry { public static synchronized <T> void register(Source<T> source) { sources.put(Objects.requireNonNull(source.name(), NAME_NOT_PROVIDED_MESSAGE), source); } private FluidRegistry(); static synchronized void initialize(Vertx vertx, FluidConfig config); static void reset(); static synchronized void register(Source<T> source); static synchronized void register(Sink<T> sink); static synchronized void register(String name, Source<T> source); static synchronized void register(String name, Sink<T> sink); static synchronized void unregisterSource(String name); static synchronized void unregisterSink(String name); @SuppressWarnings("unchecked") static Source<T> source(String name); @SuppressWarnings("unchecked") static Sink<T> sink(String name); @SuppressWarnings({"unused", "unchecked"}) static Source<T> source(String name, Class<T> clazz); }
|
@Test(expected = NullPointerException.class) public void testRegistrationOfSinkWithNullName() { Sink<String> discard = Sink.discard(); FluidRegistry.register(null, discard); }
@Test(expected = NullPointerException.class) public void testRegistrationOfSourceWithNullName() { Source<String> source = Source.empty(); FluidRegistry.register(null, source); }
|
FluidRegistry { public static synchronized void initialize(Vertx vertx, FluidConfig config) { sinks.putAll(SourceAndSinkBuilder.createSinksFromConfiguration(vertx, config)); sources.putAll(SourceAndSinkBuilder.createSourcesFromConfiguration(vertx, config)); } private FluidRegistry(); static synchronized void initialize(Vertx vertx, FluidConfig config); static void reset(); static synchronized void register(Source<T> source); static synchronized void register(Sink<T> sink); static synchronized void register(String name, Source<T> source); static synchronized void register(String name, Sink<T> sink); static synchronized void unregisterSource(String name); static synchronized void unregisterSink(String name); @SuppressWarnings("unchecked") static Source<T> source(String name); @SuppressWarnings("unchecked") static Sink<T> sink(String name); @SuppressWarnings({"unused", "unchecked"}) static Source<T> source(String name, Class<T> clazz); }
|
@Test public void testInitialize() { Fluid fluid = Fluid.create(); assertThat(FluidRegistry.source("unknown")).isNull(); assertThat(FluidRegistry.sink("unknown")).isNull(); fluid.vertx().close(); }
|
SourceAndSinkBuilder { public static Map<String, Source> createSourcesFromConfiguration(Vertx vertx, FluidConfig config) { Map<String, Source> map = new HashMap<>(); Optional<Config> sources = config.getConfig("sources"); if (sources.isPresent()) { Iterator<String> names = sources.get().names(); while (names.hasNext()) { String name = names.next(); LOGGER.info("Creating source from configuration `" + name + "`"); Optional<Config> conf = sources.get().getConfig(name); Source<?> source = buildSource(vertx, name, conf.orElseThrow(() -> new IllegalStateException("Illegal configuration for source `" + name + "`"))); map.put(name, source); } } else { LOGGER.warn("No sources configured from the fluid configuration"); } return map; } static Map<String, Source> createSourcesFromConfiguration(Vertx vertx, FluidConfig config); static Map<String, Sink> createSinksFromConfiguration(Vertx vertx, FluidConfig config); }
|
// Verifies that two sources ("source1" of Strings, "source2" of Integers) are built from the
// test configuration, keep their configured names, and emit the expected payloads when drained
// into ListSinks. Assumes the `fluid`/`vertx` fixtures are initialized elsewhere in the test class.
@SuppressWarnings("unchecked") @Test public void loadSourceTest() { Map<String, Source> sources = SourceAndSinkBuilder.createSourcesFromConfiguration(vertx, fluid.getConfig()); assertThat(sources).hasSize(2); Source<String> source1 = sources.get("source1"); assertThat(source1).isNotNull(); Source<Integer> source2 = sources.get("source2"); assertThat(source2).isNotNull(); ListSink<String> list = new ListSink<>(); source1.to(list); assertThat(list.values()).containsExactly("a", "b", "c"); assertThat(source1.name()).isEqualTo("source1"); ListSink<Integer> list2 = new ListSink<>(); source2.to(list2); assertThat(list2.values()).containsExactly(0, 1, 2, 3); assertThat(source2.name()).isEqualTo("source2"); }
|
SourceAndSinkBuilder {
    /**
     * Builds all named {@code Sink} instances declared under the "sinks" section of the
     * fluid configuration. Each child config node becomes one sink via {@code buildSink};
     * a node without a usable configuration fails fast with {@link IllegalStateException}.
     * Logs a warning and returns an empty map when the "sinks" section is absent.
     *
     * @param vertx  the Vert.x instance the sinks are bound to
     * @param config the fluid configuration to read the "sinks" section from
     * @return a map from sink name to the constructed sink (never {@code null})
     */
    public static Map<String, Sink> createSinksFromConfiguration(Vertx vertx, FluidConfig config) {
        Map<String, Sink> map = new HashMap<>();
        Optional<Config> sinks = config.getConfig("sinks");
        if (sinks.isPresent()) {
            Iterator<String> names = sinks.get().names();
            while (names.hasNext()) {
                String name = names.next();
                LOGGER.info("Creating sink from configuration `" + name + "`");
                Optional<Config> conf = sinks.get().getConfig(name);
                // Bug fix: the error message previously said "source" (copy-paste from the
                // source builder); this method builds sinks, so report "sink".
                Sink<?> sink = buildSink(vertx, name, conf.orElseThrow(() ->
                    new IllegalStateException("Illegal configuration for sink `" + name + "`")));
                map.put(name, sink);
            }
        } else {
            LOGGER.warn("No sinks configured from the fluid configuration");
        }
        return map;
    }
    static Map<String, Source> createSourcesFromConfiguration(Vertx vertx, FluidConfig config);
    static Map<String, Sink> createSinksFromConfiguration(Vertx vertx, FluidConfig config);
}
|
// Verifies that two sinks ("sink1", "sink2") are built from the test configuration as
// ListSink instances and that payloads pushed through them are captured in order.
// Assumes the `fluid`/`vertx` fixtures are initialized elsewhere in the test class.
@SuppressWarnings("unchecked") @Test public void loadSinkTest() { Map<String, Sink> sinks = SourceAndSinkBuilder.createSinksFromConfiguration(vertx, fluid.getConfig()); assertThat(sinks).hasSize(2); Sink<String> sink1 = sinks.get("sink1"); assertThat(sink1).isNotNull().isInstanceOf(ListSink.class); Sink<Integer> sink2 = sinks.get("sink2"); assertThat(sink2).isNotNull().isInstanceOf(ListSink.class); Source.from("1", "2", "3").to(sink1); Source.from(4, 5).to(sink2); assertThat(((ListSink) sink1).values()).containsExactly("1", "2", "3"); assertThat(((ListSink) sink2).values()).containsExactly(4, 5); }
|
// Required-header accessor: reads the ADDRESS header from the message and casts it to String.
// Throws ClassCastException if the header holds a non-String value; behavior when the header
// is absent depends on Message.get (presumably returns null) — use addressOpt for safe access.
CommonHeaders { public static String address(Message message) { return (String) message.get(ADDRESS); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// The ADDRESS header set on the fixture must be returned verbatim.
@Test
public void shouldGetRequiredAddress() {
    String extracted = address(messageWithCommonHeaders);
    assertThat(extracted).isEqualTo("address");
}
|
// Optional variant of address(): delegates to Message.getOpt so an absent ADDRESS header
// yields Optional.empty() instead of null/ClassCastException. Unchecked cast comes from
// the untyped header storage.
CommonHeaders { @SuppressWarnings("unchecked") public static Optional<String> addressOpt(Message message) { return message.getOpt(ADDRESS); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// A message carrying no common headers must produce an absent Optional, not null.
@Test
public void shouldHandleEmptyAddress() {
    assertThat(addressOpt(messageWithoutCommonHeaders).isPresent()).isFalse();
}
|
// Factory method: wraps a newly constructed KafkaSink in an already-resolved Single.
// Construction is eager — any configuration validation done by the KafkaSink constructor
// happens synchronously here, not on subscription.
KafkaSinkFactory implements SinkFactory { @Override public <T> Single<Sink<T>> create(Vertx vertx, String name, Config config) { return Single.just(new KafkaSink<>(vertx, name, config)); } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config config); }
|
// An empty (null-node) config must make sink creation fail eagerly with ConfigException —
// consistent with the factory constructing the KafkaSink synchronously in create().
@Test(expected = ConfigException.class) public void testCreationWithoutParameter() { factory.create(vertx, null, new Config(NullNode.getInstance())); }
// The minimal viable Kafka config (bootstrap servers + key/value serializers) must yield a
// KafkaSink instance; blockingGet is safe because create() returns an already-resolved Single.
@Test public void testCreationWithMinimalConfiguration() throws IOException { Single<Sink<Object>> single = factory.create(vertx, null, new Config(new JsonObject() .put("bootstrap.servers", "localhost:9092") .put("key.serializer", JsonObjectSerializer.class.getName()) .put("value.serializer", JsonObjectSerializer.class.getName()))); Sink<Object> sink = single.blockingGet(); assertThat(sink).isInstanceOf(KafkaSink.class); }
|
// Required-header accessor: reads the KEY header and casts to String. Mirrors address();
// use keyOpt for absence-safe access.
CommonHeaders { public static String key(Message message) { return (String) message.get(KEY); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// The KEY header set on the fixture must be returned verbatim.
@Test
public void shouldGetRequiredKey() {
    String extracted = key(messageWithCommonHeaders);
    assertThat(extracted).isEqualTo("key");
}
|
// Optional variant of key(): delegates to Message.getOpt so an absent KEY header yields
// Optional.empty(). Unchecked cast comes from the untyped header storage.
CommonHeaders { @SuppressWarnings("unchecked") public static Optional<String> keyOpt(Message message) { return message.getOpt(KEY); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// A message carrying no common headers must produce an absent Optional, not null.
@Test
public void shouldHandleEmptyKey() {
    assertThat(keyOpt(messageWithoutCommonHeaders).isPresent()).isFalse();
}
|
// Required-header accessor for the ORIGINAL payload header. The unchecked cast to the
// caller-chosen T is unavoidable with untyped header storage; a mismatched T surfaces as
// ClassCastException at the call site, not here.
CommonHeaders { @SuppressWarnings("unchecked") public static <T> T original(Message message) { return (T) message.get(ORIGINAL); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// The ORIGINAL header set on the fixture must be returned verbatim; the target type
// is inferred from the assignment instead of an explicit type witness.
@Test
public void shouldGetRequiredOriginalData() {
    String payload = CommonHeaders.original(messageWithCommonHeaders);
    assertThat(payload).isEqualTo("original");
}
|
// Optional variant of original(): delegates to Message.getOpt so an absent ORIGINAL header
// yields Optional.empty(). Unchecked cast comes from the untyped header storage.
CommonHeaders { @SuppressWarnings("unchecked") public static <T> Optional<T> originalOpt(Message message) { return message.getOpt(ORIGINAL); } private CommonHeaders(); @SuppressWarnings("unchecked") static T original(Message message); @SuppressWarnings("unchecked") static Optional<T> originalOpt(Message message); static String address(Message message); @SuppressWarnings("unchecked") static Optional<String> addressOpt(Message message); static String key(Message message); @SuppressWarnings("unchecked") static Optional<String> keyOpt(Message message); static ResponseCallback responseCallback(Message message); static Optional<ResponseCallback> responseCallbackOpt(Message message); static final String ORIGINAL; static final String ADDRESS; static final String KEY; static final String RESPONSE_CALLBACK; static final String GROUP_KEY; }
|
// A message carrying no common headers must produce an absent Optional, not null.
@Test
public void shouldHandleEmptyOriginal() {
    assertThat(originalOpt(messageWithoutCommonHeaders).isPresent()).isFalse();
}
|
// Renames the source: rejects null/blank names with IllegalArgumentException, otherwise
// returns a NEW DefaultSource sharing the same flow and attributes (immutable-style API —
// the receiver is left unchanged). NOTE(review): signature stubs below are dataset artifacts.
DefaultSource implements Source<T> { @Override public Source<T> named(String name) { if (Strings.isBlank(name)) { throw new IllegalArgumentException("The name cannot be `null` or blank"); } return new DefaultSource<>(flow, name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... 
names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// named(null) must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testNullName() {
    Source<String> letters = Source.from("a", "b", "c");
    letters.named(null);
}
// A whitespace-only name must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testBlankName() {
    Source<String> letters = Source.from("a", "b", "c");
    letters.named("   ");
}
|
// Fallback combinator: if this source completes without emitting any item, the alternative
// source is used instead (RxJava switchIfEmpty). The alternative must be non-null; name and
// attributes of the receiver are preserved on the returned source.
DefaultSource implements Source<T> { @Override public Source<T> orElse(Source<T> alt) { Objects.requireNonNull(alt, "The alternative source must not be `null`"); return new DefaultSource<>(Flowable.fromPublisher(flow).switchIfEmpty(alt), name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... 
names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Covers the four orElse cases: non-empty source ignores the alternative; an empty source
// falls back to a non-empty alternative (twice, with different alternatives); and
// empty-orElse-empty stays empty.
@Test public void testOrElse() { Source<String> source = Source.from("a", "b", "c") .named("my source"); Source<String> empty = Source.<String>empty().named("empty"); Source<String> another = Source.from("d", "e", "f"); ListSink<String> sink = Sink.list(); source.orElse(another).to(sink); assertThat(sink.values()).containsExactly("a", "b", "c"); sink = Sink.list(); empty.orElse(source).to(sink); assertThat(sink.values()).containsExactly("a", "b", "c"); sink = Sink.list(); empty.orElse(another).to(sink); assertThat(sink.values()).containsExactly("d", "e", "f"); sink = Sink.list(); empty.orElse(empty).to(sink); assertThat(sink.values()).isEmpty(); }
|
// Asynchronous expansion: each message is mapped to an inner Publisher and the inner streams
// are merged (RxJava flatMap), so emission ORDER across inner streams is not guaranteed —
// contrast with concatMap. The mapper must be non-null.
DefaultSource implements Source<T> { @Override public <X> Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper) { Objects.requireNonNull(mapper, FUNCTION_CANNOT_BE_NULL_MESSAGE); return new DefaultSource<>(Flowable.fromPublisher(flow).flatMap(mapper::apply), name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... 
names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Checks flatMap duplicates each element; random delays make interleaving nondeterministic,
// so only containment (not order) is asserted — contrast with testConcatMap below.
@Test public void testFlatMap() { ListSink<Integer> sink = Sink.list(); Random random = new Random(); Source.from(1, 2, 3, 4, 5) .flatMap(i -> Flowable.fromArray(i, i).delay(random.nextInt(10), TimeUnit.MILLISECONDS)) .to(sink); await().until(() -> sink.values().size() == 10); assertThat(sink.values()).contains(1, 1, 2, 2, 3, 3, 4, 4, 5, 5); }
|
// Ordered expansion: like flatMap but inner Publishers are subscribed one after another
// (RxJava concatMap), preserving upstream order at the cost of concurrency. The mapper
// must be non-null.
DefaultSource implements Source<T> { @Override public <X> Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper) { Objects.requireNonNull(mapper, FUNCTION_CANNOT_BE_NULL_MESSAGE); return new DefaultSource<>(Flowable.fromPublisher(flow).concatMap(mapper::apply), name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> 
broadcast(String... names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Checks concatMap duplicates each element AND preserves order despite random delays —
// containsExactly (ordered), unlike testFlatMap's contains.
@Test public void testConcatMap() { ListSink<Integer> sink = Sink.list(); Random random = new Random(); Source.from(1, 2, 3, 4, 5) .concatMap(i -> Flowable.fromArray(i, i).delay(random.nextInt(10), TimeUnit.MILLISECONDS)) .to(sink); await().until(() -> sink.values().size() == 10); assertThat(sink.values()).containsExactly(1, 1, 2, 2, 3, 3, 4, 4, 5, 5); }
|
// Whole-stream transformation: applies a Publisher-to-Publisher function to the entire flow
// (RxJava compose), keeping the receiver's name and attributes on the result.
DefaultSource implements Source<T> { @Override public <X> Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper) { return new DefaultSource<>(asFlowable().compose(mapper::apply), name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... 
names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Exercises composeFlowable: unwrap payloads, increment, rewrap — each input shifted by one.
@Test public void testCompose() { ListSink<Integer> list = Sink.list(); Source.from(1, 2, 3, 4, 5) .composeFlowable(flow -> flow.map(Message::payload).map(i -> i + 1).map(Message::new)) .to(list); assertThat(list.values()).containsExactly(2, 3, 4, 5, 6); }
|
// Merge combinator: interleaves this flow with another Publisher (RxJava mergeWith);
// relative ordering across the two flows depends on emission timing. Name and attributes
// of the receiver are preserved on the result.
DefaultSource implements Source<T> { @Override public Source<T> mergeWith(Publisher<Message<T>> source) { return new DefaultSource<>(asFlowable().mergeWith(source), name, attributes); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... 
names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Two scenarios: synchronous merge of two sources preserves concatenated order; a delayed
// first source merged with two others completes with all items but nondeterministic order,
// so only containment is asserted after awaiting all 8 elements.
@Test public void testMergeWith() { Source<String> s1 = Source.from("a", "b", "c"); Source<String> s2 = Source.from("d", "e", "f"); Source<String> s3 = Source.from("g", "h"); ListSink<String> list = Sink.list(); s1.mergeWith(s2).to(list); assertThat(list.values()).containsExactly("a", "b", "c", "d", "e", "f"); Random random = new Random(); ListSink<String> list2 = Sink.list(); s1.composeFlowable(flow -> flow.delay(s -> Single.just(s) .delay(random.nextInt(10), TimeUnit.MILLISECONDS) .toFlowable())) .mergeWith(s2, s3) .to(list2); await().atMost(1, TimeUnit.MINUTES).until(() -> list2.values().size() == 8); assertThat(list2.values()).contains("a", "b", "c", "d", "e", "f", "g", "h"); }
|
// Partitions the flow by a computed key (RxJava groupBy) and wraps each GroupedFlowable in a
// GroupedDataStream carrying its key. The key function must be non-null. flatMapSingle is
// used to re-emit each group as it appears.
DefaultSource implements Source<T> { @Override public <K> Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier) { Objects.requireNonNull(keySupplier, "The function computing the key must not be `null`"); return Flowable.fromPublisher(flow) .groupBy(keySupplier::apply) .flatMapSingle(gf -> { GroupedDataStream<K, T> stream = new GroupedDataStream<>(gf.getKey(), gf); return Single.just(stream); }); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> 
log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> broadcast(String... names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
// Verifies groupBy partitions words by their first letter. Note the input is lower-cased
// twice (toLowerCase() on the whole text before split, then mapPayload(String::toLowerCase))
// — the mapPayload step is redundant but harmless.
@Test public void testGroupBy() { String text = "In 1815, M. Charles–Francois-Bienvenu Myriel was Bishop of D He was an old man of about seventy-five " + "years of age; he had occupied the see of D since 1806. Although this detail has no connection whatever with the " + "real substance of what we are about to relate, it will not be superfluous, if merely for the sake of exactness in all" + " points, to mention here the various rumors and remarks which had been in circulation about him from the very moment " + "when he arrived in the diocese. True or false, that which is said of men often occupies as important a place in their" + " lives, and above all in their destinies, as that which they do. M. Myriel was the son of a councillor of the " + "Parliament of Aix; hence he belonged to the nobility of the bar. It was said that his father, destining him to be the" + " heir of his own post, had married him at a very early age, eighteen or twenty, in accordance with a custom which is " + "rather widely prevalent in parliamentary families. In spite of this marriage, however, it was said that Charles Myriel" + " created a great deal of talk. 
He was well formed, though rather short in stature, elegant, graceful, intelligent; the" + " whole of the first portion of his life had been devoted to the world and to gallantry."; Publisher<GroupedDataStream<Character, String>> publisher = Source.from( text.replaceAll("[^a-zA-Z ]", "").toLowerCase().split("\\s+")) .mapPayload(String::toLowerCase) .groupBy(msg -> msg.payload().charAt(0)); Multimap<Character, String> wordsByFirstLetter = ArrayListMultimap.create(); Flowable.fromPublisher(publisher) .doOnNext(stream -> Flowable.fromPublisher(stream).subscribe(word -> wordsByFirstLetter.put(stream.key(), word.payload()))) .subscribe(); assertThat(wordsByFirstLetter.get('a')).hasSize(25).contains("aix"); assertThat(wordsByFirstLetter.get('y')).hasSize(1).contains("years"); assertThat(wordsByFirstLetter.get('w')).hasSize(21).contains("widely"); assertThat(wordsByFirstLetter.get('j')).isEmpty(); }
|
// BUG FIX: the original wired BOTH sides of the split to sources.get(0) — the stream was
// broadcast into two branches, but branch 1 was never consumed and branch 0 was subscribed
// to twice. The right (non-matching) side now reads from sources.get(1), giving each
// broadcast leg exactly one subscriber as intended.
DefaultSource implements Source<T> { /** Splits this source into a pair: left receives messages matching {@code condition}, right receives the rest. */ @Override public Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition) { List<Source<T>> sources = broadcast(2); Source<T> left = sources.get(0).filter(condition); Source<T> right = sources.get(1).filterNot(condition); return pair(left, right); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> 
broadcast(String... names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
@Test public void testBranch() { Pair<Source<Integer>, Source<Integer>> branches = Source.from(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) .branch(i -> i.payload() % 3 == 0); ListSink<String> left = Sink.list(); ListSink<String> right = Sink.list(); branches.left().mapPayload(i -> Integer.toString(i)).to(left); branches.right().mapPayload(i -> Integer.toString(i)).to(right); assertThat(left.values()).containsExactly("3", "6", "9"); assertThat(right.values()).containsExactly("1", "2", "4", "5", "7", "8", "10"); }
|
// BUG FIX: same defect as branch() — both sides were wired to sources.get(0), leaving
// broadcast branch 1 unconsumed and branch 0 double-subscribed. The right side now uses
// sources.get(1) so each broadcast leg has exactly one subscriber.
DefaultSource implements Source<T> { /** Payload-based variant of branch(): left receives messages whose payload matches {@code condition}, right the rest. */ @Override public Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition) { List<Source<T>> sources = broadcast(2); Source<T> left = sources.get(0).filterPayload(condition); Source<T> right = sources.get(1).filterNotPayload(condition); return pair(left, right); } DefaultSource(Publisher<Message<T>> items, String name, Map<String, Object> attr); @Override Source<T> named(String name); @Override Source<T> unnamed(); @Override Source<T> withAttribute(String key, Object value); @Override Source<T> withoutAttribute(String key); @Override void subscribe(Subscriber<? super Message<T>> s); @Override Source<T> orElse(Source<T> alt); @Override String name(); @Override Optional<T> attr(String key); @Override Source<X> map(Function<Message<T>, Message<X>> mapper); @Override Source<X> mapPayload(Function<T, X> mapper); @Override Source<T> filter(Predicate<Message<T>> filter); @Override Source<T> filterPayload(Predicate<T> filter); @Override Source<T> filterNot(Predicate<Message<T>> filter); @Override Source<T> filterNotPayload(Predicate<T> filter); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> concatMap(Function<Message<T>, Publisher<Message<X>>> mapper); @Override Source<X> flatMap(Function<Message<T>, Publisher<Message<X>>> mapper, int maxConcurrency); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> concatMapPayload(Function<T, Publisher<X>> mapper); @Override Source<X> flatMapPayload(Function<T, Publisher<X>> mapper, int maxConcurrency); @Override Source<X> scan(Message<X> zero, BiFunction<Message<X>, Message<T>, Message<X>> function); @Override Source<X> scanPayloads(X zero, BiFunction<X, T, X> function); @Override Publisher<GroupedDataStream<K, T>> groupBy(Function<Message<T>, K> keySupplier); @Override Source<T> log(String loggerName); @Override List<Source<T>> broadcast(int numberOfBranches); @Override Map<String, Source<T>> 
broadcast(String... names); @Override Pair<Source<T>, Source<T>> branch(Predicate<Message<T>> condition); @Override Pair<Source<T>, Source<T>> branchOnPayload(Predicate<T> condition); @Override Sink<T> to(Sink<T> sink); @Override Flowable<Message<T>> asFlowable(); @Override Source<Pair<T, O>> zipWith(Publisher<Message<O>> source); @Override Source<Tuple> zipWith(Source... sources); Source<Tuple> zipWith(Publisher<Message>... sources); @Override Source<T> mergeWith(Publisher<Message<T>> source); @Override Source<T> mergeWith(Publisher<Message<T>>... sources); @Override Source<X> compose(Function<Publisher<Message<T>>, Publisher<Message<X>>> mapper); @Override Source<X> composeFlowable(Function<Flowable<Message<T>>, Flowable<Message<X>>> mapper); @Override Source<X> composePayloadFlowable(Function<Flowable<T>, Flowable<X>> function); static final String FUNCTION_CANNOT_BE_NULL_MESSAGE; static final String FILTER_CANNOT_BE_NULL_MESSAGE; }
|
@Test public void testBranchOnPayload() { Pair<Source<Integer>, Source<Integer>> branches = Source.from(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) .branchOnPayload(i -> i % 3 == 0); ListSink<String> left = Sink.list(); ListSink<String> right = Sink.list(); branches.left().mapPayload(i -> Integer.toString(i)).to(left); branches.right().mapPayload(i -> Integer.toString(i)).to(right); assertThat(left.values()).containsExactly("3", "6", "9"); assertThat(right.values()).containsExactly("1", "2", "4", "5", "7", "8", "10"); }
|
Tuple implements Iterable<Object> { Tuple(Object... items) { if (items == null) { this.items = Collections.emptyList(); } else { for (Object o : items) { if (o == null) { throw new IllegalArgumentException("A tuple cannot contain a `null` value"); } } this.items = Collections.unmodifiableList(Arrays.asList(items)); } } Tuple(Object... items); static Tuple tuple(Object... items); int size(); @SuppressWarnings("unchecked") T nth(int pos); @Override Iterator<Object> iterator(); boolean contains(Object value); final boolean containsAll(Collection<?> collection); final boolean containsAll(final Object... values); final int indexOf(Object value); final int lastIndexOf(Object value); final List<Object> asList(); @Override int hashCode(); @Override boolean equals(final Object obj); }
|
@Test(expected = IllegalArgumentException.class) public void testTupleWithNull() { Tuple.tuple("a", "b", null, "c"); }
|
EventBusSourceFactory implements SourceFactory { @Override public String name() { return "eventbus"; } @Override String name(); @Override Single<Source<T>> create(Vertx vertx, String name, Config config); }
|
@Test public void testName() { assertThat(factory.name()).isEqualTo("eventbus"); }
|
EventBusSourceFactory implements SourceFactory { @Override public <T> Single<Source<T>> create(Vertx vertx, String name, Config config) { String address = config.getString("address") .orElse(name); if (address == null) { throw new IllegalArgumentException("Either address or name must be set"); } return Single.just(new EventBusSource<>(vertx, name, address, config)); } @Override String name(); @Override Single<Source<T>> create(Vertx vertx, String name, Config config); }
|
// With no name and an empty config there is no address to use -> IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void testCreationWithoutParameter() { factory.create(vertx, null, new Config(NullNode.getInstance())); }
// An explicit "address" entry is sufficient even when the name is null.
@Test public void testCreationWithAddress() throws IOException { Single<Source<Object>> single = factory.create(vertx, null, new Config(new JsonObject().put("address", "an-address"))); Source<Object> sink = single.blockingGet(); assertThat(sink).isInstanceOf(EventBusSource.class); }
|
EventBusSinkFactory implements SinkFactory { @Override public String name() { return "eventbus"; } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config conf); }
|
@Test public void testName() { assertThat(factory.name()).isEqualTo("eventbus"); }
|
EventBusSinkFactory implements SinkFactory { @Override public <T> Single<Sink<T>> create(Vertx vertx, String name, Config conf) { return Single.just(new EventBusSink<>(vertx, conf)); } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config conf); }
|
// Missing address must fail; an explicit "address" entry must yield an EventBusSink.
@Test(expected = IllegalArgumentException.class) public void testCreationWithoutParameter() { factory.create(vertx, null, new Config(NullNode.getInstance())); }
@Test public void testCreationWithAddress() throws IOException { Single<Sink<Object>> single = factory.create(vertx, null, new Config(new JsonObject().put("address", "an-address"))); Sink<Object> sink = single.blockingGet(); assertThat(sink).isInstanceOf(EventBusSink.class); }
|
KafkaSourceFactory implements SourceFactory { @Override public String name() { return "kafka"; } @Override String name(); @Override Single<Source<T>> create(Vertx vertx, String name, Config config); }
|
@Test public void testName() { assertThat(factory.name()).isEqualTo("kafka"); }
|
CamelSink implements Sink<T> { public CamelContext camelContext() { return camelContext; } CamelSink(String name, Config config); @Override String name(); @Override Completable dispatch(Message<T> message); CamelContext camelContext(); }
|
@Test public void shouldWrapIntegersIntoCamelBodies(TestContext context) throws Exception { Async async = context.async(); CamelSink<Integer> sink = new CamelSink<>( null, new Config( new JsonObject().put("endpoint", "direct:test") ) ); CamelContext camelContext = sink.camelContext(); camelContext.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from("direct:test").process(event -> { if (event.getIn().getBody(Integer.class) == 10) { context.assertEquals(event.getIn().getBody(Integer.class), 10); async.complete(); } }); } }); Source.from(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).to(sink); }
|
CamelSinkFactory implements SinkFactory { @Override public String name() { return "camel"; } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config config); }
|
@Test public void testName() { CamelSinkFactory factory = new CamelSinkFactory(); assertThat(factory.name()).isEqualTo("camel"); }
|
CamelSinkFactory implements SinkFactory { @Override public <T> Single<Sink<T>> create(Vertx vertx, String name, Config config) { return Single.just(new CamelSink<>(name, config)); } @Override String name(); @Override Single<Sink<T>> create(Vertx vertx, String name, Config config); }
|
// Missing endpoint must fail; an explicit "endpoint" entry must yield a CamelSink.
@Test(expected = IllegalArgumentException.class) public void testCreationWithoutParameter() { CamelSinkFactory factory = new CamelSinkFactory(); factory.create(vertx, null , new Config(NullNode.getInstance())); }
@Test public void testCreationWithEndpoint() throws IOException { CamelSinkFactory factory = new CamelSinkFactory(); Single<Sink<Object>> single = factory.create(vertx, null , new Config(new JsonObject().put("endpoint", "my-endpoint"))); Sink<Object> sink = single.blockingGet(); assertThat(sink).isInstanceOf(CamelSink.class); }
|
InMemoryDocumentView implements DocumentView { @Override public synchronized Completable save(String collection, String key, Map<String, Object> document) { Objects.requireNonNull(collection, NULL_COLLECTION_MESSAGE); Objects.requireNonNull(key, NULL_KEY_MESSAGE); Objects.requireNonNull(collection, "The `document` must not be `null`"); return Completable.fromAction(() -> { Map<String, Map<String, Object>> collectionData = documents.computeIfAbsent(collection, k -> new LinkedHashMap<>()); collectionData.put(key, document); }); } @Override synchronized Completable save(String collection, String key, Map<String, Object> document); @Override synchronized Single<Map<String, Object>> findById(String collection, String key); @Override synchronized Single<Long> count(String collection); @Override synchronized Flowable<DocumentWithKey> findAll(String collection); @Override synchronized Completable remove(String collection, String key); static final String NULL_COLLECTION_MESSAGE; static final String NULL_KEY_MESSAGE; }
|
@Test public void shouldCallSubscriberOnSave(TestContext context) { Async async = context.async(); view.save(collection, key, document).subscribe(async::complete); }
|
KafkaSourceFactory implements SourceFactory { @Override public <T> Single<Source<T>> create(Vertx vertx, String name, Config config) { return Single.just(new KafkaSource<>(vertx, name, config)); } @Override String name(); @Override Single<Source<T>> create(Vertx vertx, String name, Config config); }
|
// Empty config must raise Kafka's ConfigException; the minimal bootstrap+deserializer
// configuration must yield a KafkaSource.
@Test(expected = ConfigException.class) public void testCreationWithoutParameter() { factory.create(vertx, null, new Config(NullNode.getInstance())); }
@Test public void testCreationWithMinimalConfiguration() throws IOException { Single<Source<Object>> single = factory.create(vertx, null, new Config(new JsonObject() .put("bootstrap.servers", "localhost:9092") .put("key.deserializer", JsonObjectDeserializer.class.getName()) .put("value.deserializer", JsonObjectDeserializer.class.getName()))); Source<Object> sink = single.blockingGet(); assertThat(sink).isInstanceOf(KafkaSource.class); }
|
ReflectionHelper { public static void set(Object mediator, Field field, Object source) { if (!field.isAccessible()) { field.setAccessible(true); } try { field.set(mediator, source); } catch (IllegalAccessException e) { throw new IllegalStateException("Unable to set field " + field.getName() + " from " + mediator.getClass().getName() + " to " + source, e); } } private ReflectionHelper(); static void set(Object mediator, Field field, Object source); static void invokeFunction(Object mediator, Method method); static void invokeTransformationMethod(Object mediator, Method method); static Object getSourceToInject(Class<?> clazz, Type type, Source<Object> source); static void inject(Object mediator); }
|
// Valid assignment sets the (private) field; assigning an int to a String field triggers
// IllegalArgumentException from Field.set itself (not the helper's wrapped exception).
@Test public void testValidSet() throws NoSuchFieldException { Test1 test = new Test1(); Field field = Test1.class.getDeclaredField("foo"); ReflectionHelper.set(test, field, "hello"); assertThat(test.foo).isEqualTo("hello"); }
@Test(expected = IllegalArgumentException.class) public void testInvalidSet() throws NoSuchFieldException { Test1 test = new Test1(); Field field = Test1.class.getDeclaredField("foo"); ReflectionHelper.set(test, field, 10); }
|
ReflectionHelper { public static void invokeFunction(Object mediator, Method method) { method = ReflectionHelper.makeAccessibleIfNot(method); List<Flowable<Object>> sources = getFlowableForParameters(method); Function function = method.getAnnotation(Function.class); Sink<Object> sink = null; if (function.outbound().length() != 0) { sink = getSinkOrFail(function.outbound()); } Method methodToBeInvoked = method; Sink<Object> theSink = sink; Flowable<Optional<Object>> result; if (sources.size() == 1) { result = sources.get(0) .map(item -> Optional.ofNullable(methodToBeInvoked.invoke(mediator, item))); } else { result = Flowable.zip(sources, args -> args) .map(args -> Optional.ofNullable(methodToBeInvoked.invoke(mediator, args))); } result .flatMapCompletable(maybeResult -> { if (! maybeResult.isPresent()) { return Completable.complete(); } else { return propagateResult(maybeResult.get(), theSink); } }) .doOnError(Throwable::printStackTrace) .subscribe(); } private ReflectionHelper(); static void set(Object mediator, Field field, Object source); static void invokeFunction(Object mediator, Method method); static void invokeTransformationMethod(Object mediator, Method method); static Object getSourceToInject(Class<?> clazz, Type type, Source<Object> source); static void inject(Object mediator); }
|
// Methods with un-annotated parameters, or no parameters at all, must be rejected.
@Test(expected = IllegalArgumentException.class) public void testFunctionWithNotAnnotatedParameter() throws NoSuchMethodException { InvalidBecauseOfBadParams test = new InvalidBecauseOfBadParams(); Method method = test.getClass().getMethod("function", String.class); ReflectionHelper.invokeFunction(test, method); }
@Test(expected = IllegalArgumentException.class) public void testFunctionWithoutParam() throws NoSuchMethodException { InvalidBecauseOfBadParams test = new InvalidBecauseOfBadParams(); Method method = test.getClass().getMethod("functionWithoutParam"); ReflectionHelper.invokeFunction(test, method); }
|
ObjectMapperJsonSerializer implements JsonSerializer { @Override public <T> T clone(T object) { if (object instanceof Collection) { Object firstElement = findFirstNonNullElement((Collection) object); if (firstElement != null && !(firstElement instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass()); return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } if (object instanceof Map) { Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object); if (firstEntry != null) { Object key = firstEntry.getKey(); Object value = firstEntry.getValue(); if (!(key instanceof Serializable) || !(value instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass()); return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } } return object instanceof Serializable ? (T) SerializationHelper.clone((Serializable) object) : objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), (Class<T>) object.getClass()); } ObjectMapperJsonSerializer(ObjectMapperWrapper objectMapperWrapper); @Override T clone(T object); }
|
// Clone tests covering empty collections, (non-)serializable keys/values, and null values.
// NOTE(review): several test method names are duplicated below (e.g.
// should_clone_map_of_non_serializable_key appears twice, differing only in diamond
// operator usage) — these look like two merged variants of the same test class and would
// not compile in a single class; confirm which variant is current and drop the other.
@Test public void should_clone_empty_collection() { List<?> original = new ArrayList(); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_key() { Map<NonSerializableObject, String> original = new HashMap<NonSerializableObject, String>(); original.put(new NonSerializableObject("key"), "value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_value() { Map<String, NonSerializableObject> original = new HashMap<String, NonSerializableObject>(); original.put("key", new NonSerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_serializable_key_and_value() { Map<String, SerializableObject> original = new HashMap<String, SerializableObject>(); original.put("key", new SerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_with_null_value() { Map<String, Object> original = new HashMap<String, Object>(); original.put("null", null); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_value_with_null_value() { Map<String, NonSerializableObject> original = new LinkedHashMap<String, NonSerializableObject>(); original.put("null", null); original.put("key", new NonSerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_serializable_key_and_value_with_null() { Map<String, SerializableObject> original = new LinkedHashMap<String, SerializableObject>(); original.put("null", null); original.put("key", new SerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_collection_of_serializable_object() { List<SerializableObject> original = new ArrayList<>(); original.add(new SerializableObject("value")); List<SerializableObject> cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_collection_of_non_serializable_object() { List<NonSerializableObject> original = new ArrayList<>(); original.add(new NonSerializableObject("value")); List<NonSerializableObject> cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_key() { Map<NonSerializableObject, String> original = new HashMap<>(); original.put(new NonSerializableObject("key"), "value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_value() { Map<String, NonSerializableObject> original = new HashMap<>(); original.put("key", new NonSerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_serializable_key_and_value() { Map<String, SerializableObject> original = new HashMap<>(); original.put("key", new SerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_with_null_value() { Map<String, Object> original = new HashMap<>(); original.put("null", null); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_non_serializable_value_with_null_value() { Map<String, NonSerializableObject> original = new LinkedHashMap<>(); original.put("null", null); original.put("key", new NonSerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_map_of_serializable_key_and_value_with_null() { Map<String, SerializableObject> original = new LinkedHashMap<>(); original.put("null", null); original.put("key", new SerializableObject("value")); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_serializable_object() { Object original = new SerializableObject("value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_non_serializable_object() { Object original = new NonSerializableObject("value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_collection_of_serializable_object() { List<SerializableObject> original = new ArrayList<SerializableObject>(); original.add(new SerializableObject("value")); List<SerializableObject> cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
@Test public void should_clone_collection_of_non_serializable_object() { List<NonSerializableObject> original = new ArrayList<NonSerializableObject>(); original.add(new NonSerializableObject("value")); List<NonSerializableObject> cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
|
PostgreSQLHStoreType extends ImmutableType<Map> { @Override protected Map get(ResultSet rs, String[] names, SessionImplementor session, Object owner) throws SQLException { return (Map) rs.getObject(names[0]); } PostgreSQLHStoreType(); @Override int[] sqlTypes(); static final PostgreSQLHStoreType INSTANCE; }
|
// Persists a Book with hstore properties, then reloads it by natural id and verifies the
// map round-trips. NOTE(review): the two methods below are identical except the second
// relies on the typed bySimpleNaturalId().load() return (no cast) — duplicated test()
// names cannot coexist in one class; these look like two merged dataset variants.
@Test public void test() { doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { Book book = new Book(); book.setIsbn("978-9730228236"); book.getProperties().put("title", "High-Performance Java Persistence"); book.getProperties().put("author", "Vlad Mihalcea"); book.getProperties().put("publisher", "Amazon"); book.getProperties().put("price", "$44.95"); entityManager.persist(book); return null; } }); doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { Book book = (Book) entityManager.unwrap(Session.class) .bySimpleNaturalId(Book.class) .load("978-9730228236"); assertEquals("High-Performance Java Persistence", book.getProperties().get("title")); assertEquals("Vlad Mihalcea", book.getProperties().get("author")); return null; } }); }
@Test public void test() { doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { Book book = new Book(); book.setIsbn("978-9730228236"); book.getProperties().put("title", "High-Performance Java Persistence"); book.getProperties().put("author", "Vlad Mihalcea"); book.getProperties().put("publisher", "Amazon"); book.getProperties().put("price", "$44.95"); entityManager.persist(book); return null; } }); doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { Book book = entityManager.unwrap(Session.class) .bySimpleNaturalId(Book.class) .load("978-9730228236"); assertEquals("High-Performance Java Persistence", book.getProperties().get("title")); assertEquals("Vlad Mihalcea", book.getProperties().get("author")); return null; } }); }
|
// Parses a PostgreSQL-style range literal, e.g. "[1,3]", "(-5,5]", "[,)" or "empty".
// Bound inclusivity is encoded in a bitmask from the bracket characters; an empty bound
// string or a bound ending in the INFINITY token marks that side as infinite, in which
// case the converter is skipped and the bound stays null. Throws IllegalArgumentException
// when no comma separates the bounds. The bitmask assembly and substring offsets are
// order-sensitive — left byte-identical on purpose.
Range implements Serializable { @SuppressWarnings("unchecked") public static <T extends Comparable> Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz) { if(str.equals(EMPTY)) { return emptyRange(clazz); } int mask = str.charAt(0) == '[' ? LOWER_INCLUSIVE : LOWER_EXCLUSIVE; mask |= str.charAt(str.length() - 1) == ']' ? UPPER_INCLUSIVE : UPPER_EXCLUSIVE; int delim = str.indexOf(','); if (delim == -1) { throw new IllegalArgumentException("Cannot find comma character"); } String lowerStr = str.substring(1, delim); String upperStr = str.substring(delim + 1, str.length() - 1); if (lowerStr.length() == 0 || lowerStr.endsWith(INFINITY)) { mask |= LOWER_INFINITE; } if (upperStr.length() == 0 || upperStr.endsWith(INFINITY)) { mask |= UPPER_INFINITE; } T lower = null; T upper = null; if ((mask & LOWER_INFINITE) != LOWER_INFINITE) { lower = converter.apply(lowerStr); } if ((mask & UPPER_INFINITE) != UPPER_INFINITE) { upper = converter.apply(upperStr); } return new Range<T>(lower, upper, mask, clazz); } private Range(T lower, T upper, int mask, Class<T> clazz); @SuppressWarnings("unchecked") static Range<T> closed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> open(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openClosed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> closedOpen(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> closedInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> infiniteOpen(T upper); @SuppressWarnings("unchecked") static Range<T> infiniteClosed(T upper); @SuppressWarnings("unchecked") static Range<T> infinite(Class<T> cls); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> 
longRange(String range); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); boolean hasMask(int flag); boolean isLowerBoundClosed(); boolean isUpperBoundClosed(); boolean hasLowerBound(); boolean hasUpperBound(); T lower(); T upper(); @SuppressWarnings("unchecked") boolean contains(T point); boolean contains(Range<T> range); String asString(); static Range<R> emptyRange(Class<R> clazz); static final int LOWER_INCLUSIVE; static final int LOWER_EXCLUSIVE; static final int UPPER_INCLUSIVE; static final int UPPER_EXCLUSIVE; static final int LOWER_INFINITE; static final int UPPER_INFINITE; static final String EMPTY; static final String INFINITY; }
|
@Test public void ofStringTest() { assertThat(integerRange("[1,3]").lower(), is(1)); assertThat(integerRange("[1,3]").upper(), is(3)); assertThat(integerRange("[1,3]").isUpperBoundClosed(), is(true)); assertThat(integerRange("[1,3]").isLowerBoundClosed(), is(true)); assertThat(integerRange("[,3]").lower(), is(nullValue())); assertThat(integerRange("[,3]").upper(), is(3)); assertThat(integerRange("[,3]").hasLowerBound(), is(false)); assertThat(integerRange("[,3]").hasUpperBound(), is(true)); assertThat(integerRange("[,3]").isUpperBoundClosed(), is(true)); assertThat(integerRange("[,3]").isLowerBoundClosed(), is(false)); assertThat(integerRange("[,]").lower(), is(nullValue())); assertThat(integerRange("[,]").upper(), is(nullValue())); assertThat(integerRange("[,]").hasLowerBound(), is(false)); assertThat(integerRange("[,]").hasUpperBound(), is(false)); assertThat(integerRange("[,]").isUpperBoundClosed(), is(false)); assertThat(integerRange("[,]").isLowerBoundClosed(), is(false)); assertThat(integerRange("(-5,5]").isUpperBoundClosed(), is(true)); assertThat(integerRange("(-5,5]").isLowerBoundClosed(), is(false)); }
|
// Guava-backed range Hibernate type (variant with anonymous Function — presumably kept for
// pre-Java-8 source compatibility; TODO confirm against the module's language level).
// integerRange delegates to the class's own ofString with Integer::parseInt semantics.
PostgreSQLGuavaRangeType extends ImmutableType<Range> implements DynamicParameterizedType { public static Range<Integer> integerRange(String range) { return ofString(range, new Function<String, Integer>() { @Override public Integer apply(String s) { return Integer.parseInt(s); } }, Integer.class); } PostgreSQLGuavaRangeType(); @Override int[] sqlTypes(); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> cls); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> longRange(String range); String asString(Range range); @Override void setParameterValues(Properties parameters); Class<?> getElementType(); static final PostgreSQLGuavaRangeType INSTANCE; }
|
// Unlike the plain Range parser above, the Guava type rejects the fully unbounded "(,)"
// literal: the root cause must be an IllegalArgumentException mentioning "Cannot find bound type".
@Test public void testUnboundedRangeStringIsRejected() { try { PostgreSQLGuavaRangeType instance = PostgreSQLGuavaRangeType.INSTANCE; instance.integerRange("(,)"); fail("An unbounded range string should throw an exception!"); } catch (Exception e) { Throwable rootCause = Throwables.getRootCause(e); assertTrue(rootCause instanceof IllegalArgumentException); assertTrue(rootCause.getMessage().contains("Cannot find bound type")); } }
|
// JacksonUtil.clone: deep-copies any value by delegating to the shared ObjectMapperWrapper
// (serialize-then-deserialize round trip — TODO confirm, the wrapper implementation is not
// visible here).
JacksonUtil { public static <T> T clone(T value) { return ObjectMapperWrapper.INSTANCE.clone(value); } static T fromString(String string, Class<T> clazz); static T fromString(String string, Type type); static String toString(Object value); static JsonNode toJsonNode(String value); static T clone(T value); }
|
// Regression test: cloning must work both for a single entity and for a generic List of
// polymorphic POJOs, and the clones must compare equal to the originals.
@Test public void cloneDeserializeStepErrorTest() { MyEntity entity = new MyEntity(); entity.setValue("some value"); entity.setPojos(Arrays.asList( createMyPojo("first value", MyType.A, "1.1", createOtherPojo("USD")), createMyPojo("second value", MyType.B, "1.2", createOtherPojo("BRL")) )); MyEntity clone = JacksonUtil.clone(entity); assertEquals(clone, entity); List<MyPojo> clonePojos = JacksonUtil.clone(entity.getPojos()); assertEquals(clonePojos, entity.getPojos()); }
|
JsonTypeDescriptor extends AbstractTypeDescriptor<Object> implements DynamicParameterizedType { @Override public boolean areEqual(Object one, Object another) { if (one == another) { return true; } if (one == null || another == null) { return false; } if (one instanceof String && another instanceof String) { return one.equals(another); } if (one instanceof Collection && another instanceof Collection) { return Objects.equals(one, another); } if (one.getClass().equals(another.getClass()) && ReflectionUtils.getDeclaredMethodOrNull(one.getClass(), "equals", Object.class) != null) { return one.equals(another); } return objectMapperWrapper.toJsonNode(objectMapperWrapper.toString(one)).equals( objectMapperWrapper.toJsonNode(objectMapperWrapper.toString(another)) ); } JsonTypeDescriptor(); JsonTypeDescriptor(Type type); JsonTypeDescriptor(final ObjectMapperWrapper objectMapperWrapper); JsonTypeDescriptor(final ObjectMapperWrapper objectMapperWrapper, Type type); @Override void setParameterValues(Properties parameters); @Override boolean areEqual(Object one, Object another); @Override String toString(Object value); @Override Object fromString(String string); @SuppressWarnings({"unchecked"}) @Override X unwrap(Object value, Class<X> type, WrapperOptions options); @Override Object wrap(X value, WrapperOptions options); }
|
@Test public void testSetsAreEqual() { JsonTypeDescriptor descriptor = new JsonTypeDescriptor(); Form theFirst = createForm(1, 2, 3); Form theSecond = createForm(3, 2, 1); assertTrue(descriptor.areEqual(theFirst, theSecond)); }
|
// SQLExtractor (Hibernate 5.2+ variant): unwraps the JPA Query to AbstractProducedQuery and
// asks the session factory's query-plan cache for the SQL generated from the HQL string.
// Returns the first SQL statement, or null if the plan produced none.
SQLExtractor { public static String from(Query query) { AbstractProducedQuery abstractProducedQuery = query.unwrap(AbstractProducedQuery.class); String[] sqls = abstractProducedQuery .getProducer() .getFactory() .getQueryPlanCache() .getHQLQueryPlan(abstractProducedQuery.getQueryString(), false, Collections.emptyMap()) .getSqlStrings(); return sqls.length > 0 ? sqls[0] : null; } private SQLExtractor(); static String from(Query query); }
|
// JPQL with a grouping function must yield a non-null SQL string; the generated SQL is logged.
@Test public void testJPQL() { doInJPA(entityManager -> { Query jpql = entityManager .createQuery( "select " + " YEAR(p.createdOn) as year, " + " count(p) as postCount " + "from " + " Post p " + "group by " + " YEAR(p.createdOn)", Tuple.class); String sql = SQLExtractor.from(jpql); assertNotNull(sql); LOGGER.info( "The JPQL query: [\n{}\n]\ngenerates the following SQL query: [\n{}\n]", jpql.unwrap(org.hibernate.query.Query.class).getQueryString(), sql ); }); }
// Criteria API queries (join + like + order by) must also be extractable to SQL.
@Test public void testCriteriaAPI() { doInJPA(entityManager -> { CriteriaBuilder builder = entityManager.getCriteriaBuilder(); CriteriaQuery<PostComment> criteria = builder.createQuery(PostComment.class); Root<PostComment> postComment = criteria.from(PostComment.class); Join<PostComment, Post> post = postComment.join("post"); criteria.where( builder.like(post.get("title"), "%Java%") ); criteria.orderBy( builder.asc(postComment.get("id")) ); Query criteriaQuery = entityManager.createQuery(criteria); String sql = SQLExtractor.from(criteriaQuery); assertNotNull(sql); LOGGER.info( "The Criteria API query: [\n{}\n]\ngenerates the following SQL query: [\n{}\n]", criteriaQuery.unwrap(org.hibernate.query.Query.class).getQueryString(), sql ); }); }
|
// Configuration singleton: exposes the merged hibernate-types properties; getProperties is a
// plain accessor over the backing Properties object.
Configuration { public Properties getProperties() { return properties; } private Configuration(); Properties getProperties(); ObjectMapperWrapper getObjectMapperWrapper(); Integer integerProperty(PropertyKey propertyKey); Long longProperty(PropertyKey propertyKey); Boolean booleanProperty(PropertyKey propertyKey); Class<T> classProperty(PropertyKey propertyKey); static final Configuration INSTANCE; static final String PROPERTIES_FILE_PATH; static final String PROPERTIES_FILE_NAME; }
|
// Unknown keys resolve to null; keys present in the bundled properties file resolve to
// their configured value ("hibernate.types.abc" -> "def" in the test fixture).
@Test public void testHibernateProperties() { assertNull(Configuration.INSTANCE.getProperties().getProperty("hibernate.types.nothing")); assertEquals("def", Configuration.INSTANCE.getProperties().getProperty("hibernate.types.abc")); }
// Override file/system properties take precedence ("hibernate.types.def" -> "ghi").
@Test public void testHibernateTypesOverrideProperties() { assertEquals("ghi", Configuration.INSTANCE.getProperties().getProperty("hibernate.types.def")); }
|
// PostgreSQLHStoreType: maps a PostgreSQL hstore column to java.util.Map. get() relies on the
// PostgreSQL JDBC driver returning the hstore value as a Map from ResultSet.getObject.
PostgreSQLHStoreType extends ImmutableType<Map> { @Override protected Map get(ResultSet rs, String[] names, SharedSessionContractImplementor session, Object owner) throws SQLException { return (Map) rs.getObject(names[0]); } PostgreSQLHStoreType(); @Override int[] sqlTypes(); static final PostgreSQLHStoreType INSTANCE; }
|
// Round-trip test: persist hstore entries, reload by natural id, and verify the map contents.
@Test public void test() { doInJPA(entityManager -> { Book book = new Book(); book.setIsbn("978-9730228236"); book.getProperties().put("title", "High-Performance Java Persistence"); book.getProperties().put("author", "Vlad Mihalcea"); book.getProperties().put("publisher", "Amazon"); book.getProperties().put("price", "$44.95"); entityManager.persist(book); }); doInJPA(entityManager -> { Book book = entityManager.unwrap(Session.class) .bySimpleNaturalId(Book.class) .load("978-9730228236"); assertEquals("High-Performance Java Persistence", book.getProperties().get("title")); assertEquals("Vlad Mihalcea", book.getProperties().get("author")); }); }
|
// Range (variant 2): identical parsing logic to variant 1 (bracket -> inclusive mask, empty or
// "infinity" bound -> infinite mask, converter applied to finite bounds only); differs only in
// the diamond constructor call and the extra java.time factory signatures.
Range implements Serializable { @SuppressWarnings("unchecked") public static <T extends Comparable> Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz) { if(str.equals(EMPTY)) { return emptyRange(clazz); } int mask = str.charAt(0) == '[' ? LOWER_INCLUSIVE : LOWER_EXCLUSIVE; mask |= str.charAt(str.length() - 1) == ']' ? UPPER_INCLUSIVE : UPPER_EXCLUSIVE; int delim = str.indexOf(','); if (delim == -1) { throw new IllegalArgumentException("Cannot find comma character"); } String lowerStr = str.substring(1, delim); String upperStr = str.substring(delim + 1, str.length() - 1); if (lowerStr.length() == 0 || lowerStr.endsWith(INFINITY)) { mask |= LOWER_INFINITE; } if (upperStr.length() == 0 || upperStr.endsWith(INFINITY)) { mask |= UPPER_INFINITE; } T lower = null; T upper = null; if ((mask & LOWER_INFINITE) != LOWER_INFINITE) { lower = converter.apply(lowerStr); } if ((mask & UPPER_INFINITE) != UPPER_INFINITE) { upper = converter.apply(upperStr); } return new Range<>(lower, upper, mask, clazz); } private Range(T lower, T upper, int mask, Class<T> clazz); @SuppressWarnings("unchecked") static Range<T> closed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> open(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openClosed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> closedOpen(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> closedInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> infiniteOpen(T upper); @SuppressWarnings("unchecked") static Range<T> infiniteClosed(T upper); @SuppressWarnings("unchecked") static Range<T> infinite(Class<T> cls); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> 
longRange(String range); static Range<LocalDateTime> localDateTimeRange(String range); static Range<LocalDate> localDateRange(String range); static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); boolean hasMask(int flag); boolean isLowerBoundClosed(); boolean isUpperBoundClosed(); boolean hasLowerBound(); boolean hasUpperBound(); T lower(); T upper(); @SuppressWarnings("unchecked") boolean contains(T point); boolean contains(Range<T> range); String asString(); static Range<R> emptyRange(Class<R> clazz); static final int LOWER_INCLUSIVE; static final int LOWER_EXCLUSIVE; static final int UPPER_INCLUSIVE; static final int UPPER_EXCLUSIVE; static final int LOWER_INFINITE; static final int UPPER_INFINITE; static final String EMPTY; static final String INFINITY; }
|
// Same assertions as the variant-1 test: bounds, closedness, and unbounded "[,]" handling.
@Test public void ofStringTest() { assertThat(integerRange("[1,3]").lower(), is(1)); assertThat(integerRange("[1,3]").upper(), is(3)); assertThat(integerRange("[1,3]").isUpperBoundClosed(), is(true)); assertThat(integerRange("[1,3]").isLowerBoundClosed(), is(true)); assertThat(integerRange("[,3]").lower(), is(nullValue())); assertThat(integerRange("[,3]").upper(), is(3)); assertThat(integerRange("[,3]").hasLowerBound(), is(false)); assertThat(integerRange("[,3]").hasUpperBound(), is(true)); assertThat(integerRange("[,3]").isUpperBoundClosed(), is(true)); assertThat(integerRange("[,3]").isLowerBoundClosed(), is(false)); assertThat(integerRange("[,]").lower(), is(nullValue())); assertThat(integerRange("[,]").upper(), is(nullValue())); assertThat(integerRange("[,]").hasLowerBound(), is(false)); assertThat(integerRange("[,]").hasUpperBound(), is(false)); assertThat(integerRange("[,]").isUpperBoundClosed(), is(false)); assertThat(integerRange("[,]").isLowerBoundClosed(), is(false)); assertThat(integerRange("(-5,5]").isUpperBoundClosed(), is(true)); assertThat(integerRange("(-5,5]").isLowerBoundClosed(), is(false)); }
|
// Range.localDateTimeRange: parses a timestamp range by first stripping quotes from each bound
// (unquote()) and then parsing with parseLocalDateTime() — composed right-to-left via
// Function.compose, so unquote runs before the parse.
Range implements Serializable { public static Range<LocalDateTime> localDateTimeRange(String range) { return ofString(range, parseLocalDateTime().compose(unquote()), LocalDateTime.class); } private Range(T lower, T upper, int mask, Class<T> clazz); @SuppressWarnings("unchecked") static Range<T> closed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> open(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openClosed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> closedOpen(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> closedInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> infiniteOpen(T upper); @SuppressWarnings("unchecked") static Range<T> infiniteClosed(T upper); @SuppressWarnings("unchecked") static Range<T> infinite(Class<T> cls); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> longRange(String range); static Range<LocalDateTime> localDateTimeRange(String range); static Range<LocalDate> localDateRange(String range); static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); boolean hasMask(int flag); boolean isLowerBoundClosed(); boolean isUpperBoundClosed(); boolean hasLowerBound(); boolean hasUpperBound(); T lower(); T upper(); @SuppressWarnings("unchecked") boolean contains(T point); boolean contains(Range<T> range); String asString(); static Range<R> emptyRange(Class<R> clazz); static final int LOWER_INCLUSIVE; static final int LOWER_EXCLUSIVE; static final int UPPER_INCLUSIVE; static final int UPPER_EXCLUSIVE; static final int LOWER_INFINITE; static final int UPPER_INFINITE; static final String EMPTY; static final 
String INFINITY; }
|
// Fractional seconds from 1 to 6 digits must parse, and "infinity" as the upper bound is legal.
@Test public void localDateTimeTest() { assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.1,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.12,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.123,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.1234,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.12345,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.123456,)")); assertNotNull(Range.localDateTimeRange("[2019-03-27 16:33:10.123456,infinity)")); }
|
// Range.zonedDateTimeRange: parses both bounds (unquote then parseZonedDateTime), then — only
// when both bounds are present and carry different zones — tolerates the zone difference iff
// the absolute offset drift between the two bounds equals the absolute DST shift of the system
// default zone between the two instants (i.e. the zones differ only because of a DST
// transition). Any other zone mismatch throws IllegalArgumentException.
// NOTE(review): the casts to ZoneOffset assume the parsed zones are fixed offsets (e.g. "-06"),
// not region ids — confirm against parseZonedDateTime's formatter.
Range implements Serializable { public static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr) { Range<ZonedDateTime> range = ofString(rangeStr, parseZonedDateTime().compose(unquote()), ZonedDateTime.class); if (range.hasLowerBound() && range.hasUpperBound()) { ZoneId lowerZone = range.lower().getZone(); ZoneId upperZone = range.upper().getZone(); if (!lowerZone.equals(upperZone)) { Duration lowerDst = ZoneId.systemDefault().getRules().getDaylightSavings(range.lower().toInstant()); Duration upperDst = ZoneId.systemDefault().getRules().getDaylightSavings(range.upper().toInstant()); long dstSeconds = upperDst.minus(lowerDst).getSeconds(); if(dstSeconds < 0 ) { dstSeconds *= -1; } long zoneDriftSeconds = ((ZoneOffset) lowerZone).getTotalSeconds() - ((ZoneOffset) upperZone).getTotalSeconds(); if (zoneDriftSeconds < 0) { zoneDriftSeconds *= -1; } if (dstSeconds != zoneDriftSeconds) { throw new IllegalArgumentException("The upper and lower bounds must be in same time zone!"); } } } return range; } private Range(T lower, T upper, int mask, Class<T> clazz); @SuppressWarnings("unchecked") static Range<T> closed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> open(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openClosed(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> closedOpen(T lower, T upper); @SuppressWarnings("unchecked") static Range<T> openInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> closedInfinite(T lower); @SuppressWarnings("unchecked") static Range<T> infiniteOpen(T upper); @SuppressWarnings("unchecked") static Range<T> infiniteClosed(T upper); @SuppressWarnings("unchecked") static Range<T> infinite(Class<T> cls); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> longRange(String range); static 
Range<LocalDateTime> localDateTimeRange(String range); static Range<LocalDate> localDateRange(String range); static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); boolean hasMask(int flag); boolean isLowerBoundClosed(); boolean isUpperBoundClosed(); boolean hasLowerBound(); boolean hasUpperBound(); T lower(); T upper(); @SuppressWarnings("unchecked") boolean contains(T point); boolean contains(Range<T> range); String asString(); static Range<R> emptyRange(Class<R> clazz); static final int LOWER_INCLUSIVE; static final int LOWER_EXCLUSIVE; static final int UPPER_INCLUSIVE; static final int UPPER_EXCLUSIVE; static final int LOWER_INFINITE; static final int UPPER_INFINITE; static final String EMPTY; static final String INFINITY; }
|
// Offset-suffixed timestamps with 1-6 fractional digits parse; an infinite upper bound skips
// the zone-consistency check (only ranges with both bounds are validated).
@Test public void zonedDateTimeTest() { assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.1-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.12-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.1234-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.12345-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123456-06,)")); assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123456-06,infinity)")); }
|
PostgreSQLGuavaRangeType extends ImmutableType<Range> implements DynamicParameterizedType { public static Range<Integer> integerRange(String range) { return ofString(range, Integer::parseInt, Integer.class); } PostgreSQLGuavaRangeType(); @Override int[] sqlTypes(); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> cls); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> longRange(String range); static Range<LocalDateTime> localDateTimeRange(String range); static Range<LocalDate> localDateRange(String range); static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr); String asString(Range range); @Override void setParameterValues(Properties parameters); Class<?> getElementType(); static final PostgreSQLGuavaRangeType INSTANCE; }
|
@Test public void testUnboundedRangeStringIsRejected() { try { PostgreSQLGuavaRangeType instance = PostgreSQLGuavaRangeType.INSTANCE; instance.integerRange("(,)"); fail("An unbounded range string should throw an exception!"); } catch (Exception e) { IllegalArgumentException rootCause = ExceptionUtil.rootCause(e); assertTrue(rootCause.getMessage().contains("Cannot find bound type")); } }
|
// SQLExtractor (legacy Hibernate variant): unwraps to AbstractQueryImpl, reads its private
// "session" field reflectively (no public accessor on this Hibernate version), then resolves
// the SQL through the factory's query-plan cache. Returns the first statement or null.
SQLExtractor { public static String from(Query query) { AbstractQueryImpl abstractQuery = query.unwrap(AbstractQueryImpl.class); SessionImplementor session = ReflectionUtils.getFieldValue(abstractQuery, "session"); String[] sqls = session.getFactory() .getQueryPlanCache() .getHQLQueryPlan(abstractQuery.getQueryString(), false, Collections.<String, Filter>emptyMap()) .getSqlStrings(); return sqls.length > 0 ? sqls[0] : null; } private SQLExtractor(); static String from(Query query); }
|
// Pre-lambda (anonymous JPATransactionFunction) version of the JPQL extraction test.
@Test public void testJPQL() { doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { Query query = entityManager .createQuery( "select " + " YEAR(p.createdOn) as year, " + " count(p) as postCount " + "from " + " Post p " + "group by " + " YEAR(p.createdOn)", Tuple.class); String sql = SQLExtractor.from(query); assertNotNull(sql); LOGGER.info("SQL query: {}", sql); return null; } }); }
// Criteria API counterpart using the same anonymous-class transaction callback.
@Test public void testCriteriaAPI() { doInJPA(new JPATransactionFunction<Void>() { @Override public Void apply(EntityManager entityManager) { CriteriaBuilder builder = entityManager.getCriteriaBuilder(); CriteriaQuery<PostComment> criteria = builder.createQuery(PostComment.class); Root<PostComment> postComment = criteria.from(PostComment.class); Join<PostComment, Post> post = postComment.join("post"); Path<String> postTitle = post.get("title"); criteria.where( builder.like(postTitle, "%Java%") ); criteria.orderBy( builder.asc(postComment.get("id")) ); Query query = entityManager.createQuery(criteria); String sql = SQLExtractor.from(query); assertNotNull(sql); LOGGER.info("SQL query: {}", sql); return null; } }); }
|
// Guava-backed range type (variant without DynamicParameterizedType): integerRange again uses an
// anonymous Function — presumably an older-JDK module; TODO confirm the module's language level.
PostgreSQLGuavaRangeType extends ImmutableType<Range> { public static Range<Integer> integerRange(String range) { return ofString(range, new Function<String, Integer>() { @Override public Integer apply(String s) { return Integer.parseInt(s); } }, Integer.class); } PostgreSQLGuavaRangeType(); @Override int[] sqlTypes(); @SuppressWarnings("unchecked") static Range<T> ofString(String str, Function<String, T> converter, Class<T> cls); static Range<BigDecimal> bigDecimalRange(String range); static Range<Integer> integerRange(String range); static Range<Long> longRange(String range); String asString(Range range); static final PostgreSQLGuavaRangeType INSTANCE; }
|
// Same unbounded-literal rejection contract, checked through Guava's Throwables.getRootCause.
@Test public void testUnboundedRangeStringIsRejected() { try { PostgreSQLGuavaRangeType instance = PostgreSQLGuavaRangeType.INSTANCE; instance.integerRange("(,)"); fail("An unbounded range string should throw an exception!"); } catch (Exception e) { Throwable rootCause = Throwables.getRootCause(e); assertTrue(rootCause instanceof IllegalArgumentException); assertTrue(rootCause.getMessage().contains("Cannot find bound type")); } }
|
// AuthorizationsCollector.parseAuthLine: one line of a mosquitto-style ACL file. Splits on
// whitespace and dispatches on the first token (lower-cased):
//   "topic"   -> builds an Authorization for the current section (createAuthorization gets the
//                full line too — presumably so topics containing spaces can be reassembled;
//                TODO confirm against createAuthorization);
//   "user"    -> switches to a user-specific section (records the user, returns null);
//   "pattern" -> switches to the pattern section and builds an Authorization;
//   anything else -> ParseException("invalid line definition found ...").
AuthorizationsCollector implements IAuthorizator { protected Authorization parseAuthLine(String line) throws ParseException { String[] tokens = line.split("\\s+"); String keyword = tokens[0].toLowerCase(); switch (keyword) { case "topic": return createAuthorization(line, tokens); case "user": m_parsingUsersSpecificSection = true; m_currentUser = tokens[1]; m_parsingPatternSpecificSection = false; return null; case "pattern": m_parsingUsersSpecificSection = false; m_currentUser = ""; m_parsingPatternSpecificSection = true; return createAuthorization(line, tokens); default: throw new ParseException(String.format("invalid line definition found %s", line), 1); } } @Override boolean canWrite(Topic topic, String user, String client); @Override boolean canRead(Topic topic, String user, String client); boolean isEmpty(); }
|
// A bare "topic <name>" line defaults to read-write access.
@Test public void testParseAuthLineValid() throws ParseException { Authorization authorization = authorizator.parseAuthLine("topic /weather/italy/anemometer"); assertEquals(RW_ANEMOMETER, authorization); }
// Explicit "read" permission.
@Test public void testParseAuthLineValid_read() throws ParseException { Authorization authorization = authorizator.parseAuthLine("topic read /weather/italy/anemometer"); assertEquals(R_ANEMOMETER, authorization); }
// Explicit "write" permission.
@Test public void testParseAuthLineValid_write() throws ParseException { Authorization authorization = authorizator.parseAuthLine("topic write /weather/italy/anemometer"); assertEquals(W_ANEMOMETER, authorization); }
// Explicit "readwrite" permission.
@Test public void testParseAuthLineValid_readwrite() throws ParseException { Authorization authorization = authorizator.parseAuthLine("topic readwrite /weather/italy/anemometer"); assertEquals(RW_ANEMOMETER, authorization); }
// Topic names containing spaces must survive the whitespace tokenization (the full line is
// passed alongside the tokens for exactly this case).
@Test public void testParseAuthLineValid_topic_with_space() throws ParseException { Authorization expected = new Authorization(new Topic("/weather/eastern italy/anemometer")); Authorization authorization = authorizator.parseAuthLine("topic readwrite /weather/eastern italy/anemometer"); assertEquals(expected, authorization); }
// An unknown permission keyword ("faker") must surface as a ParseException.
@Test(expected = ParseException.class) public void testParseAuthLineValid_invalid() throws ParseException { authorizator.parseAuthLine("topic faker /weather/italy/anemometer"); }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.