repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/example/test/DegreeDistributionITCase.java | src/test/java/org/apache/flink/graph/streaming/example/test/DegreeDistributionITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example.test;
import org.apache.flink.graph.streaming.example.DegreeDistribution;
import org.apache.flink.graph.streaming.util.ExamplesTestData;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.junit.Test;
public class DegreeDistributionITCase extends AbstractTestBase {

	@Test
	public void test() throws Exception {
		// Run the DegreeDistribution example on two inputs: regular degree data
		// and data that produces zero-degree entries.
		final String edgesPath = createTempFile("edges.txt", ExamplesTestData.DEGREES_DATA);
		final String outputPath = getTempDirPath("result");
		DegreeDistribution.main(new String[]{edgesPath, outputPath, "1"});

		final String zeroEdgesPath = createTempFile("edges2.txt", ExamplesTestData.DEGREES_DATA_ZERO);
		final String zeroOutputPath = getTempDirPath("result2");
		DegreeDistribution.main(new String[]{zeroEdgesPath, zeroOutputPath, "1"});

		// Compare both produced outputs against the expected fixtures.
		compareResultsByLinesInMemory(ExamplesTestData.DEGREES_RESULT, outputPath);
		compareResultsByLinesInMemory(ExamplesTestData.DEGREES_RESULT_ZERO, zeroOutputPath);
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/example/test/ConnectedComponentsTest.java | src/test/java/org/apache/flink/graph/streaming/example/test/ConnectedComponentsTest.java | package org.apache.flink.graph.streaming.example.test;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.library.ConnectedComponents;
import org.apache.flink.graph.streaming.summaries.DisjointSet;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class ConnectedComponentsTest extends AbstractTestBase {

	@Test
	public void test() throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1); // needed to ensure total ordering for windows
		CollectSink.values.clear();

		DataStream<Edge<Long, NullValue>> edges = getGraphStream(env);
		GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edges, env);
		graph
			.aggregate(new ConnectedComponents<>(5))
			.addSink(new CollectSink());
		env.execute("Streaming Connected Components Check");

		// verify the results
		String expectedResultStr = "1, 2, 3, 5\n" + "6, 7\n" + "8, 9\n";
		String[] result = parser(CollectSink.values);
		String[] expected = expectedResultStr.isEmpty() ? new String[0] : expectedResultStr.split("\n");

		assertEquals("Different number of lines in expected and obtained result.", expected.length, result.length);
		Assert.assertArrayEquals("Different connected components.", expected, result);
	}

	@SuppressWarnings("serial")
	private static DataStream<Edge<Long, NullValue>> getGraphStream(StreamExecutionEnvironment env) {
		return env.fromCollection(getEdges());
	}

	/** Two triangles (1-2-3-5 via vertex 1) and two disjoint pairs. */
	public static List<Edge<Long, NullValue>> getEdges() {
		List<Edge<Long, NullValue>> edges = new ArrayList<>();
		edges.add(new Edge<>(1L, 2L, NullValue.getInstance()));
		edges.add(new Edge<>(1L, 3L, NullValue.getInstance()));
		edges.add(new Edge<>(2L, 3L, NullValue.getInstance()));
		edges.add(new Edge<>(1L, 5L, NullValue.getInstance()));
		edges.add(new Edge<>(6L, 7L, NullValue.getInstance()));
		edges.add(new Edge<>(8L, 9L, NullValue.getInstance()));
		return edges;
	}

	/**
	 * Extracts the sorted component membership lists from the sink output.
	 *
	 * <p>Only the last collected string holds the final combined result; it contains
	 * bracketed member lists (e.g. {@code [1, 2, 3, 5]}) separated by {@code =} signs.
	 * Note: the input list is cleared and reused as scratch space, matching the
	 * original behavior.
	 */
	static String[] parser(List<String> list) {
		// The final combined result is stored at the end of the sink output.
		String last = list.get(list.size() - 1);
		list.clear();
		for (String token : last.split("=")) {
			if (token.contains("[")) {
				// "[1, 2, 3]..." -> "1, 2, 3": drop everything from ']' on, then the leading '['.
				list.add(token.split("]")[0].substring(1));
			}
		}
		String[] result = list.toArray(new String[0]);
		Arrays.sort(result);
		return result;
	}

	// a testing sink; collects every aggregation result as its string form
	public static final class CollectSink implements SinkFunction<DisjointSet<Long>> {
		static final List<String> values = new ArrayList<>();

		@Override
		public void invoke(DisjointSet<Long> value, Context context) throws Exception {
			values.add(value.toString());
		}
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/example/test/WindowTrianglesITCase.java | src/test/java/org/apache/flink/graph/streaming/example/test/WindowTrianglesITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example.test;
import org.apache.flink.graph.streaming.example.WindowTriangles;
import org.apache.flink.graph.streaming.util.ExamplesTestData;
import org.apache.flink.test.util.AbstractTestBase;
import org.junit.Test;
public class WindowTrianglesITCase extends AbstractTestBase {

	@Test
	public void test() throws Exception {
		// Prepare an output directory and the input edge file for the example job.
		final String output = getTempDirPath("result");
		final String edgesFile = createTempFile("edges.txt", ExamplesTestData.TRIANGLES_DATA);

		// args: input path, output path, plus two numeric example parameters
		// (see WindowTriangles for their exact meaning).
		WindowTriangles.main(new String[]{edgesFile, output, "400", "1"});

		compareResultsByLinesInMemory(ExamplesTestData.TRIANGLES_RESULT, output);
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/example/test/TriangleCountTest.java | src/test/java/org/apache/flink/graph/streaming/example/test/TriangleCountTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example.test;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.example.ExactTriangleCount;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
/**
 * Unit tests for the flatMap functions used by {@code ExactTriangleCount},
 * driven through hand-rolled {@link Collector} stubs.
 */
public class TriangleCountTest {

	// first flatMap collector and result Tuple (set up but not exercised by the tests below)
	private Collector<Tuple3<Integer, Integer, TreeSet<Integer>>> out1;
	private Tuple3<Integer, Integer, TreeSet<Integer>> resultTuple1;

	// second flatMap collector: accumulates all collected tuples into a list
	private Collector<Tuple2<Integer, Integer>> out2;
	private List<Tuple2<Integer, Integer>> resultList2;

	// third flatMap collector: remembers only the most recently collected tuple
	private Collector<Tuple2<Integer, Integer>> out3;
	private Tuple2<Integer, Integer> resultTuple3;

	@Before
	public void setUp() throws Exception {
		out1 = new Collector<Tuple3<Integer, Integer, TreeSet<Integer>>>() {
			@Override
			public void collect(Tuple3<Integer, Integer, TreeSet<Integer>> t) {
				resultTuple1 = t;
			}

			@Override
			public void close() {
				// nothing to release
			}
		};

		resultList2 = new ArrayList<>();
		out2 = new Collector<Tuple2<Integer, Integer>>() {
			@Override
			public void collect(Tuple2<Integer, Integer> t) {
				resultList2.add(t);
			}

			@Override
			public void close() {
				resultList2.clear();
			}
		};

		out3 = new Collector<Tuple2<Integer, Integer>>() {
			@Override
			public void collect(Tuple2<Integer, Integer> t) {
				resultTuple3 = t;
			}

			@Override
			public void close() {
				// nothing to release
			}
		};
	}

	@Test
	public void testIntersection() throws Exception {
		// Typed (not raw) FlatMapFunction so the flatMap calls below are checked.
		FlatMapFunction<Tuple3<Integer, Integer, TreeSet<Integer>>, Tuple2<Integer, Integer>> f =
				new ExactTriangleCount.IntersectNeighborhoods();

		TreeSet<Integer> t1 = new TreeSet<>();
		t1.add(2);
		t1.add(3);
		t1.add(5);
		t1.add(7);
		t1.add(9);
		Tuple3<Integer, Integer, TreeSet<Integer>> input1 = new Tuple3<>(1, 2, t1);
		Tuple3<Integer, Integer, TreeSet<Integer>> input2 = new Tuple3<>(1, 3, t1);

		TreeSet<Integer> t2 = new TreeSet<>();
		t2.add(1);
		t2.add(3);
		t2.add(4);
		t2.add(5);
		t2.add(15);
		t2.add(18);
		Tuple3<Integer, Integer, TreeSet<Integer>> input3 = new Tuple3<>(1, 2, t2);

		// The first occurrence of each endpoint pair emits nothing.
		f.flatMap(input1, out2);
		Assert.assertEquals(0, resultList2.size());
		f.flatMap(input2, out2);
		Assert.assertEquals(0, resultList2.size());

		// Seeing (1, 2) again intersects the stored and new neighborhoods.
		f.flatMap(input3, out2);
		Assert.assertEquals(5, resultList2.size());
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(3, 1)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(5, 1)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(1, 2)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(2, 2)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(-1, 2)));

		TreeSet<Integer> t3 = new TreeSet<>();
		t3.add(1);
		t3.add(2);
		t3.add(7);
		t3.add(8);
		Tuple3<Integer, Integer, TreeSet<Integer>> input4 = new Tuple3<>(1, 3, t3);
		resultList2.clear();
		f.flatMap(input4, out2);
		Assert.assertEquals(5, resultList2.size());
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(2, 1)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(7, 1)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(1, 2)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(3, 2)));
		Assert.assertEquals(true, resultList2.contains(new Tuple2<>(-1, 2)));
	}

	@Test
	public void testCounts() throws Exception {
		// Typed (not raw) FlatMapFunction so the flatMap calls below are checked.
		FlatMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> f =
				new ExactTriangleCount.SumAndEmitCounters();
		Tuple2<Integer, Integer> expected = new Tuple2<>();

		// Key -1 accumulates the global count; other keys keep per-vertex counts.
		f.flatMap(new Tuple2<>(-1, 1), out3);
		expected.setField(-1, 0);
		expected.setField(1, 1);
		Assert.assertEquals(expected, resultTuple3);

		f.flatMap(new Tuple2<>(-1, 5), out3);
		expected.setField(-1, 0);
		expected.setField(6, 1);
		Assert.assertEquals(expected, resultTuple3);

		f.flatMap(new Tuple2<>(2, 2), out3);
		expected.setField(2, 0);
		expected.setField(2, 1);
		Assert.assertEquals(expected, resultTuple3);

		f.flatMap(new Tuple2<>(-1, 4), out3);
		expected.setField(-1, 0);
		expected.setField(10, 1);
		Assert.assertEquals(expected, resultTuple3);

		f.flatMap(new Tuple2<>(2, 4), out3);
		expected.setField(2, 0);
		expected.setField(6, 1);
		Assert.assertEquals(expected, resultTuple3);
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/example/test/BipartitenessCheckTest.java | src/test/java/org/apache/flink/graph/streaming/example/test/BipartitenessCheckTest.java | package org.apache.flink.graph.streaming.example.test;
import com.google.common.collect.Lists;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.library.BipartitenessCheck;
import org.apache.flink.graph.streaming.summaries.Candidates;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class BipartitenessCheckTest extends AbstractTestBase {
@Test
public void testBipartite() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1); //needed to ensure total ordering for windows
CollectSink.values.clear();
DataStream<Edge<Long, NullValue>> edges = env.fromCollection(getBipartiteEdges());
GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edges, env);
graph
.aggregate(new BipartitenessCheck<>((long) 500))
.addSink(new CollectSink());
env.execute("Bipartiteness check");
// verify the results
assertEquals(Lists.newArrayList(
"(true,{1={1=(1,true), 2=(2,false), 3=(3,false), 4=(4,false), 5=(5,true), 7=(7,true), 9=(9,true)}})"),
CollectSink.values);
}
@Test
public void testNonBipartite() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
CollectSink.values.clear();
DataStream<Edge<Long, NullValue>> edges = env.fromCollection(getNonBipartiteEdges());
GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(edges, env);
graph.
aggregate(new BipartitenessCheck<>((long) 500))
.addSink(new CollectSink());
env.execute("Non Bipartiteness check");
// verify the results
assertEquals(Lists.newArrayList(
"(false,{})"),
CollectSink.values);
}
static List<Edge<Long, NullValue>> getBipartiteEdges () {
List<Edge<Long, NullValue>> edges = new ArrayList<>();
edges.add(new Edge<>(1L, 2L, NullValue.getInstance()));
edges.add(new Edge<>(1L, 3L, NullValue.getInstance()));
edges.add(new Edge<>(1L, 4L, NullValue.getInstance()));
edges.add(new Edge<>(4L, 5L, NullValue.getInstance()));
edges.add(new Edge<>(4L, 7L, NullValue.getInstance()));
edges.add(new Edge<>(4L, 9L, NullValue.getInstance()));
return edges;
}
static List<Edge<Long, NullValue>> getNonBipartiteEdges () {
List<Edge<Long, NullValue>> edges = new ArrayList<>();
edges.add(new Edge<>(1L, 2L, NullValue.getInstance()));
edges.add(new Edge<>(2L, 3L, NullValue.getInstance()));
edges.add(new Edge<>(3L, 1L, NullValue.getInstance()));
edges.add(new Edge<>(4L, 5L, NullValue.getInstance()));
edges.add(new Edge<>(5L, 7L, NullValue.getInstance()));
edges.add(new Edge<>(4L, 1L, NullValue.getInstance()));
return edges;
}
// a testing sink
public static final class CollectSink implements SinkFunction<Candidates> {
static final List<String> values = new ArrayList<>();
@Override
public void invoke(Candidates value, Context context) throws Exception {
values.add(value.toString());
}
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/EdgesReduce.java | src/main/java/org/apache/flink/graph/streaming/EdgesReduce.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import java.io.Serializable;
import org.apache.flink.api.common.functions.Function;
/**
 * Interface to be implemented by the function applied to a vertex neighborhood
 * in the {@link SnapshotStream#reduceOnEdges(EdgesReduce)} method.
 *
 * <p>Implementations must be deterministic and associative, since the reduction
 * order over a neighborhood's edges is not guaranteed — TODO confirm against
 * the SnapshotStream implementation.
 *
 * @param <EV> the edge value type
 */
public interface EdgesReduce<EV> extends Function, Serializable {

	/**
	 * Combines two edge values into one value of the same type.
	 * The reduceEdges function is consecutively applied to all pairs of edges of a neighborhood,
	 * until only a single value remains.
	 *
	 * @param firstEdgeValue the value of the first edge
	 * @param secondEdgeValue the value of the second edge
	 * @return the combined value of the two input edge values
	 * @throws Exception if the reduction fails; the exception is propagated by the runtime
	 */
	EV reduceEdges(EV firstEdgeValue, EV secondEdgeValue) throws Exception;
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/SummaryBulkAggregation.java | src/main/java/org/apache/flink/graph/streaming/SummaryBulkAggregation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.graph.Edge;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.windowing.time.Time;
import java.io.Serializable;
import java.util.concurrent.TimeUnit;
/**
 * Graph Aggregation on Parallel Time Window.
 *
 * <p>Discretizes the edge stream into tumbling time windows of {@code timeMillis}
 * milliseconds. Within each window, edges are folded per parallel partition with the
 * update (fold) function, the per-partition partial states are merged by the combine
 * function in a parallelism-1 all-window, and the merged state is passed through the
 * aggregator and, if configured, the transform function.
 *
 * @param <K> the edge stream's key type
 * @param <EV> the edges stream's value type
 * @param <S> the output type of the partial aggregation
 * @param <T> the output type of the result
 */
public class SummaryBulkAggregation<K, EV, S extends Serializable, T> extends SummaryAggregation<K, EV, S, T> {

	private static final long serialVersionUID = 1L;

	// Size of the tumbling windows, in milliseconds.
	protected long timeMillis;

	/**
	 * Creates a bulk aggregation.
	 *
	 * @param updateFun fold function applied to every edge of a partition's window
	 * @param combineFun reduce function merging the per-partition partial states
	 * @param transformFun optional mapping of the combined state to the final result (may be null)
	 * @param initialVal initial value of the partial aggregation state
	 * @param timeMillis tumbling window size in milliseconds
	 * @param transientState passed through to {@link SummaryAggregation};
	 *     presumably controls whether state is reset between windows — TODO confirm
	 */
	public SummaryBulkAggregation(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, MapFunction<S, T> transformFun, S initialVal, long timeMillis, boolean transientState) {
		super(updateFun, combineFun, transformFun, initialVal, transientState);
		this.timeMillis = timeMillis;
	}

	/** Convenience constructor without a transform function. */
	public SummaryBulkAggregation(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, S initialVal, long timeMillis, boolean transientState) {
		this(updateFun, combineFun, null, initialVal, timeMillis, transientState);
	}

	@SuppressWarnings("unchecked")
	@Override
	public DataStream<T> run(final DataStream<Edge<K, EV>> edgeStream) {
		//For parallel window support we key the edge stream by partition and apply a parallel fold per partition.
		//Finally, we merge all locally combined results into our final graph aggregation property.
		TupleTypeInfo edgeTypeInfo = (TupleTypeInfo) edgeStream.getType();
		// Derive the fold-state type S from the EdgesFold implementation (type parameter
		// position 2), feeding it the edge key (field 0) and edge value (field 2) types.
		TypeInformation<S> returnType = TypeExtractor.createTypeInfo(EdgesFold.class, getUpdateFun().getClass(), 2, edgeTypeInfo.getTypeAt(0), edgeTypeInfo.getTypeAt(2));
		// Each edge is tagged with the index of the subtask that saw it, so keyBy(0)
		// groups edges by the partition that produced them.
		TypeInformation<Tuple2<Integer, Edge<K, EV>>> typeInfo = new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, edgeStream.getType());
		DataStream<S> partialAgg = edgeStream
			.map(new PartitionMapper<>()).returns(typeInfo)
			.keyBy(0)
			.timeWindow(Time.of(timeMillis, TimeUnit.MILLISECONDS))
			.fold(getInitialValue(), new PartialAgg<>(getUpdateFun(),returnType))
			.timeWindowAll(Time.of(timeMillis, TimeUnit.MILLISECONDS))
			.reduce(getCombineFun())
			.flatMap(getAggregator(edgeStream)).setParallelism(1);
		if (getTransform() != null) {
			return partialAgg.map(getTransform());
		}
		// No transform configured: S and T coincide; the cast is covered by the
		// class-level unchecked suppression.
		return (DataStream<T>) partialAgg;
	}

	/** Tags every element with the index of the parallel subtask that processed it. */
	@SuppressWarnings("serial")
	protected static final class PartitionMapper<Y> extends RichMapFunction<Y, Tuple2<Integer, Y>> {

		private int partitionIndex;

		@Override
		public void open(Configuration parameters) throws Exception {
			this.partitionIndex = getRuntimeContext().getIndexOfThisSubtask();
		}

		@Override
		public Tuple2<Integer, Y> map(Y state) throws Exception {
			return new Tuple2<>(partitionIndex, state);
		}
	}

	/**
	 * Adapts an {@link EdgesFold} to Flink's FoldFunction over partition-tagged edges,
	 * exposing the fold-state type via {@link ResultTypeQueryable} because it cannot
	 * be extracted automatically here.
	 */
	@SuppressWarnings("serial")
	protected static final class PartialAgg<K, EV, S>
		implements ResultTypeQueryable<S>, FoldFunction<Tuple2<Integer, Edge<K, EV>>, S> {

		private EdgesFold<K, EV, S> foldFunction;
		private TypeInformation<S> returnType;

		public PartialAgg(EdgesFold<K, EV, S> foldFunction, TypeInformation<S> returnType) {
			this.foldFunction = foldFunction;
			this.returnType = returnType;
		}

		@Override
		public S fold(S s, Tuple2<Integer, Edge<K, EV>> o) throws Exception {
			// o.f1 is the edge; unpack source, target, and value for the user fold.
			return this.foldFunction.foldEdges(s, o.f1.getSource(), o.f1.getTarget(), o.f1.getValue());
		}

		@Override
		public TypeInformation<S> getProducedType() {
			return returnType;
		}
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/SimpleEdgeStream.java | src/main/java/org/apache/flink/graph/streaming/SimpleEdgeStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.Vertex;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.io.Serializable;
import java.util.*;
/**
*
* Represents a graph stream where the stream consists solely of {@link org.apache.flink.graph.Edge edges}.
* <p>
*
* @see org.apache.flink.graph.Edge
*
* @param <K> the key type for edge and vertex identifiers.
* @param <EV> the value type for edges.
*/
@SuppressWarnings("serial")
public class SimpleEdgeStream<K, EV> extends GraphStream<K, NullValue, EV> {
private final StreamExecutionEnvironment context;
private final DataStream<Edge<K, EV>> edges;
/**
* Creates a graph from an edge stream.
* The time characteristic is set to ingestion time by default.
*
* @see {@link org.apache.flink.streaming.api.TimeCharacteristic}
*
* @param edges a DataStream of edges.
* @param context the flink execution environment.
*/
public SimpleEdgeStream(DataStream<Edge<K, EV>> edges, StreamExecutionEnvironment context) {
	// No timestamp extractor supplied: fall back to ingestion time.
	context.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
	this.context = context;
	this.edges = edges;
}
/**
* Creates a graph from an edge stream operating in event time specified by timeExtractor .
*
* The time characteristic is set to event time.
*
* @see {@link org.apache.flink.streaming.api.TimeCharacteristic}
*
* @param edges a DataStream of edges.
* @param timeExtractor the timestamp extractor.
* @param context the execution environment.
*/
public SimpleEdgeStream(DataStream<Edge<K, EV>> edges, AscendingTimestampExtractor<Edge<K,EV>> timeExtractor, StreamExecutionEnvironment context) {
	// Event time: timestamps and watermarks come from the supplied extractor.
	context.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
	this.context = context;
	this.edges = edges.assignTimestampsAndWatermarks(timeExtractor);
}
/**
* Applies an incremental aggregation on a graphstream and returns a stream of aggregation results
*
* @param summaryAggregation
* @param <S>
* @param <T>
* @return
*/
public <S extends Serializable, T> DataStream<T> aggregate(SummaryAggregation<K,EV,S,T> summaryAggregation) {
	// Delegate to the aggregation, which builds the result stream from the edges.
	// FIXME (marker carried over from the original implementation)
	return summaryAggregation.run(this.edges);
}
/**
* @return the flink streaming execution environment.
*/
@Override
public StreamExecutionEnvironment getContext() {
	// The environment captured at construction time.
	return context;
}
/**
* @return the vertex DataStream.
*/
@Override
public DataStream<Vertex<K, NullValue>> getVertices() {
	// Emit both endpoints of every edge, then deduplicate: keying by vertex id
	// routes all copies of an id to the same distinct-filter instance.
	DataStream<Vertex<K, NullValue>> endpoints = this.edges.flatMap(new EmitSrcAndTarget<K, EV>());
	return endpoints.keyBy(0).filter(new FilterDistinctVertices<K>());
}
/**
* Discretizes the edge stream into tumbling windows of the specified size.
* <p>
* The edge stream is partitioned so that all neighbors of a vertex belong to the same partition.
* The KeyedStream is then windowed into tumbling time windows.
* <p>
* By default, each vertex is grouped with its outgoing edges.
* Use {@link #slice(Time, EdgeDirection)} to manually set the edge direction grouping.
*
* @param size the size of the window
* @return a GraphWindowStream of the specified size
*/
public SnapshotStream<K, EV> slice(Time size) {
	// Default grouping: each vertex with its outgoing edges.
	return this.slice(size, EdgeDirection.OUT);
}
/**
* Discretizes the edge stream into tumbling windows of the specified size.
* <p>
* The edge stream is partitioned so that all neighbors of a vertex belong to the same partition.
* The KeyedStream is then windowed into tumbling time windows.
*
* @param size the size of the window
* @param direction the EdgeDirection to key by
* @return a GraphWindowStream of the specified size, keyed by
*/
/**
 * Discretizes the edge stream into tumbling windows of the given size, keyed so that
 * each vertex is grouped with its edges in the requested direction.
 *
 * @param size the size of the window
 * @param direction the EdgeDirection to key by (IN reverses the stream first,
 *     ALL uses the undirected stream)
 * @return a SnapshotStream over the keyed, windowed edges
 * @throws IllegalArgumentException if the direction is not IN, OUT or ALL
 */
public SnapshotStream<K, EV> slice(Time size, EdgeDirection direction)
	throws IllegalArgumentException {
	switch (direction) {
		case IN:
			return new SnapshotStream<K, EV>(
				this.reverse().getEdges().keyBy(new NeighborKeySelector<K, EV>(0)).timeWindow(size));
		case OUT:
			return new SnapshotStream<K, EV>(
				getEdges().keyBy(new NeighborKeySelector<K, EV>(0)).timeWindow(size));
		case ALL:
			// NOTE(review): a stray "getEdges().keyBy(0).timeWindow(size);" statement was
			// removed here; its result was discarded and it contributed nothing to the job.
			return new SnapshotStream<K, EV>(
				this.undirected().getEdges().keyBy(
					new NeighborKeySelector<K, EV>(0)).timeWindow(size));
		default:
			throw new IllegalArgumentException("Illegal edge direction");
	}
}
private static final class NeighborKeySelector<K, EV> implements KeySelector<Edge<K, EV>, K> {
private final int key;
public NeighborKeySelector(int k) {
this.key = k;
}
public K getKey(Edge<K, EV> edge) throws Exception {
return edge.getField(key);
}
}
private static final class EmitSrcAndTarget<K, EV>
		implements FlatMapFunction<Edge<K, EV>, Vertex<K, NullValue>> {

	@Override
	public void flatMap(Edge<K, EV> edge, Collector<Vertex<K, NullValue>> out) throws Exception {
		// Emit both endpoints of the edge as value-less vertices.
		NullValue none = NullValue.getInstance();
		out.collect(new Vertex<>(edge.getSource(), none));
		out.collect(new Vertex<>(edge.getTarget(), none));
	}
}
/**
 * Keeps only the first occurrence of each vertex id.
 * State is per function instance; correctness relies on all copies of an id
 * being routed to the same instance (the upstream keyBy in getVertices).
 */
private static final class FilterDistinctVertices<K>
		implements FilterFunction<Vertex<K, NullValue>> {

	// Vertex ids seen so far by this instance.
	private final Set<K> keys = new HashSet<>();

	@Override
	public boolean filter(Vertex<K, NullValue> vertex) throws Exception {
		// Set.add returns true only on first insertion, so each id passes exactly
		// once — a single lookup instead of the contains-then-add pair.
		return keys.add(vertex.getId());
	}
}
/**
* @return the edge DataStream.
*/
public DataStream<Edge<K, EV>> getEdges() {
	// Direct accessor; the underlying stream is exposed as-is.
	return edges;
}
/**
* Apply a function to the attribute of each edge in the graph stream.
*
* @param mapper the map function to apply.
* @return a new graph stream.
*/
public <NV> SimpleEdgeStream<K, NV> mapEdges(final MapFunction<Edge<K, EV>, NV> mapper) {
	// The key type is unchanged by the mapping; only the edge value type changes.
	TypeInformation<K> keyType = ((TupleTypeInfo<?>) edges.getType()).getTypeAt(0);
	return new SimpleEdgeStream<>(
		edges.map(new ApplyMapperToEdgeWithType<>(mapper, keyType)), this.context);
}
// Wraps a user value-mapper into an edge-to-edge MapFunction, supplying the produced
// type explicitly because NV cannot be extracted from the wrapped function automatically.
private static final class ApplyMapperToEdgeWithType<K, EV, NV>
	implements MapFunction<Edge<K, EV>, Edge<K, NV>>, ResultTypeQueryable<Edge<K, NV>> {

	// User function producing the new edge value from the whole edge.
	private MapFunction<Edge<K, EV>, NV> innerMapper;
	// NOTE(review): transient, so null after deserialization on workers; presumably
	// getProducedType() is only called on the client before shipping — confirm.
	private transient TypeInformation<K> keyType;

	public ApplyMapperToEdgeWithType(MapFunction<Edge<K, EV>, NV> theMapper, TypeInformation<K> keyType) {
		this.innerMapper = theMapper;
		this.keyType = keyType;
	}

	// Keeps source and target ids; replaces the value with the mapper's output.
	public Edge<K, NV> map(Edge<K, EV> edge) throws Exception {
		return new Edge<>(edge.getSource(), edge.getTarget(), innerMapper.map(edge));
	}

	@SuppressWarnings("unchecked")
	@Override
	public TypeInformation<Edge<K, NV>> getProducedType() {
		// Extract NV from the mapper's generic signature (output type position 1).
		TypeInformation<NV> valueType = TypeExtractor
			.createTypeInfo(MapFunction.class, innerMapper.getClass(), 1, null, null);
		TypeInformation<?> returnType = new TupleTypeInfo<>(Edge.class, keyType, keyType, valueType);
		return (TypeInformation<Edge<K, NV>>) returnType;
	}
}
/**
 * Apply a filter to each vertex in the graph stream.
 * Since this is an edge-only stream, the vertex filter can only access the key of vertices;
 * an edge is retained only if both of its endpoints pass the filter.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
@Override
public SimpleEdgeStream<K, EV> filterVertices(FilterFunction<Vertex<K, NullValue>> filter) {
    return new SimpleEdgeStream<>(
            this.edges.filter(new ApplyVertexFilterToEdges<K, EV>(filter)), this.context);
}
/** Retains an edge only when both endpoint vertices are accepted by the wrapped vertex filter. */
private static final class ApplyVertexFilterToEdges<K, EV>
        implements FilterFunction<Edge<K, EV>> {

    private FilterFunction<Vertex<K, NullValue>> vertexFilter;

    public ApplyVertexFilterToEdges(FilterFunction<Vertex<K, NullValue>> vertexFilter) {
        this.vertexFilter = vertexFilter;
    }

    @Override
    public boolean filter(Edge<K, EV> edge) throws Exception {
        // Wrap each endpoint ID in a value-less Vertex so the user filter can inspect it.
        // Both calls are made unconditionally (no short-circuit) in case the user
        // filter is stateful.
        boolean keepSource = vertexFilter.filter(
                new Vertex<>(edge.getSource(), NullValue.getInstance()));
        boolean keepTarget = vertexFilter.filter(
                new Vertex<>(edge.getTarget(), NullValue.getInstance()));
        return keepSource && keepTarget;
    }
}
/**
 * Apply a filter to each edge in the graph stream.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
@Override
public SimpleEdgeStream<K, EV> filterEdges(FilterFunction<Edge<K, EV>> filter) {
    DataStream<Edge<K, EV>> keptEdges = this.edges.filter(filter);
    return new SimpleEdgeStream<>(keptEdges, this.context);
}
/**
 * Removes the duplicate edges by storing a neighborhood set for each vertex.
 * The stream is keyed by the edge source so that all edges of a vertex are
 * de-duplicated by the same task instance.
 *
 * @return a graph stream with no duplicate edges
 */
@Override
public SimpleEdgeStream<K, EV> distinct() {
    DataStream<Edge<K, EV>> deduplicated =
            this.edges.keyBy(0).flatMap(new DistinctEdgeMapper<K, EV>());
    return new SimpleEdgeStream<>(deduplicated, this.getContext());
}
/**
 * Emits an edge only the first time its target ID is seen.
 * <p>
 * The input stream is keyed by edge source, so the set effectively tracks neighborhoods.
 * NOTE(review): state lives in a plain field, so it is neither checkpointed nor scoped
 * per key — all source keys routed to the same subtask share one target set; verify
 * this matches the intended per-vertex semantics.
 */
private static final class DistinctEdgeMapper<K, EV> implements FlatMapFunction<Edge<K, EV>, Edge<K, EV>> {

    // Target IDs already emitted by this subtask.
    private final Set<K> neighbors = new HashSet<>();

    @Override
    public void flatMap(Edge<K, EV> edge, Collector<Edge<K, EV>> out) throws Exception {
        // Set#add is true only on the first insertion; avoids contains()+add().
        if (neighbors.add(edge.getTarget())) {
            out.collect(edge);
        }
    }
}
/**
 * @return a graph stream in which every edge points in the opposite direction
 */
public SimpleEdgeStream<K, EV> reverse() {
    DataStream<Edge<K, EV>> reversedEdges = this.edges.map(new ReverseEdgeMapper<K, EV>());
    return new SimpleEdgeStream<>(reversedEdges, this.getContext());
}
/** Maps each edge to its reversed counterpart (source and target swapped). */
private static final class ReverseEdgeMapper<K, EV> implements MapFunction<Edge<K, EV>, Edge<K, EV>> {
    @Override
    public Edge<K, EV> map(Edge<K, EV> edge) throws Exception {
        return edge.reverse();
    }
}
/**
 * Merges this edge stream with another graph stream's edges.
 *
 * @param graph the streamed graph to union with
 * @return a streamed graph where the two edge streams are merged
 */
public SimpleEdgeStream<K, EV> union(SimpleEdgeStream<K, EV> graph) {
    return new SimpleEdgeStream<>(this.edges.union(graph.getEdges()), this.getContext());
}
/**
 * @return a graph stream where edges are undirected: every input edge is emitted
 *         once in its original direction and once reversed
 */
public SimpleEdgeStream<K, EV> undirected() {
    return new SimpleEdgeStream<>(this.edges.flatMap(new UndirectEdges<K, EV>()), context);
}
/** Emits every edge twice: once as-is and once with source and target swapped. */
private static final class UndirectEdges<K, EV> implements FlatMapFunction<Edge<K, EV>, Edge<K, EV>> {
    @Override
    public void flatMap(Edge<K, EV> edge, Collector<Edge<K, EV>> out) {
        out.collect(edge);
        out.collect(edge.reverse());
    }
}
/**
 * @return a continuously improving data stream representing the number of vertices in the streamed graph
 */
public DataStream<Long> numberOfVertices() {
    // Emit both endpoints of every edge as vertices, then count distinct IDs globally.
    // The trailing 'true' asks globalAggregate to emit only when the count changes.
    return this.globalAggregate(new DegreeTypeSeparator<K, EV>(true, true),
        new VertexCountMapper<K>(), true);
}
/** Tracks the set of distinct vertex IDs seen so far and emits its running size. */
private static final class VertexCountMapper<K> implements FlatMapFunction<Vertex<K, Long>, Long> {

    private Set<K> vertices;

    public VertexCountMapper() {
        this.vertices = new HashSet<>();
    }

    @Override
    public void flatMap(Vertex<K, Long> vertex, Collector<Long> out) throws Exception {
        // A value is emitted for every input vertex, duplicate or not; downstream
        // may suppress unchanged counts.
        vertices.add(vertex.getId());
        long distinctCount = vertices.size();
        out.collect(distinctCount);
    }
}
/**
 * @return a data stream representing the number of all edges in the streamed graph, including possible duplicates
 */
public DataStream<Long> numberOfEdges() {
    // Parallelism 1 so a single counter instance sees every edge and the
    // emitted running total is globally consistent.
    return this.edges.map(new TotalEdgeCountMapper<K, EV>()).setParallelism(1);
}
/** Emits a running total of the edges processed so far by this instance. */
private static final class TotalEdgeCountMapper<K, EV> implements MapFunction<Edge<K, EV>, Long> {

    private long edgeCount = 0L;

    @Override
    public Long map(Edge<K, EV> edge) throws Exception {
        edgeCount += 1;
        return edgeCount;
    }
}
/**
 * Get the degree stream.
 * Both endpoints of each edge contribute +1 (in-degree and out-degree are
 * both collected), so the emitted value is the running total degree.
 *
 * @return a stream of vertices, with the degree as the vertex value
 * @throws Exception
 */
@Override
public DataStream<Vertex<K, Long>> getDegrees() throws Exception {
    return this.aggregate(new DegreeTypeSeparator<K, EV>(true, true),
        new DegreeMapFunction<K>());
}
/**
 * Get the in-degree stream.
 * Only edge targets contribute (+1 per incoming edge).
 *
 * @return a stream of vertices, with the in-degree as the vertex value
 * @throws Exception
 */
public DataStream<Vertex<K, Long>> getInDegrees() throws Exception {
    return this.aggregate(new DegreeTypeSeparator<K, EV>(true, false),
        new DegreeMapFunction<K>());
}
/**
 * Get the out-degree stream.
 * Only edge sources contribute (+1 per outgoing edge).
 *
 * @return a stream of vertices, with the out-degree as the vertex value
 * @throws Exception
 */
public DataStream<Vertex<K, Long>> getOutDegrees() throws Exception {
    return this.aggregate(new DegreeTypeSeparator<K, EV>(false, true),
        new DegreeMapFunction<K>());
}
/**
 * Emits degree contributions for an edge: a (source, 1) record when out-degree
 * counting is enabled, and a (target, 1) record when in-degree counting is enabled.
 */
private static final class DegreeTypeSeparator<K, EV>
        implements FlatMapFunction<Edge<K, EV>, Vertex<K, Long>> {

    private final boolean emitInDegree;
    private final boolean emitOutDegree;

    public DegreeTypeSeparator(boolean collectIn, boolean collectOut) {
        this.emitInDegree = collectIn;
        this.emitOutDegree = collectOut;
    }

    @Override
    public void flatMap(Edge<K, EV> edge, Collector<Vertex<K, Long>> out) throws Exception {
        // Emission order (out-contribution before in-contribution) is preserved.
        if (emitOutDegree) {
            out.collect(new Vertex<>(edge.getSource(), 1L));
        }
        if (emitInDegree) {
            out.collect(new Vertex<>(edge.getTarget(), 1L));
        }
    }
}
/**
 * Accumulates a running degree per vertex ID and emits the updated total.
 * <p>
 * NOTE(review): the map lives in a plain field, so it is not checkpointed and is
 * shared by all keys routed to the same subtask.
 */
private static final class DegreeMapFunction<K>
        implements MapFunction<Vertex<K, Long>, Vertex<K, Long>> {

    private final Map<K, Long> localDegrees;

    public DegreeMapFunction() {
        localDegrees = new HashMap<>();
    }

    @Override
    public Vertex<K, Long> map(Vertex<K, Long> degree) throws Exception {
        K key = degree.getId();
        // Single get()+put() instead of containsKey()+get()+put()+get().
        Long current = localDegrees.get(key);
        long updated = (current == null ? 0L : current) + degree.getValue();
        localDegrees.put(key, updated);
        return new Vertex<>(key, updated);
    }
}
/**
 * The aggregate function splits the edge stream up into a vertex stream and applies
 * a mapper on the resulting vertices
 *
 * @param edgeMapper the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper the mapper that aggregates vertex values
 * @param <VV> the vertex value used
 * @return a stream of vertices with the aggregated vertex value
 */
public <VV> DataStream<Vertex<K, VV>> aggregate(FlatMapFunction<Edge<K, EV>, Vertex<K, VV>> edgeMapper,
    MapFunction<Vertex<K, VV>, Vertex<K, VV>> vertexMapper) {
    // keyBy(0) groups by vertex ID so that all contributions for a vertex
    // reach the same vertexMapper instance.
    return this.edges.flatMap(edgeMapper)
        .keyBy(0)
        .map(vertexMapper);
}
/**
 * Returns a global aggregate on the previously split vertex stream
 *
 * @param edgeMapper the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper the mapper that aggregates vertex values
 * @param collectUpdates boolean specifying whether the aggregate should only be collected when there is an update
 * @param <VV> the return value type
 * @return a stream of the aggregated values
 */
public <VV> DataStream<VV> globalAggregate(FlatMapFunction<Edge<K, EV>, Vertex<K, VV>> edgeMapper,
    FlatMapFunction<Vertex<K, VV>, VV> vertexMapper, boolean collectUpdates) {
    // Both stages run at parallelism 1 so a single instance observes the whole
    // stream and the aggregate is globally consistent.
    DataStream<VV> result = this.edges.flatMap(edgeMapper)
        .setParallelism(1)
        .flatMap(vertexMapper)
        .setParallelism(1);
    if (collectUpdates) {
        // Suppress consecutive duplicates so only changed aggregate values are emitted.
        result = result.flatMap(new GlobalAggregateMapper<VV>())
            .setParallelism(1);
    }
    return result;
}
//TODO: write tests
/**
 * Builds the neighborhood state by creating adjacency lists.
 * Neighborhoods are currently built using a TreeSet.
 *
 * @param directed if true, only the out-neighbors will be stored;
 *                 otherwise both directions are considered
 * @return a stream of Tuple3, where the first 2 fields identify the edge processed
 *         and the third field is the adjacency list that was updated by processing this edge.
 */
public DataStream<Tuple3<K, K, TreeSet<K>>> buildNeighborhood(boolean directed) {
    // In the undirected case each edge is mirrored, so both endpoints end up
    // in each other's adjacency lists.
    DataStream<Edge<K, EV>> input = directed ? this.getEdges() : this.undirected().getEdges();
    return input.keyBy(0).flatMap(new BuildNeighborhoods<K, EV>());
}
private static final class BuildNeighborhoods<K, EV> implements FlatMapFunction<Edge<K, EV>, Tuple3<K, K, TreeSet<K>>> {
Map<K, TreeSet<K>> neighborhoods = new HashMap<>();
Tuple3<K, K, TreeSet<K>> outTuple = new Tuple3<>();
public void flatMap(Edge<K, EV> e, Collector<Tuple3<K, K, TreeSet<K>>> out) {
TreeSet<K> t;
if (neighborhoods.containsKey(e.getSource())) {
t = neighborhoods.get(e.getSource());
} else {
t = new TreeSet<>();
}
t.add(e.getTarget());
neighborhoods.put(e.getSource(), t);
outTuple.setField(e.getSource(), 0);
outTuple.setField(e.getTarget(), 1);
outTuple.setField(t, 2);
out.collect(outTuple);
}
}
/** Forwards a value only when it differs from the previously emitted one. */
private static final class GlobalAggregateMapper<VV> implements FlatMapFunction<VV, VV> {

    VV previousValue = null;

    @Override
    public void flatMap(VV vv, Collector<VV> out) throws Exception {
        // The first value never equals the initial null, so it is always forwarded.
        if (vv.equals(previousValue)) {
            return;
        }
        previousValue = vv;
        out.collect(vv);
    }
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/SnapshotStream.java | src/main/java/org/apache/flink/graph/streaming/SnapshotStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import java.util.Iterator;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.graph.Edge;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
/**
* A stream of discrete graphs, each maintaining
* the graph state of the edges contained in the respective window.
* It is created by calling {@link SimpleEdgeStream#slice()}.
* The graph slice is keyed by the source or target vertex of the edge stream,
* so that all edges of a vertex are in the same tumbling window.
*
* @param <K> the vertex ID type
* @param <EV> the edge value type
*/
public class SnapshotStream<K, EV> {

    /** The keyed, time-windowed stream of edges this snapshot stream is built from. */
    private final WindowedStream<Edge<K, EV>, K, TimeWindow> windowedStream;

    SnapshotStream(WindowedStream<Edge<K, EV>, K, TimeWindow> window) {
        this.windowedStream = window;
    }

    /**
     * Performs a neighborhood fold on the graph window stream.
     *
     * @param initialValue the initial accumulator value
     * @param foldFunction the fold applied to each edge of a neighborhood
     * @param <T> the accumulator type
     * @return the result stream after applying the user-defined fold operation on the window
     */
    public <T> DataStream<T> foldNeighbors(T initialValue, final EdgesFold<K, EV, T> foldFunction) {
        return windowedStream.fold(initialValue, new EdgesFoldFunction<K, EV, T>(foldFunction));
    }

    /** Adapts an {@link EdgesFold} to Flink's {@link FoldFunction}, exposing the produced type. */
    @SuppressWarnings("serial")
    public static final class EdgesFoldFunction<K, EV, T>
            implements FoldFunction<Edge<K, EV>, T>, ResultTypeQueryable<T> {

        private final EdgesFold<K, EV, T> foldFunction;

        public EdgesFoldFunction(EdgesFold<K, EV, T> foldFunction) {
            this.foldFunction = foldFunction;
        }

        @Override
        public T fold(T accumulator, Edge<K, EV> edge) throws Exception {
            return foldFunction.foldEdges(accumulator, edge.getSource(), edge.getTarget(), edge.getValue());
        }

        @Override
        public TypeInformation<T> getProducedType() {
            // T is the third type parameter (index 2) of the user's EdgesFold.
            return TypeExtractor.createTypeInfo(EdgesFold.class, foldFunction.getClass(), 2,
                null, null);
        }
    }

    /**
     * Performs an aggregation on the neighboring edges of each vertex on the graph window stream.
     * <p>
     * For each vertex, the transformation consecutively calls a
     * {@link EdgesReduce} function until only a single value for each vertex remains.
     * The {@link EdgesReduce} function combines two edge values into one new value of the same type.
     *
     * @param reduceFunction the aggregation function
     * @return a result stream of Tuple2, containing one tuple per vertex.
     * The first field is the vertex ID and the second field is the final value,
     * after applying the user-defined aggregation operation on the neighborhood.
     */
    public DataStream<Tuple2<K, EV>> reduceOnEdges(final EdgesReduce<EV> reduceFunction) {
        // Project the reduced Edge tuple (f0 = vertex ID, f2 = reduced value) onto a Tuple2.
        return windowedStream.reduce(new EdgesReduceFunction<K, EV>(reduceFunction))
            .project(0, 2);
    }

    /** Adapts an {@link EdgesReduce} over edge values to Flink's {@link ReduceFunction} over edges. */
    @SuppressWarnings("serial")
    public static final class EdgesReduceFunction<K, EV> implements ReduceFunction<Edge<K, EV>> {

        private final EdgesReduce<EV> reduceFunction;

        public EdgesReduceFunction(EdgesReduce<EV> reduceFunction) {
            this.reduceFunction = reduceFunction;
        }

        @Override
        public Edge<K, EV> reduce(Edge<K, EV> firstEdge, Edge<K, EV> secondEdge) throws Exception {
            // Reuse the first edge as the carrier of the combined value.
            EV reducedValue = this.reduceFunction.reduceEdges(firstEdge.getValue(), secondEdge.getValue());
            firstEdge.setValue(reducedValue);
            return firstEdge;
        }
    }

    /**
     * Performs a generic neighborhood aggregation in the graph window stream.
     * Each vertex can produce zero, one or more values from the computation on its neighborhood.
     *
     * @param applyFunction the neighborhood computation function
     * @return the result stream after applying the user-defined operation on the window
     */
    public <T> DataStream<T> applyOnNeighbors(final EdgesApply<K, EV, T> applyFunction) {
        return windowedStream.apply(new SnapshotFunction<>(applyFunction));
    }

    /** Exposes a window's edges to an {@link EdgesApply} as (neighbor ID, edge value) pairs. */
    @SuppressWarnings("serial")
    public static final class SnapshotFunction<K, EV, T> implements
        WindowFunction<Edge<K, EV>, T, K, TimeWindow>, ResultTypeQueryable<T> {

        private final EdgesApply<K, EV, T> applyFunction;

        public SnapshotFunction(EdgesApply<K, EV, T> applyFunction) {
            this.applyFunction = applyFunction;
        }

        @Override
        public void apply(K key, TimeWindow window, final Iterable<Edge<K, EV>> edges, Collector<T> out)
                throws Exception {
            // View the edges as (neighbor ID, edge value) pairs. A fresh iterator is
            // created on every iterator() call so the user function may traverse the
            // neighborhood more than once (bug fix: the previous implementation cached
            // a single iterator, making any second pass appear empty).
            Iterable<Tuple2<K, EV>> neighborsIterable = new Iterable<Tuple2<K, EV>>() {
                @Override
                public Iterator<Tuple2<K, EV>> iterator() {
                    final Iterator<Edge<K, EV>> edgesIterator = edges.iterator();
                    return new Iterator<Tuple2<K, EV>>() {
                        @Override
                        public boolean hasNext() {
                            return edgesIterator.hasNext();
                        }

                        @Override
                        public Tuple2<K, EV> next() {
                            // Edge is a 3-field tuple; f1 is the target (neighbor), f2 the value.
                            Edge<K, EV> nextEdge = edgesIterator.next();
                            return new Tuple2<K, EV>(nextEdge.f1, nextEdge.f2);
                        }

                        @Override
                        public void remove() {
                            edgesIterator.remove();
                        }
                    };
                }
            };
            applyFunction.applyOnEdges(key, neighborsIterable, out);
        }

        @Override
        public TypeInformation<T> getProducedType() {
            // T is the third type parameter (index 2) of the user's EdgesApply.
            return TypeExtractor.createTypeInfo(EdgesApply.class, applyFunction.getClass(), 2,
                null, null);
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/EdgesFold.java | src/main/java/org/apache/flink/graph/streaming/EdgesFold.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import java.io.Serializable;
import org.apache.flink.api.common.functions.Function;
/**
* Interface to be implemented by the function applied to a vertex neighborhood
* in the {@link SnapshotStream#foldNeighbors(Object, EdgesFold)} method.
*
* @param <K> the vertex ID type
* @param <EV> the edge value type
* @param <T> the accumulator type
*/
public interface EdgesFold<K, EV, T> extends Function, Serializable {

    /**
     * Folds one edge into the accumulator value.
     * The foldEdges function is consecutively applied to all edges of a neighborhood,
     * until only a single value remains.
     *
     * @param accum the initial value and accumulator
     * @param vertexID the vertex ID
     * @param neighborID the neighbor's ID
     * @param edgeValue the edge value
     * @return the updated accumulator after folding in this edge.
     * @throws Exception
     */
    T foldEdges(T accum, K vertexID, K neighborID, EV edgeValue) throws Exception;
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/EventType.java | src/main/java/org/apache/flink/graph/streaming/EventType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
/**
* Defines the event type in a GraphStream.
*/
public enum EventType {
    /** An edge was added to the graph. */
    EDGE_ADDITION,
    /** An edge was removed from the graph. */
    EDGE_DELETION
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/GraphStream.java | src/main/java/org/apache/flink/graph/streaming/GraphStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import java.io.Serializable;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Vertex;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
/**
* The super-class of all graph stream types.
*
* @param <K> the vertex ID type
* @param <VV> the vertex value type
* @param <EV> the edge value type
*/
public abstract class GraphStream<K, VV, EV> {

    /**
     * @return the Flink streaming execution environment.
     */
    public abstract StreamExecutionEnvironment getContext();

    /**
     * @return the vertex DataStream.
     */
    public abstract DataStream<Vertex<K, VV>> getVertices();

    /**
     * @return the edge DataStream.
     */
    public abstract DataStream<Edge<K, EV>> getEdges();

    /**
     * Apply a function to the attribute of each edge in the graph stream.
     *
     * @param mapper the map function to apply.
     * @param <NV> the new edge value type.
     * @return a new graph stream.
     */
    public abstract <NV> GraphStream<K, VV, NV> mapEdges(final MapFunction<Edge<K, EV>, NV> mapper);

    /**
     * Apply a filter to each vertex in the graph stream.
     * Since this is an edge-only stream, the vertex filter can only access the key of vertices.
     *
     * @param filter the filter function to apply.
     * @return the filtered graph stream.
     */
    public abstract GraphStream<K, VV, EV> filterVertices(FilterFunction<Vertex<K, NullValue>> filter);

    /**
     * Apply a filter to each edge in the graph stream.
     *
     * @param filter the filter function to apply.
     * @return the filtered graph stream.
     */
    public abstract GraphStream<K, VV, EV> filterEdges(FilterFunction<Edge<K, EV>> filter);

    /**
     * Removes the duplicate edges by storing a neighborhood set for each vertex.
     *
     * @return a graph stream with no duplicate edges
     */
    public abstract GraphStream<K, VV, EV> distinct();

    /**
     * Get the degree stream.
     *
     * @return a stream of vertices, with the degree as the vertex value
     * @throws Exception
     */
    public abstract DataStream<Vertex<K, Long>> getDegrees() throws Exception;

    /**
     * Get the in-degree stream.
     *
     * @return a stream of vertices, with the in-degree as the vertex value
     * @throws Exception
     */
    public abstract DataStream<Vertex<K, Long>> getInDegrees() throws Exception;

    /**
     * Get the out-degree stream.
     *
     * @return a stream of vertices, with the out-degree as the vertex value
     * @throws Exception
     */
    public abstract DataStream<Vertex<K, Long>> getOutDegrees() throws Exception;

    /**
     * @return a data stream representing the number of all edges in the streamed graph, including possible duplicates
     */
    public abstract DataStream<Long> numberOfEdges();

    /**
     * @return a continuously improving data stream representing the number of vertices in the streamed graph
     */
    public abstract DataStream<Long> numberOfVertices();

    /**
     * @return a graph stream where edges are undirected
     */
    public abstract GraphStream<K, VV, EV> undirected();

    /**
     * @return a graph stream with the edge directions reversed
     */
    public abstract GraphStream<K, VV, EV> reverse();

    /**
     * Applies an incremental aggregation on a graphstream and returns a stream of aggregation results.
     *
     * @param summaryAggregation the incremental aggregation to run on this stream
     * @param <S> the (serializable) partial-aggregate state type
     * @param <T> the aggregation result type
     * @return the stream of aggregation results
     */
    public abstract <S extends Serializable, T> DataStream<T> aggregate(
        SummaryAggregation<K,EV,S,T> summaryAggregation);
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/SummaryTreeReduce.java | src/main/java/org/apache/flink/graph/streaming/SummaryTreeReduce.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.graph.Edge;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.windowing.time.Time;
import java.io.Serializable;
import java.util.concurrent.TimeUnit;
/**
 * Graph Tree Aggregation on Parallel Time Window.
 * <p>
 * Runs a {@link SummaryBulkAggregation} as a reduction tree: edges are pre-folded
 * into partial aggregates on {@code degree} parallel partitions per time window,
 * the partials are combined pairwise while the parallelism halves at each level
 * (see {@link #enhance}), and a final parallelism-1 window reduce produces the
 * global result, optionally mapped by the transform function.
 */
public class SummaryTreeReduce<K, EV, S extends Serializable, T> extends SummaryBulkAggregation<K, EV, S, T> {

    private static final long serialVersionUID = 1L;

    // Leaf-level parallelism of the aggregation tree; -1 means "use the input
    // stream's parallelism" (resolved lazily in run()).
    private int degree;

    /**
     * @param updateFun folds single edges into a partial aggregate of type S
     * @param combineFun combines two partial aggregates into one
     * @param transformFun optional final mapping from the aggregate S to the result T (may be null)
     * @param initialVal the initial partial-aggregate value
     * @param timeMillis the window length in milliseconds
     * @param transientState forwarded to {@link SummaryBulkAggregation} — semantics defined there
     * @param degree leaf parallelism of the tree, or -1 for the input stream's parallelism
     */
    public SummaryTreeReduce(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, MapFunction<S, T> transformFun, S initialVal, long timeMillis, boolean transientState, int degree) {
        super(updateFun, combineFun, transformFun, initialVal, timeMillis, transientState);
        this.degree = degree;
    }

    /** Convenience constructor without a final transform function. */
    public SummaryTreeReduce(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, S initialVal, long timeMillis, boolean transientState, int degree) {
        this(updateFun, combineFun, null, initialVal, timeMillis, transientState, degree);
    }

    /** Convenience constructor without a transform, using the stream's parallelism as degree. */
    public SummaryTreeReduce(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, S initialVal, long timeMillis, boolean transientState) {
        this(updateFun, combineFun, null, initialVal, timeMillis, transientState, -1);
    }

    @SuppressWarnings("unchecked")
    @Override
    public DataStream<T> run(final DataStream<Edge<K, EV>> edgeStream) {
        // Type info for the (partition index, edge) pairs produced by PartitionMapper
        // (helper inherited from SummaryBulkAggregation).
        TypeInformation<Tuple2<Integer, Edge<K, EV>>> basicTypeInfo = new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, edgeStream.getType());
        TupleTypeInfo edgeTypeInfo = (TupleTypeInfo) edgeStream.getType();
        // S is the third type parameter (index 2) of the user's EdgesFold.
        TypeInformation<S> partialAggType = TypeExtractor.createTypeInfo(EdgesFold.class, getUpdateFun().getClass(), 2, edgeTypeInfo.getTypeAt(0), edgeTypeInfo.getTypeAt(2));
        TypeInformation<Tuple2<Integer, S>> partialTypeInfo = new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, partialAggType);
        // Resolve the -1 sentinel to the parallelism of the input stream.
        degree = (degree == -1) ? edgeStream.getParallelism() : degree;
        // Leaf level: fold the edges of each (partition, window) into a partial aggregate.
        DataStream<S> partialAgg = edgeStream
            .map(new PartitionMapper<>()).returns(basicTypeInfo)
            .setParallelism(degree)
            .keyBy(0)
            .timeWindow(Time.of(timeMillis, TimeUnit.MILLISECONDS))
            .fold(getInitialValue(), new PartialAgg<>(getUpdateFun(), partialAggType)).setParallelism(degree);
        //split here
        // Tree levels: repeatedly merge pairs of partitions until at most two remain.
        DataStream<Tuple2<Integer, S>> treeAgg = enhance(partialAgg.map(new PartitionMapper<>()).setParallelism(degree).returns(partialTypeInfo), partialTypeInfo);
        // Root: strip partition tags, combine the surviving partials in one global
        // window, then emit through the aggregator and the optional transform.
        DataStream<S> resultStream = treeAgg.map(new PartitionStripper<>()).setParallelism(treeAgg.getParallelism())
            .timeWindowAll(Time.of(timeMillis, TimeUnit.MILLISECONDS))
            .reduce(getCombineFun())
            .flatMap(getAggregator(edgeStream)).setParallelism(1);
        return (getTransform() != null) ? resultStream.map(getTransform()) : (DataStream<T>) resultStream;
    }

    /**
     * Recursively builds one level of the reduction tree: partitions are merged
     * pairwise (key = index / 2) at half the current parallelism, until the input
     * parallelism is at most 2.
     */
    private DataStream<Tuple2<Integer, S>> enhance(DataStream<Tuple2<Integer, S>> input, TypeInformation<Tuple2<Integer, S>> aggType) {
        if (input.getParallelism() <= 2) {
            return input;
        }
        int nextParal = input.getParallelism() / 2;
        DataStream<Tuple2<Integer, S>> unpartitionedStream =
            input.keyBy(new KeySelector<Tuple2<Integer, S>, Integer>() {
                //collapse two partitions into one
                @Override
                public Integer getKey(Tuple2<Integer, S> record) throws Exception {
                    return record.f0 / 2;
                }
            });
        //repartition stream to p / 2 aggregators
        KeyedStream<Tuple2<Integer, S>, Integer> repartitionedStream =
            unpartitionedStream.map(new PartitionReMapper()).returns(aggType)
                .setParallelism(nextParal)
                .keyBy(0);
        //window again on event time and aggregate
        DataStream<Tuple2<Integer, S>> aggregatedStream =
            repartitionedStream.timeWindow(Time.of(timeMillis, TimeUnit.MILLISECONDS))
                .reduce(new AggregationWrapper<>(getCombineFun()))
                .setParallelism(nextParal);
        return enhance(aggregatedStream, aggType);
    }

    /** Re-tags each record with the index of the subtask that processes it. */
    protected static final class PartitionReMapper<Y> extends RichMapFunction<Tuple2<Integer, Y>, Tuple2<Integer, Y>> {

        private int partitionIndex;

        @Override
        public void open(Configuration parameters) throws Exception {
            this.partitionIndex = getRuntimeContext().getIndexOfThisSubtask();
        }

        @Override
        public Tuple2<Integer, Y> map(Tuple2<Integer, Y> tpl) throws Exception {
            return new Tuple2<>(partitionIndex, tpl.f1);
        }
    }

    /** Drops the partition tag, keeping only the partial aggregate. */
    public static class PartitionStripper<S> implements MapFunction<Tuple2<Integer, S>, S> {
        @Override
        public S map(Tuple2<Integer, S> tpl) throws Exception {
            return tpl.f1;
        }
    }

    /** Applies the combine function to tagged partials, keeping the first record's tag. */
    public static class AggregationWrapper<S> implements ReduceFunction<Tuple2<Integer, S>> {

        private final ReduceFunction<S> wrappedFunction;

        protected AggregationWrapper(ReduceFunction<S> wrappedFunction) {
            this.wrappedFunction = wrappedFunction;
        }

        @Override
        public Tuple2<Integer, S> reduce(Tuple2<Integer, S> tpl1, Tuple2<Integer, S> tpl2) throws Exception {
            return new Tuple2<>(tpl1.f0, wrappedFunction.reduce(tpl1.f1, tpl2.f1));
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/EdgesApply.java | src/main/java/org/apache/flink/graph/streaming/EdgesApply.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming;
import java.io.Serializable;
import org.apache.flink.api.common.functions.Function;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
/**
 * Interface to be implemented by the function applied to a vertex neighborhood
 * in the {@link SnapshotStream#applyOnNeighbors(EdgesApply)} method.
 *
 * @param <K> the vertex ID type
 * @param <EV> the edge value type
 * @param <T> the accumulator type
 */
public interface EdgesApply<K, EV, T> extends Function, Serializable {
/**
 * Computes a custom function on the neighborhood of a vertex.
 * The vertex can output zero, one or more result values.
 *
 * @param vertexID the vertex ID
 * @param neighbors the neighbors of this vertex. The first field of the tuple contains
 * the neighbor ID and the second field contains the edge value.
 * @param out the collector to emit the result
 * @throws Exception any exception raised by the user-defined computation; it is
 * propagated to the caller
 */
void applyOnEdges(K vertexID, Iterable<Tuple2<K, EV>> neighbors, Collector<T> out) throws Exception;
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/SummaryAggregation.java | src/main/java/org/apache/flink/graph/streaming/SummaryAggregation.java | package org.apache.flink.graph.streaming;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.operators.translation.WrappingFunction;
import org.apache.flink.graph.Edge;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.util.Collector;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
/**
 * Base class for summary aggregations over graph streams: each partition folds
 * incoming edges into a state of type {@code S} (via {@link EdgesFold}), partial
 * states are optionally combined (via {@link ReduceFunction}) and finally mapped
 * to the output type {@code T}.
 *
 * @param <K> key type
 * @param <EV> edge value type
 * @param <S> intermediate state type
 * @param <T> fold result type
 */
public abstract class SummaryAggregation<K, EV, S extends Serializable, T> implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * A function applied to each edge in an edge stream that aggregates a user-defined graph property state. In case
 * we slice the edge stream into windows a fold will output its aggregation state value per window, otherwise, this
 * operates edge-wise
 */
private final EdgesFold<K, EV, S> updateFun;
/**
 * An optional combine function for updating graph property state.
 * May be null; the Merger below forwards inputs unchanged in that case.
 */
private final ReduceFunction<S> combineFun;
/**
 * An optional map function that converts state to output
 */
private final MapFunction<S, T> transform;
// Neutral/initial value of the aggregation state; also used to reset the
// merged state after each emission when transientState is set.
private final S initialValue;
/**
 * This flag indicates whether a merger state is cleaned up after an operation
 */
private final boolean transientState;
/**
 * @param updateFun fold function merging each edge into the partitioned state
 * @param combineFun optional (nullable) combiner of partial states
 * @param transform optional map from aggregated state to the output type
 * @param initialValue neutral state value
 * @param transientState if true, the merged state resets to {@code initialValue} after every output
 */
protected SummaryAggregation(EdgesFold<K, EV, S> updateFun, ReduceFunction<S> combineFun, MapFunction<S, T> transform, S initialValue, boolean transientState) {
this.updateFun = updateFun;
this.combineFun = combineFun;
this.transform = transform;
this.initialValue = initialValue;
this.transientState = transientState;
}
/**
 * Applies this aggregation to the given edge stream and returns the stream of results.
 */
public abstract DataStream<T> run(DataStream<Edge<K, EV>> edgeStream);
public ReduceFunction<S> getCombineFun() {
return combineFun;
}
public EdgesFold<K, EV, S> getUpdateFun() {
return updateFun;
}
public MapFunction<S, T> getTransform() {
return transform;
}
public boolean isTransientState() {
return transientState;
}
public S getInitialValue() {
return initialValue;
}
//FIXME - naive prototype - blocking reduce should be implemented correctly
// NOTE(review): the edgeStream argument is currently unused here.
protected FlatMapFunction<S, S> getAggregator(final DataStream<Edge<K, EV>> edgeStream) {
return new Merger<>(getInitialValue(), getCombineFun(), isTransientState());
}
/**
 * In this prototype the Merger is non-blocking and merges partitions incrementally
 *
 * @param <S>
 */
@SuppressWarnings("serial")
private final static class Merger<S extends Serializable> extends WrappingFunction<ReduceFunction<S>> implements FlatMapFunction<S, S>, ListCheckpointed<S> {
// Neutral value used to (re)initialize the running summary.
private final S initialVal;
// Running merged summary of all partial states seen so far.
private S summary;
// If true, the summary is reset to initialVal after each emitted result.
private final boolean transientState;
private Merger(S initialVal, ReduceFunction<S> combiner, boolean transientState) {
super(combiner);
this.initialVal = initialVal;
this.summary = initialVal;
this.transientState = transientState;
}
// Folds each incoming partial state into the running summary and emits the
// updated summary; without a combiner, inputs are simply forwarded.
@Override
public void flatMap(S s, Collector<S> collector) throws Exception {
if (getWrappedFunction() != null) {
summary = getWrappedFunction().reduce(s, summary);
collector.collect(summary);
if (transientState) {
summary = initialVal;
}
} else {
collector.collect(s);
}
}
/**
 * Graph state is task-parallel, thus, we use operator state.
 *
 * TODO In the future we can change the redistribution strategy to split a summary and repartition it customly
 *
 */
@Override
public List<S> snapshotState(long l, long l1) throws Exception {
return Collections.singletonList(summary);
}
// NOTE(review): assumes the restored list has exactly one element;
// list.get(0) would throw on an empty restore — confirm against the
// ListCheckpointed redistribution contract.
@Override
public void restoreState(List<S> list) throws Exception {
summary = list.get(0);
}
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/util/SampledEdge.java | src/main/java/org/apache/flink/graph/streaming/util/SampledEdge.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.graph.Edge;
import org.apache.flink.types.NullValue;
/**
 * A tuple carrying one sampled edge together with its sampling context:
 * {@code (subtask, instance, edge, edgeCount, resample)}.
 */
public class SampledEdge extends Tuple5<Integer, Integer, Edge<Long, NullValue>, Integer, Boolean> {

    public SampledEdge() {}

    /**
     * @param subtask subtask identifier (stored in f0)
     * @param instance instance identifier (stored in f1)
     * @param edge the sampled edge (stored in f2)
     * @param edgeCount running edge count (stored in f3)
     * @param resample resample flag (stored in f4); exact semantics are defined
     *                 by the sampling operator that produces these tuples
     */
    public SampledEdge(int subtask, int instance, Edge<Long, NullValue> edge, int edgeCount, boolean resample) throws Exception {
        f0 = subtask;
        f1 = instance;
        f2 = edge;
        f3 = edgeCount;
        f4 = resample;
    }

    /** @return the subtask identifier (f0) */
    public int getSubTask() {
        return f0;
    }

    /** @return the instance identifier (f1) */
    public int getInstance() {
        return f1;
    }

    /** @return the sampled edge (f2) */
    public Edge<Long, NullValue> getEdge() {
        return f2;
    }

    /** @return the running edge count (f3) */
    public int getEdgeCount() {
        return f3;
    }

    /** @return the resample flag (f4) */
    public boolean isResampled() {
        return f4;
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/util/MatchingEvent.java | src/main/java/org/apache/flink/graph/streaming/util/MatchingEvent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
/**
 * An event on a streamed matching: an edge together with the kind of change
 * ({@code ADD} or {@code REMOVE}) it represents.
 */
public class MatchingEvent extends Tuple2<MatchingEvent.Type, Edge<Long, Long>> {

    /** The kind of change carried by this event. */
    public enum Type {ADD, REMOVE}

    public MatchingEvent() {}

    /**
     * @param type the kind of change (stored in f0)
     * @param edge the affected edge (stored in f1)
     */
    public MatchingEvent(MatchingEvent.Type type, Edge<Long, Long> edge) throws Exception {
        this.f0 = type;
        this.f1 = edge;
    }

    /** @return the event type (f0) */
    public MatchingEvent.Type getType() {
        return this.f0;
    }

    /**
     * @return the event type (f0)
     * @deprecated misspelled accessor kept for backward compatibility;
     *             use {@link #getType()} instead.
     */
    @Deprecated
    public MatchingEvent.Type geType() {
        return getType();
    }

    /** @return the affected edge (f1) */
    public Edge<Long, Long> getEdge() {
        return this.f1;
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/util/SignedVertex.java | src/main/java/org/apache/flink/graph/streaming/util/SignedVertex.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.api.java.tuple.Tuple2;
public class SignedVertex extends Tuple2<Long, Boolean> {
public SignedVertex() {}
public SignedVertex(long vertex, boolean sign) {
super(vertex, sign);
}
public long getVertex() {
return this.f0;
}
public boolean getSign() {
return this.f1;
}
public SignedVertex reverse() {
return new SignedVertex(this.getVertex(), !this.getSign());
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/util/TriangleEstimate.java | src/main/java/org/apache/flink/graph/streaming/util/TriangleEstimate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.api.java.tuple.Tuple3;
/**
 * A tuple carrying a partial triangle-count estimate:
 * {@code (source, edgeCount, beta)}.
 */
public class TriangleEstimate extends Tuple3<Integer, Integer, Integer> {

    public TriangleEstimate() {}

    /**
     * @param source source identifier (stored in f0)
     * @param edges observed edge count (stored in f1)
     * @param beta the beta value of the estimate (stored in f2); its semantics
     *             are defined by the estimator producing these tuples
     */
    public TriangleEstimate(int source, int edges, int beta) throws Exception {
        f0 = source;
        f1 = edges;
        f2 = beta;
    }

    /** @return the source identifier (f0) */
    public int getSource() {
        return f0;
    }

    /** @return the observed edge count (f1) */
    public int getEdgeCount() {
        return f1;
    }

    /** @return the beta value (f2) */
    public int getBeta() {
        return f2;
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/summaries/DisjointSet.java | src/main/java/org/apache/flink/graph/streaming/summaries/DisjointSet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.summaries;
import java.io.Serializable;
import java.util.*;
public class DisjointSet<R extends Serializable> implements Serializable {
private static final long serialVersionUID = 1L;
private Map<R, R> matches;
private Map<R, Integer> ranks;
public DisjointSet() {
matches = new HashMap<>();
ranks = new HashMap<>();
}
public DisjointSet(Set<R> elements) {
this();
for (R element : elements) {
matches.put(element, element);
ranks.put(element, 0);
}
}
public Map<R, R> getMatches() {
return matches;
}
/**
* Creates a new disjoined set solely with e
*
* @param e
*/
public void makeSet(R e) {
matches.put(e, e);
ranks.put(e, 0);
}
/**
* Find returns the root of the disjoint set e belongs in.
* It implements path compression, flattening the tree whenever used, attaching nodes directly to the disjoint
* set root if not already.
*
* @param e
* @return the root of the connected component
*/
public R find(R e) {
if (!matches.containsKey(e)) {
return null;
}
R parent = matches.get(e);
if (!parent.equals(e)) {
R tmp = find(parent);
if (!parent.equals(tmp)) {
parent = tmp;
matches.put(e, parent);
}
}
return parent;
}
/**
* Union combines the two possibly disjoint sets where e1 and e2 belong in.
* Optimizations:
* <p/>
* - In case e1 or e2 do not exist they are being added directly in the same disjoint set.
* - Union by Rank to minimize lookup depth
*
* @param e1
* @param e2
*/
public void union(R e1, R e2) {
if (!matches.containsKey(e1)) {
makeSet(e1);
}
if (!matches.containsKey(e2)) {
makeSet(e2);
}
R root1 = find(e1);
R root2 = find(e2);
if (root1.equals(root2)) {
return;
}
int dist1 = ranks.get(root1);
int dist2 = ranks.get(root2);
if (dist1 > dist2) {
matches.put(root2, root1);
} else if (dist1 < dist2) {
matches.put(root1, root2);
} else {
matches.put(root2, root1);
ranks.put(root1, dist1 + 1);
}
}
/**
* Merge works in a similar fashion to a naive symmetric hash join.
* We keep the current disjoint sets and attach all nodes of 'other' incrementally
* There is certainly room for further optimisations...
*
* @param other
*/
public void merge(DisjointSet<R> other) {
for (Map.Entry<R, R> entry : other.getMatches().entrySet()) {
union(entry.getKey(), entry.getValue());
}
}
@Override
public String toString() {
Map<R, List<R>> comps = new HashMap<>();
for (R vertex : getMatches().keySet()) {
R parent = find(vertex);
if (!comps.containsKey(parent)) {
List<R> vertices = new ArrayList<>();
vertices.add(vertex);
comps.put(parent, vertices);
} else {
List<R> cc = comps.get(parent);
cc.add(vertex);
comps.put(parent, cc);
}
}
return comps.toString();
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/summaries/Candidates.java | src/main/java/org/apache/flink/graph/streaming/summaries/Candidates.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.summaries;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.streaming.util.SignedVertex;
import java.io.Serializable;
import java.util.*;
/**
 * The running state of a streaming bipartiteness check: a success flag (f0)
 * plus a map (f1) from component ID to that component's signed vertices.
 * The flag becomes false once a sign conflict (an odd cycle) is detected.
 */
public class Candidates extends Tuple2<Boolean, TreeMap<Long, Map<Long, SignedVertex>>> implements Serializable {

    public Candidates() {}

    /**
     * Creates an empty candidate set.
     *
     * @param success whether this candidate set still represents a bipartite state
     */
    public Candidates(boolean success) {
        this.f0 = success;
        this.f1 = new TreeMap<>();
    }

    /**
     * Copy constructor: creates a candidate set with the given success flag and
     * all components of {@code input}.
     */
    public Candidates(boolean success, Candidates input) throws Exception {
        this(success);
        for (Map.Entry<Long, Map<Long, SignedVertex>> entry : input.getMap().entrySet()) {
            this.add(entry.getKey(), entry.getValue());
        }
    }

    /** @return false once a sign conflict has been detected */
    public boolean getSuccess() {
        return this.f0;
    }

    /** @return the component-ID to (vertex-ID to signed vertex) mapping */
    public TreeMap<Long, Map<Long, SignedVertex>> getMap() {
        return this.f1;
    }

    /**
     * Adds all given vertices to the component.
     *
     * @return false if any vertex is already stored with the opposite sign
     */
    public boolean add(long component, Map<Long, SignedVertex> vertices) throws Exception {
        for (SignedVertex vertex : vertices.values()) {
            if (!this.add(component, vertex)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Adds a single signed vertex to the component.
     *
     * @return false if the vertex is already stored with the opposite sign
     */
    public boolean add(long component, SignedVertex vertex) throws Exception {
        if (!this.getMap().containsKey(component)) {
            this.getMap().put(component, new TreeMap<Long, SignedVertex>());
        }
        if (this.getMap().get(component).containsKey(vertex.getVertex())) {
            SignedVertex storedVertex = this.getMap().get(component).get(vertex.getVertex());
            if (storedVertex.getSign() != vertex.getSign()) {
                return false;
            }
        }
        this.getMap().get(component).put(vertex.getVertex(), vertex);
        return true;
    }

    //TODO clean up
    /**
     * Merges another candidate set into this one, component by component.
     * Returns a failed candidate set as soon as any merge detects a sign conflict.
     */
    public Candidates merge(Candidates input) throws Exception {
        // Propagate failure
        if (!input.getSuccess() || !this.getSuccess()) {
            return fail();
        }
        // Compare each input component with each candidate component and merge accordingly
        for (Map.Entry<Long, Map<Long, SignedVertex>> inEntry : input.getMap().entrySet()) {
            List<Long> mergeWith = new ArrayList<>();
            for (Map.Entry<Long, Map<Long, SignedVertex>> selfEntry : this.getMap().entrySet()) {
                long selfKey = selfEntry.getKey();
                // If the two components are exactly the same, skip them
                if (inEntry.getValue().keySet().containsAll(selfEntry.getValue().keySet())
                        && selfEntry.getValue().keySet().containsAll(inEntry.getValue().keySet())) {
                    continue;
                }
                // Find vertices of input component in the candidate component
                for (long inVertex : inEntry.getValue().keySet()) {
                    if (selfEntry.getValue().containsKey(inVertex)) {
                        if (!mergeWith.contains(selfKey)) {
                            mergeWith.add(selfKey);
                            break;
                        }
                    }
                }
            }
            if (mergeWith.isEmpty()) {
                // If the input component is disjoint from all components of the candidate,
                // simply add that component
                this.add(inEntry.getKey(), inEntry.getValue());
            } else {
                // Merge the input with the lowest id component in candidate
                Collections.sort(mergeWith);
                long firstKey = mergeWith.get(0);
                boolean success;
                success = merge(input, this, inEntry.getKey(), firstKey);
                if (!success) {
                    return fail();
                }
                firstKey = Math.min(inEntry.getKey(), firstKey);
                // Merge other components of candidate into the lowest id component
                for (int i = 1; i < mergeWith.size(); ++i) {
                    success = merge(this, this, mergeWith.get(i), firstKey);
                    if (!success) {
                        // BUG FIX: the failure result used to be discarded here
                        // (bare "fail();"), silently continuing after a sign
                        // conflict. Propagate it, like the branch above.
                        return fail();
                    }
                    this.getMap().remove(mergeWith.get(i));
                }
            }
        }
        return this;
    }

    /**
     * Merges component {@code inputKey} of {@code input} into component
     * {@code selfKey} of {@code candidates}. Precondition: the two components
     * share at least one vertex (otherwise {@code mergeBy.get(0)} throws).
     *
     * @return false if the relative signs of the shared vertices are inconsistent
     */
    private boolean merge(Candidates input, Candidates candidates, long inputKey, long selfKey) throws Exception {
        Map<Long, SignedVertex> inputComponent = input.getMap().get(inputKey);
        Map<Long, SignedVertex> selfComponent = candidates.getMap().get(selfKey);
        // Find the vertices to merge along
        List<Long> mergeBy = new ArrayList<>();
        for (long inputVertex : inputComponent.keySet()) {
            if (selfComponent.containsKey(inputVertex)) {
                mergeBy.add(inputVertex);
            }
        }
        // Determine if the merge should be with reversed signs or not
        boolean inputSign = inputComponent.get(mergeBy.get(0)).getSign();
        boolean selfSign = selfComponent.get(mergeBy.get(0)).getSign();
        boolean reversed = inputSign != selfSign;
        // Evaluate the merge: all shared vertices must agree on the orientation
        boolean success = true;
        for (long mergeVertex : mergeBy) {
            inputSign = inputComponent.get(mergeVertex).getSign();
            selfSign = selfComponent.get(mergeVertex).getSign();
            if (reversed) {
                success = inputSign != selfSign;
            } else {
                success = inputSign == selfSign;
            }
            if (!success) {
                return false;
            }
        }
        // Execute the merge
        long commonKey = Math.min(inputKey, selfKey);
        // Merge input vertices, flipping their signs if the orientation is reversed
        for (SignedVertex inputVertex : inputComponent.values()) {
            if (reversed) {
                success = candidates.add(commonKey, inputVertex.reverse());
            } else {
                success = candidates.add(commonKey, inputVertex);
            }
            if (!success) {
                return false;
            }
        }
        return true;
    }

    /** @return a fresh candidate set marked as failed */
    private Candidates fail() {
        return new Candidates(false);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/summaries/AdjacencyListGraph.java | src/main/java/org/apache/flink/graph/streaming/summaries/AdjacencyListGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.summaries;
import java.io.Serializable;
import java.util.*;
/**
 * A simple, undirected adjacency list graph representation with methods for
 * bounded traversals. Used in the Spanner library method.
 *
 * @param <K> the vertex id type
 */
public class AdjacencyListGraph<K extends Comparable<K>> implements Serializable {

    // Undirected adjacency: every edge is stored in both endpoint sets.
    private Map<K, HashSet<K>> adjacencyMap;

    public AdjacencyListGraph() {
        adjacencyMap = new HashMap<>();
    }

    /** @return the underlying vertex-to-neighbors map */
    public Map<K, HashSet<K>> getAdjacencyMap() {
        return adjacencyMap;
    }

    /**
     * Adds the undirected edge (src, trg) to the graph.
     *
     * @param src one endpoint
     * @param trg the other endpoint
     */
    public void addEdge(K src, K trg) {
        connect(src, trg);
        connect(trg, src);
    }

    // Registers 'to' in the neighbor set of 'from', creating the set on first use.
    private void connect(K from, K to) {
        HashSet<K> neighbors = adjacencyMap.get(from);
        if (neighbors == null) {
            neighbors = new HashSet<>();
            adjacencyMap.put(from, neighbors);
        }
        neighbors.add(to);
    }

    /**
     * Breadth-first search from src, bounded to k hops.
     *
     * @param src the start vertex
     * @param trg the target vertex
     * @param k the hop budget
     * @return true iff trg is reachable from src within at most k edges
     */
    public boolean boundedBFS(K src, K trg, int k) {
        HashSet<K> srcNeighbors = adjacencyMap.get(src);
        if (srcNeighbors == null) {
            // src has never been seen: nothing is reachable from it
            return false;
        }
        Set<K> visited = new HashSet<>();
        visited.add(src);
        Queue<Node> queue = new ArrayDeque<>();
        for (K neighbor : srcNeighbors) {
            queue.add(new Node(neighbor, 1));
        }
        Node current;
        while ((current = queue.poll()) != null) {
            if (current.getId().equals(trg)) {
                // target reached within <= k hops
                return true;
            }
            visited.add(current.getId());
            // expand only while within the hop budget
            if (current.getLevel() < k) {
                for (K neighbor : adjacencyMap.get(current.getId())) {
                    if (!visited.contains(neighbor)) {
                        queue.add(new Node(neighbor, current.getLevel() + 1));
                    }
                }
            }
        }
        return false;
    }

    /** Removes all vertices and edges. */
    public void reset() {
        adjacencyMap.clear();
    }

    /** A (vertex, BFS level) pair used by {@link #boundedBFS}. */
    public class Node {
        private final K id;
        private final int level;

        Node(K id, int level) {
            this.id = id;
            this.level = level;
        }

        public K getId() {
            return id;
        }

        int getLevel() {
            return level;
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/library/BipartitenessCheck.java | src/main/java/org/apache/flink/graph/streaming/library/BipartitenessCheck.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.library;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.graph.streaming.EdgesFold;
import org.apache.flink.graph.streaming.SummaryBulkAggregation;
import org.apache.flink.graph.streaming.summaries.Candidates;
import org.apache.flink.graph.streaming.util.SignedVertex;
import org.apache.flink.types.NullValue;
import java.io.Serializable;
/**
 * Single-pass bipartiteness check on a streaming graph. A graph is bipartite
 * when its vertices can be split into two disjoint groups with no edge inside
 * a group. Built on {@link SummaryBulkAggregation}: each partition folds its
 * edges into a {@link Candidates} summary, and per-window summaries are then
 * reduced into a global verdict.
 *
 * @param <K> the vertex ID type
 * @param <EV> the edge value type
 */
public class BipartitenessCheck<K extends Serializable, EV> extends SummaryBulkAggregation<K, EV, Candidates, Candidates> implements Serializable {

    /**
     * Creates a bipartiteness check whose partial results are merged every
     * {@code mergeWindowTime} milliseconds. Pass the resulting object to the
     * aggregate function of {@link org.apache.flink.graph.streaming.GraphStream}.
     *
     * @param mergeWindowTime merge window length in milliseconds
     */
    public BipartitenessCheck(long mergeWindowTime) {
        super(new updateFunction(), new combineFunction(), new Candidates(true), mergeWindowTime, false);
    }

    /**
     * Turns one undirected edge into a two-vertex candidate component: the
     * smaller endpoint gets a positive sign, the larger one a negative sign.
     */
    public static Candidates edgeToCandidate(long v1, long v2) throws Exception {
        final long src = Math.min(v1, v2);
        final long trg = Math.max(v1, v2);
        Candidates candidate = new Candidates(true);
        candidate.add(src, new SignedVertex(src, true));
        candidate.add(src, new SignedVertex(trg, false));
        return candidate;
    }

    /**
     * Fold function: merges each incoming edge into the running
     * {@link Candidates} summary of its partition. The summary's success flag
     * turns false as soon as the partition's sub-graph stops being bipartite.
     *
     * @param <K> the vertex ID type
     */
    @SuppressWarnings("serial")
    public static class updateFunction<K extends Serializable> implements EdgesFold<Long, NullValue, Candidates> {

        /**
         * Merges the edge (v1, v2) into the accumulated candidate set.
         *
         * @param candidates the accumulator
         * @param v1 the first endpoint ID
         * @param v2 the second endpoint ID
         * @param edgeVal the (unused) edge value
         * @return the updated accumulator
         * @throws Exception propagated from the merge
         */
        @Override
        public Candidates foldEdges(Candidates candidates, Long v1, Long v2, NullValue edgeVal) throws Exception {
            return candidates.merge(edgeToCandidate(v1, v2));
        }
    }

    /**
     * Combine function: reduces partial per-window summaries into a single
     * global {@link Candidates} verdict.
     */
    public static class combineFunction implements ReduceFunction<Candidates> {

        /**
         * @param c1 the first partial summary
         * @param c2 the second partial summary
         * @return the merged summary; its success flag is false iff a sign
         *         conflict was found, i.e. the graph is not bipartite
         * @throws Exception propagated from the merge
         */
        @Override
        public Candidates reduce(Candidates c1, Candidates c2) throws Exception {
            return c1.merge(c2);
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/library/ConnectedComponents.java | src/main/java/org/apache/flink/graph/streaming/library/ConnectedComponents.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.library;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.graph.streaming.EdgesFold;
import org.apache.flink.graph.streaming.SummaryBulkAggregation;
import org.apache.flink.graph.streaming.example.IterativeConnectedComponents;
import org.apache.flink.graph.streaming.summaries.DisjointSet;
import org.apache.flink.types.NullValue;
import java.io.Serializable;
/**
* The Connected Components library method assigns a component ID to each vertex in the graph.
* Vertices that belong to the same component have the same component ID.
* This algorithm computes _weakly_ connected components, i.e. edge direction is ignored.
* <p>
* This is a single-pass implementation, which uses a {@link SummaryBulkAggregation} to periodically merge
* the partitioned state. For an iterative implementation, see {@link IterativeConnectedComponents}.
*
* @param <K> the vertex ID type
* @param <EV> the edge value type
*/
public class ConnectedComponents<K extends Serializable, EV> extends SummaryBulkAggregation<K, EV, DisjointSet<K>, DisjointSet<K>> implements Serializable {

	/**
	 * Creates a ConnectedComponents object using the WindowGraphAggregation class.
	 * To find the Connected Components, this object is passed as an argument to the
	 * aggregate function of the {@link org.apache.flink.graph.streaming.GraphStream} class.
	 * The constructor wires up the EdgesFold, the ReduceFunction, the initial value
	 * (an empty {@link DisjointSet}), the merge-window time and the transient-state
	 * flag of the underlying window graph aggregation.
	 *
	 * <p>Note: {@code UpdateCC} and {@code CombineCC} are instantiated with raw types
	 * on purpose: the fold function is declared on {@link NullValue} edges, while the
	 * {@code EV} type parameter of this class stays generic.
	 *
	 * @param mergeWindowTime Window time in milliseconds for the merger.
	 */
	public ConnectedComponents(long mergeWindowTime) {
		super(new UpdateCC(), new CombineCC(), new DisjointSet<K>(), mergeWindowTime, false);
	}

	/**
	 * Implements the EdgesFold interface: folds each incoming edge of a graph window
	 * into the running {@link DisjointSet} summary of that window.
	 * The edge stream is divided into windows and this function is applied to each
	 * window incrementally; whenever an edge arrives, its two endpoints are united,
	 * so vertices that are (transitively) connected inside the window end up in the
	 * same set, i.e. the same connected component.
	 *
	 * @param <K> the vertex ID type
	 */
	public final static class UpdateCC<K extends Serializable> implements EdgesFold<K, NullValue, DisjointSet<K>> {

		/**
		 * Unites the two endpoints of the given edge in the accumulated disjoint set,
		 * merging their components if they were previously distinct.
		 *
		 * @param ds the accumulated disjoint-set summary for this window
		 * @param vertex the source vertex ID
		 * @param vertex2 the target vertex ID
		 * @param edgeValue the edge value (unused)
		 * @return the updated disjoint-set summary
		 * @throws Exception if the union operation fails
		 */
		@Override
		public DisjointSet<K> foldEdges(DisjointSet<K> ds, K vertex, K vertex2, NullValue edgeValue) throws Exception {
			ds.union(vertex, vertex2);
			return ds;
		}
	}

	/**
	 * Implements the ReduceFunction interface: combines the per-window
	 * {@link DisjointSet} summaries into a single global summary.
	 * Components from different windows that share a vertex collapse into one,
	 * so the result describes the connected components of the whole graph.
	 *
	 * @param <K> the vertex ID type
	 */
	public static class CombineCC<K extends Serializable> implements ReduceFunction<DisjointSet<K>> {

		/**
		 * Merges the disjoint set with fewer entries into the larger one and
		 * returns the larger set.
		 *
		 * @param s1 The first value to combine.
		 * @param s2 The second value to combine.
		 * @return The combined value of both input values.
		 * @throws Exception if the merge fails
		 */
		@Override
		public DisjointSet<K> reduce(DisjointSet<K> s1, DisjointSet<K> s2) throws Exception {
			int count1 = s1.getMatches().size();
			int count2 = s2.getMatches().size();
			// merge the smaller summary into the larger one — presumably cheaper,
			// since only the smaller side is re-inserted (see DisjointSet.merge)
			if (count1 <= count2) {
				s2.merge(s1);
				return s2;
			}
			s1.merge(s2);
			return s1;
		}
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/library/Spanner.java | src/main/java/org/apache/flink/graph/streaming/library/Spanner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.library;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.graph.streaming.EdgesFold;
import org.apache.flink.graph.streaming.SummaryBulkAggregation;
import org.apache.flink.graph.streaming.summaries.AdjacencyListGraph;
import org.apache.flink.types.NullValue;
import java.io.Serializable;
/**
* The Spanner library method continuously computes a k-Spanner of an insertion-only edge stream.
* The user-defined parameter k defines the distance estimation error,
* i.e. a k-spanner preserves all distances with a factor of up to k.
* <p>
* This is a single-pass implementation, which uses a {@link SummaryBulkAggregation} to periodically merge
* the partitioned state.
*
* @param <K> the vertex ID type
* @param <EV> the edge value type
*/
public class Spanner<K extends Comparable<K>, EV> extends SummaryBulkAggregation<K, EV, AdjacencyListGraph<K>, AdjacencyListGraph<K>> implements Serializable {

	/** The distance error factor: the spanner preserves all distances with a factor of up to k. */
	private final int k;

	/**
	 * Creates a Spanner instance.
	 *
	 * <p>Note: {@code UpdateLocal} and {@code CombineSpanners} are instantiated with
	 * raw types on purpose: the fold function is declared on {@link NullValue} edges,
	 * while the {@code EV} type parameter of this class stays generic.
	 *
	 * @param mergeWindowTime Window time in milliseconds for the merger.
	 * @param k the distance error factor
	 */
	public Spanner(long mergeWindowTime, int k) {
		super(new UpdateLocal(k), new CombineSpanners(k), new AdjacencyListGraph<K>(), mergeWindowTime, false);
		this.k = k;
	}

	/**
	 * Decides whether to add an edge to the local spanner of the current window.
	 * If the current distance between the edge endpoints is {@code <= k} the edge
	 * is dropped, otherwise it is added to the local spanner.
	 *
	 * @param <K> the vertex ID type
	 */
	public final static class UpdateLocal<K extends Comparable<K>> implements EdgesFold<K, NullValue, AdjacencyListGraph<K>> {

		private final int factorK;

		public UpdateLocal(int k) {
			factorK = k;
		}

		/**
		 * Adds the edge (src, trg) to the local spanner unless a path of length
		 * at most k already connects its endpoints.
		 *
		 * @param g the local spanner accumulated so far
		 * @param src the source vertex ID
		 * @param trg the target vertex ID
		 * @param value the edge value (unused)
		 * @return the (possibly extended) local spanner
		 * @throws Exception if the distance check or the insertion fails
		 */
		@Override
		public AdjacencyListGraph<K> foldEdges(AdjacencyListGraph<K> g, K src, K trg, NullValue value) throws Exception {
			if (!g.boundedBFS(src, trg, factorK)) {
				// the current distance between src and trg is > k
				g.addEdge(src, trg);
			}
			return g;
		}
	}

	/**
	 * Merges the local spanners of each partition into the global spanner.
	 */
	public static class CombineSpanners<K extends Comparable<K>> implements ReduceFunction<AdjacencyListGraph<K>> {

		private final int factorK;

		public CombineSpanners(int k) {
			factorK = k;
		}

		/**
		 * Folds every edge of {@code source} into {@code target}, skipping edges
		 * whose endpoints are already within distance k of each other.
		 * Extracted to remove the duplicated loop that previously appeared in
		 * both branches of {@link #reduce}.
		 *
		 * @param source the spanner whose edges are re-inserted
		 * @param target the spanner that absorbs them
		 * @return {@code target}
		 */
		private AdjacencyListGraph<K> mergeInto(AdjacencyListGraph<K> source, AdjacencyListGraph<K> target) {
			for (K src : source.getAdjacencyMap().keySet()) {
				for (K trg : source.getAdjacencyMap().get(src)) {
					if (!target.boundedBFS(src, trg, factorK)) {
						// the current distance between src and trg is > k
						target.addEdge(src, trg);
					}
				}
			}
			return target;
		}

		/**
		 * Merges the spanner with the smaller adjacency map into the larger one,
		 * so that fewer vertices have to be re-checked.
		 *
		 * @param g1 The first value to combine.
		 * @param g2 The second value to combine.
		 * @return The combined spanner.
		 * @throws Exception if the merge fails
		 */
		@Override
		public AdjacencyListGraph<K> reduce(AdjacencyListGraph<K> g1, AdjacencyListGraph<K> g2) throws Exception {
			if (g1.getAdjacencyMap().size() > g2.getAdjacencyMap().size()) {
				return mergeInto(g2, g1);
			}
			return mergeInto(g1, g2);
		}
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/library/ConnectedComponentsTree.java | src/main/java/org/apache/flink/graph/streaming/library/ConnectedComponentsTree.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.library;
import org.apache.flink.graph.streaming.SummaryTreeReduce;
import org.apache.flink.graph.streaming.summaries.DisjointSet;
import java.io.Serializable;
/**
 * Single-pass Connected Components on an insertion-only edge stream, using a
 * {@link SummaryTreeReduce} — hierarchical merging of partial {@link DisjointSet}
 * summaries — instead of the flat aggregation used by
 * {@code ConnectedComponents}. It reuses that class's fold and combine functions.
 *
 * @param <K> the vertex ID type
 * @param <EV> the edge value type
 */
public class ConnectedComponentsTree<K extends Serializable, EV> extends SummaryTreeReduce<K, EV, DisjointSet<K>, DisjointSet<K>> implements Serializable {

	/**
	 * Creates a merge tree with an explicit degree.
	 *
	 * @param mergeWindowTime window time in milliseconds for the merger
	 * @param degree the merge-tree degree (fan-in) — see {@link SummaryTreeReduce}
	 */
	public ConnectedComponentsTree(long mergeWindowTime, int degree) {
		super(new ConnectedComponents.UpdateCC(), new ConnectedComponents.CombineCC(), new DisjointSet<K>(), mergeWindowTime, false, degree);
	}

	/**
	 * Creates a merge tree with the default degree.
	 *
	 * @param mergeWindowTime window time in milliseconds for the merger
	 */
	public ConnectedComponentsTree(long mergeWindowTime) {
		super(new ConnectedComponents.UpdateCC(), new ConnectedComponents.CombineCC(), new DisjointSet<K>(), mergeWindowTime, false);
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/SpannerExample.java | src/main/java/org/apache/flink/graph/streaming/example/SpannerExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.SummaryBulkAggregation;
import org.apache.flink.graph.streaming.library.Spanner;
import org.apache.flink.graph.streaming.summaries.AdjacencyListGraph;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.util.concurrent.TimeUnit;
/**
* This example uses the Spanner library method to continuously compute
* a k-Spanner of an insertion-only edge stream.
* The user-defined parameter k defines the distance estimation error,
* i.e. a k-spanner preserves all distances with a factor of up to k.
* <p>
* This is a single-pass implementation, which uses a {@link SummaryBulkAggregation} to periodically merge
* the partitioned state.
*/
public class SpannerExample implements ProgramDescription {

	public static void main(String[] args) throws Exception {

		if (!parseParameters(args)) {
			return;
		}

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		GraphStream<Long, NullValue, NullValue> edges = getGraphStream(env);

		DataStream<AdjacencyListGraph<Long>> spanner = edges.aggregate(new Spanner<Long, NullValue>(mergeWindowTime, k));

		// flatten the elements of the spanner and print them
		// in windows of printWindowTime
		spanner.flatMap(new FlattenSet())
			.keyBy(0).timeWindow(Time.of(printWindowTime, TimeUnit.MILLISECONDS))
			.fold(new Tuple2<>(0L, 0L), new IdentityFold()).print();

		env.execute("Streaming Spanner");
	}

	// *************************************************************************
	//     UTIL METHODS
	// *************************************************************************

	private static boolean fileOutput = false;
	private static String edgeInputPath = null;
	private static long mergeWindowTime = 1000;
	private static long printWindowTime = 2000;
	private static int k = 3;

	/**
	 * Parses the program arguments. When any arguments are given, exactly four are
	 * required: the edges input path, the merge window time, the print window time
	 * and the distance factor k.
	 *
	 * @param args the raw program arguments
	 * @return true if the program can proceed, false if it should exit
	 */
	private static boolean parseParameters(String[] args) {
		if (args.length > 0) {
			// BUG FIX: this previously checked for 3 arguments, but the usage
			// string lists four parameters and args[3] is read below, so a
			// 3-argument call crashed with ArrayIndexOutOfBoundsException.
			if (args.length != 4) {
				System.err.println("Usage: SpannerExample <input edges path> <merge window time (ms)> "
					+ "<print window time (ms)> <distance factor>");
				return false;
			}
			fileOutput = true;
			edgeInputPath = args[0];
			mergeWindowTime = Long.parseLong(args[1]);
			printWindowTime = Long.parseLong(args[2]);
			k = Integer.parseInt(args[3]);
		} else {
			System.out.println("Executing Spanner example with default parameters and built-in default data.");
			System.out.println(" Provide parameters to read input data from files.");
			System.out.println(" See the documentation for the correct format of input files.");
			System.out.println(" Usage: SpannerExample <input edges path> <merge window time (ms)> "
				+ "<print window time (ms)> <distance factor>");
		}
		return true;
	}

	/**
	 * Returns the edge stream: parsed from {@code edgeInputPath} (whitespace-separated
	 * "src trg" lines) when a file was given, the built-in sample graph otherwise.
	 */
	@SuppressWarnings("serial")
	private static GraphStream<Long, NullValue, NullValue> getGraphStream(StreamExecutionEnvironment env) {
		if (fileOutput) {
			return new SimpleEdgeStream<>(env.readTextFile(edgeInputPath)
				.map(new MapFunction<String, Edge<Long, NullValue>>() {
					@Override
					public Edge<Long, NullValue> map(String s) {
						String[] fields = s.split("\\s");
						long src = Long.parseLong(fields[0]);
						long trg = Long.parseLong(fields[1]);
						return new Edge<>(src, trg, NullValue.getInstance());
					}
				}), env);
		}
		return new SimpleEdgeStream<>(env.fromElements(
			new Edge<>(1L, 4L, NullValue.getInstance()),
			new Edge<>(4L, 7L, NullValue.getInstance()),
			new Edge<>(7L, 8L, NullValue.getInstance()),
			new Edge<>(4L, 8L, NullValue.getInstance()),
			new Edge<>(4L, 5L, NullValue.getInstance()),
			new Edge<>(5L, 6L, NullValue.getInstance()),
			new Edge<>(2L, 3L, NullValue.getInstance()),
			new Edge<>(3L, 4L, NullValue.getInstance()),
			new Edge<>(3L, 6L, NullValue.getInstance()),
			new Edge<>(8L, 9L, NullValue.getInstance()),
			new Edge<>(6L, 8L, NullValue.getInstance()),
			new Edge<>(5L, 9L, NullValue.getInstance())), env);
	}

	/**
	 * Emits one (src, trg) tuple per edge of the spanner's adjacency list.
	 * NOTE: the same Tuple2 instance is reused across collect() calls.
	 */
	@SuppressWarnings("serial")
	public static final class FlattenSet implements FlatMapFunction<AdjacencyListGraph<Long>, Tuple2<Long, Long>> {

		private Tuple2<Long, Long> t = new Tuple2<>();

		@Override
		public void flatMap(AdjacencyListGraph<Long> g, Collector<Tuple2<Long, Long>> out) {
			for (Long src : g.getAdjacencyMap().keySet()) {
				t.setField(src, 0);
				for (Long trg : g.getAdjacencyMap().get(src)) {
					t.setField(trg, 1);
					out.collect(t);
				}
			}
		}
	}

	/** Keeps only the latest tuple seen in each window. */
	@SuppressWarnings("serial")
	public static final class IdentityFold implements FoldFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {

		@Override
		public Tuple2<Long, Long> fold(Tuple2<Long, Long> accumulator, Tuple2<Long, Long> value) throws Exception {
			return value;
		}
	}

	@Override
	public String getDescription() {
		return "Streaming Spanner on Global Aggregation";
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/CentralizedWeightedMatching.java | src/main/java/org/apache/flink/graph/streaming/example/CentralizedWeightedMatching.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.util.MatchingEvent;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.util.HashSet;
import java.util.Set;
public class CentralizedWeightedMatching {

	/**
	 * Builds and runs the job: reads the MovieLens edge file, streams the edges
	 * through a single-instance greedy weighted-matching operator and prints the
	 * resulting matching events.
	 */
	public CentralizedWeightedMatching() throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();

		// Source: http://grouplens.org/datasets/movielens/
		@SuppressWarnings("serial")
		DataStream<Edge<Long, Long>> edges = env
			.readTextFile("movielens_10k_sorted.txt")
			.map(new MapFunction<String, Edge<Long, Long>>() {
				@Override
				public Edge<Long, Long> map(String s) throws Exception {
					String[] args = s.split("\t");
					long src = Long.parseLong(args[0]);
					// offset presumably keeps movie IDs disjoint from user IDs — TODO confirm
					long trg = Long.parseLong(args[1]) + 1000000;
					long val = Long.parseLong(args[2]) * 10;
					return new Edge<>(src, trg, val);
				}
			});

		GraphStream<Long, NullValue, Long> graph = new SimpleEdgeStream<>(edges, env);

		// the matcher keeps unsynchronized local state, so it must run with parallelism 1
		graph.getEdges()
			.flatMap(new WeightedMatchingFlatMapper()).setParallelism(1)
			.print().setParallelism(1);

		// NOTE(review): the job name looks like a copy-paste leftover from another example
		JobExecutionResult res = env.execute("Distributed Merge Tree Sandbox");

		long runtime = res.getNetRuntime();
		System.out.println("Runtime: " + runtime);
	}

	/**
	 * Greedy streaming weighted matching: a new edge replaces the set of edges it
	 * collides with only when its weight is more than twice their combined weight.
	 */
	@SuppressWarnings("serial")
	private static final class WeightedMatchingFlatMapper
			implements FlatMapFunction<Edge<Long, Long>, MatchingEvent> {

		// current matching; accessed single-threaded (operator runs with parallelism 1)
		private final Set<Edge<Long, Long>> localMatching = new HashSet<>();

		@Override
		public void flatMap(Edge<Long, Long> edge, Collector<MatchingEvent> out) throws Exception {
			// Find edges of the current matching that share an endpoint with the new edge
			Set<Edge<Long, Long>> collisions = new HashSet<>();
			for (Edge<Long, Long> localEdge : localMatching) {
				if (sharesEndpoint(localEdge, edge)) {
					collisions.add(localEdge);
				}
			}

			// Combined weight of all colliding edges
			long sum = 0;
			for (Edge<Long, Long> collidingEdge : collisions) {
				sum += collidingEdge.getValue();
			}

			// Replace the collisions only when the new edge is sufficiently heavier
			if (edge.getValue() > 2 * sum) {
				for (Edge<Long, Long> collidingEdge : collisions) {
					localMatching.remove(collidingEdge);
					out.collect(new MatchingEvent(MatchingEvent.Type.REMOVE, collidingEdge));
				}
				localMatching.add(edge);
				out.collect(new MatchingEvent(MatchingEvent.Type.ADD, edge));
			}
		}

		/** True when the two edges have at least one endpoint in common. */
		private static boolean sharesEndpoint(Edge<Long, Long> a, Edge<Long, Long> b) {
			return a.getSource().equals(b.getSource())
				|| a.getSource().equals(b.getTarget())
				|| a.getTarget().equals(b.getSource())
				|| a.getTarget().equals(b.getTarget());
		}
	}

	public static void main(String[] args) throws Exception {
		new CentralizedWeightedMatching();
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/BipartitenessCheckExample.java | src/main/java/org/apache/flink/graph/streaming/example/BipartitenessCheckExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.library.BipartitenessCheck;
import org.apache.flink.graph.streaming.summaries.Candidates;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
/**
* The bipartiteness check example tests whether an input graph is bipartite
* or not. A bipartite graph's vertices can be separated into two disjoint
 * groups, such that no two nodes inside the same group are connected by an edge.
* The example uses the merge-tree abstraction of our graph streaming API.
*/
public class BipartitenessCheckExample implements ProgramDescription {

	@SuppressWarnings("serial")
	public static void main(String[] args) throws Exception {

		// Set up the environment
		if (!parseParameters(args)) {
			return;
		}

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		GraphStream<Long, NullValue, NullValue> graph = new SimpleEdgeStream<>(getEdgesDataSet(env), env);

		// merge the partitioned bipartiteness state every 500 ms
		DataStream<Candidates> bipartition = graph.aggregate
			(new BipartitenessCheck<Long, NullValue>(500L));

		// Emit the results
		if (fileOutput) {
			bipartition.writeAsCsv(outputPath);
		} else {
			bipartition.print();
		}

		env.execute("Bipartiteness Check");
	}

	// *************************************************************************
	//     UTIL METHODS
	// *************************************************************************

	private static boolean fileOutput = false;
	private static String edgeInputPath = null;
	private static String outputPath = null;

	/**
	 * Parses the program arguments: either none (built-in data) or exactly
	 * two (edges input path and output path).
	 *
	 * @param args the raw program arguments
	 * @return true if the program can proceed, false if it should exit
	 */
	private static boolean parseParameters(String[] args) {
		if (args.length > 0) {
			if (args.length != 2) {
				System.err.println("Usage: BipartitenessCheckExample <input edges path> <output path>");
				return false;
			}
			fileOutput = true;
			edgeInputPath = args[0];
			outputPath = args[1];
		} else {
			System.out.println("Executing BipartitenessCheckExample example with default parameters and built-in default data.");
			System.out.println(" Provide parameters to read input data from files.");
			System.out.println(" See the documentation for the correct format of input files.");
			System.out.println(" Usage: BipartitenessCheckExample <input edges path> <output path>");
		}
		return true;
	}

	/**
	 * Returns the edge stream: parsed from {@code edgeInputPath} (tab-separated
	 * "src\ttrg" lines) when a file was given, a generated bipartite graph otherwise.
	 */
	@SuppressWarnings("serial")
	private static DataStream<Edge<Long, NullValue>> getEdgesDataSet(StreamExecutionEnvironment env) {
		if (fileOutput) {
			return env.readTextFile(edgeInputPath)
				.map(new MapFunction<String, Edge<Long, NullValue>>() {
					@Override
					public Edge<Long, NullValue> map(String s) throws Exception {
						String[] fields = s.split("\\t");
						long src = Long.parseLong(fields[0]);
						long trg = Long.parseLong(fields[1]);
						return new Edge<>(src, trg, NullValue.getInstance());
					}
				});
		}
		// Default data: each key k produces the edge (k, 2k + 1); targets are strictly
		// larger than sources, so no cycle can form and the graph is bipartite.
		// NOTE(review): the loop emits the same edge 10 times, since the target does not
		// depend on i — presumably intentional to create stream volume; confirm.
		return env.generateSequence(1, 100).flatMap(
			new FlatMapFunction<Long, Edge<Long, NullValue>>() {
				@Override
				public void flatMap(Long key, Collector<Edge<Long, NullValue>> out) throws Exception {
					for (int i = 0; i < 10; i++) {
						long target = key * 2 + 1;
						out.collect(new Edge<>(key, target, NullValue.getInstance()));
					}
				}
			});
	}

	@Override
	public String getDescription() {
		return "Bipartiteness Check";
	}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/ExactTriangleCount.java | src/main/java/org/apache/flink/graph/streaming/example/ExactTriangleCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
/**
* Single-pass, insertion-only exact Triangle Local and Global Count algorithm.
* <p>
* Based on http://www.kdd.org/kdd2016/papers/files/rfp0465-de-stefaniA.pdf.
*/
public class ExactTriangleCount {
public static void main(String[] args) throws Exception {
if (!parseParameters(args)) {
return;
}
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
SimpleEdgeStream<Integer, NullValue> edges = getGraphStream(env);
DataStream<Tuple2<Integer, Integer>> result =
edges.buildNeighborhood(false)
.map(new ProjectCanonicalEdges())
.keyBy(0, 1).flatMap(new IntersectNeighborhoods())
.keyBy(0).flatMap(new SumAndEmitCounters());
if (resultPath != null) {
result.writeAsText(resultPath);
}
else {
result.print();
}
env.execute("Exact Triangle Count");
}
// *** Transformation Methods *** //
/**
* Receives 2 tuples from the same edge (src + target) and intersects the attached neighborhoods.
* For each common neighbor, increase local and global counters.
*/
public static final class IntersectNeighborhoods implements
FlatMapFunction<Tuple3<Integer, Integer, TreeSet<Integer>>, Tuple2<Integer, Integer>> {
Map<Tuple2<Integer, Integer>, TreeSet<Integer>> neighborhoods = new HashMap<>();
public void flatMap(Tuple3<Integer, Integer, TreeSet<Integer>> t, Collector<Tuple2<Integer, Integer>> out) {
//intersect neighborhoods and emit local and global counters
Tuple2<Integer, Integer> key = new Tuple2<>(t.f0, t.f1);
if (neighborhoods.containsKey(key)) {
// this is the 2nd neighborhood => intersect
TreeSet<Integer> t1 = neighborhoods.remove(key);
TreeSet<Integer> t2 = t.f2;
int counter = 0;
if (t1.size() < t2.size()) {
// iterate t1 and search t2
for (int i : t1) {
if (t2.contains(i)) {
counter++;
out.collect(new Tuple2<>(i, 1));
}
}
} else {
// iterate t2 and search t1
for (int i : t2) {
if (t1.contains(i)) {
counter++;
out.collect(new Tuple2<>(i, 1));
}
}
}
if (counter > 0) {
//emit counter for srcID, trgID, and total
out.collect(new Tuple2<>(t.f0, counter));
out.collect(new Tuple2<>(t.f1, counter));
// -1 signals the total counter
out.collect(new Tuple2<>(-1, counter));
}
} else {
// first neighborhood for this edge: store and wait for next
neighborhoods.put(key, t.f2);
}
}
}
/**
* Sums up and emits local and global counters.
*/
public static final class SumAndEmitCounters implements FlatMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
Map<Integer, Integer> counts = new HashMap<>();
public void flatMap(Tuple2<Integer, Integer> t, Collector<Tuple2<Integer, Integer>> out) {
if (counts.containsKey(t.f0)) {
int newCount = counts.get(t.f0) + t.f1;
counts.put(t.f0, newCount);
out.collect(new Tuple2<>(t.f0, newCount));
} else {
counts.put(t.f0, t.f1);
out.collect(new Tuple2<>(t.f0, t.f1));
}
}
}
public static final class ProjectCanonicalEdges implements
MapFunction<Tuple3<Integer, Integer, TreeSet<Integer>>, Tuple3<Integer, Integer, TreeSet<Integer>>> {
@Override
public Tuple3<Integer, Integer, TreeSet<Integer>> map(Tuple3<Integer, Integer, TreeSet<Integer>> t) {
int source = Math.min(t.f0, t.f1);
int trg = Math.max(t.f0, t.f1);
t.setField(source, 0);
t.setField(trg, 1);
return t;
}
}
// *************************************************************************
// UTIL METHODS
// *************************************************************************
private static boolean fileOutput = false;
private static String edgeInputPath = null;
private static String resultPath = null;
private static boolean parseParameters(String[] args) {
if (args.length > 0) {
if (args.length != 2) {
System.err.println("Usage: ExactTriangleCount <input edges path> <result path>");
return false;
}
fileOutput = true;
edgeInputPath = args[0];
resultPath = args[1];
} else {
System.out.println("Executing ExactTriangleCount example with default parameters and built-in default data.");
System.out.println(" Provide parameters to read input data from files.");
System.out.println(" See the documentation for the correct format of input files.");
System.out.println(" Usage: ExactTriangleCount <input edges path> <result path>");
}
return true;
}
/**
 * Builds the input edge stream: whitespace-separated "src trg" pairs read
 * from the configured file (lines starting with "%" are treated as
 * comments), or a small built-in sample graph when no file was given.
 */
@SuppressWarnings("serial")
private static SimpleEdgeStream<Integer, NullValue> getGraphStream(StreamExecutionEnvironment env) {
    if (!fileOutput) {
        // No input file configured: use the built-in default graph.
        return new SimpleEdgeStream<>(env.fromElements(
                new Edge<>(1, 2, NullValue.getInstance()),
                new Edge<>(2, 3, NullValue.getInstance()),
                new Edge<>(2, 6, NullValue.getInstance()),
                new Edge<>(5, 6, NullValue.getInstance()),
                new Edge<>(1, 4, NullValue.getInstance()),
                new Edge<>(5, 3, NullValue.getInstance()),
                new Edge<>(3, 4, NullValue.getInstance()),
                new Edge<>(3, 6, NullValue.getInstance()),
                new Edge<>(1, 3, NullValue.getInstance())), env);
    }
    return new SimpleEdgeStream<>(env.readTextFile(edgeInputPath)
            .flatMap(new FlatMapFunction<String, Edge<Integer, NullValue>>() {
                @Override
                public void flatMap(String line, Collector<Edge<Integer, NullValue>> out) {
                    String[] tokens = line.split("\\s");
                    if (tokens[0].equals("%")) {
                        return; // comment line
                    }
                    out.collect(new Edge<>(
                            Integer.parseInt(tokens[0]),
                            Integer.parseInt(tokens[1]),
                            NullValue.getInstance()));
                }
            }), env);
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/IterativeConnectedComponents.java | src/main/java/org/apache/flink/graph/streaming/example/IterativeConnectedComponents.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.IterativeStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
/**
* The Connected Components algorithm assigns a component ID to each vertex in the graph.
* Vertices that belong to the same component have the same component ID.
* This algorithm computes _weakly_ connected components, i.e. edge direction is ignored.
* <p>
* This implementation uses streaming iterations to asynchronously merge state among partitions.
* For a single-pass implementation, see {@link ConnectedComponentsExample}.
*/
public class IterativeConnectedComponents implements ProgramDescription {
public static void main(String[] args) throws Exception {
    // Abort early on invalid command-line arguments.
    if (!parseParameters(args)) {
        return;
    }
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Feed the edge stream into a streaming iteration: each partition keeps
    // local component state and the (vertex, componentId) assignments are
    // both emitted and fed back into the loop.
    DataStream<Tuple2<Long, Long>> edges = getEdgesDataSet(env);
    IterativeStream<Tuple2<Long, Long>> iteration = edges.iterate();
    DataStream<Tuple2<Long, Long>> result =
            iteration.closeWith(iteration.keyBy(0).flatMap(new AssignComponents()));
    // Emit the results
    result.print();
    env.execute("Streaming Connected Components");
}
@SuppressWarnings("serial")
public static class AssignComponents extends RichFlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
private HashMap<Long, HashSet<Long>> components = new HashMap<>();
@Override
public void flatMap(Tuple2<Long, Long> edge, Collector<Tuple2<Long, Long>> out) {
    final long src = edge.f0;
    final long trg = edge.f1;
    // Find which (if any) locally known component each endpoint belongs to.
    long srcComponent = -1;
    long trgComponent = -1;
    for (Entry<Long, HashSet<Long>> entry : components.entrySet()) {
        if (srcComponent != -1 && trgComponent != -1) {
            break; // both endpoints already resolved
        }
        HashSet<Long> members = entry.getValue();
        if (members.contains(src)) {
            srcComponent = entry.getKey();
        }
        if (members.contains(trg)) {
            trgComponent = entry.getKey();
        }
    }
    // Dispatch on which endpoints are already known.
    if (srcComponent != -1 && trgComponent != -1) {
        // both endpoints known: merge their components
        merge(srcComponent, trgComponent, out);
    } else if (srcComponent != -1) {
        // only the source is known: pull the target into its component
        addToExistingComponent(srcComponent, trg, out);
    } else if (trgComponent != -1) {
        // only the target is known: pull the source into its component
        addToExistingComponent(trgComponent, src, out);
    } else {
        // neither endpoint is known: start a fresh component
        createNewComponent(src, trg, out);
    }
}
/** Registers a new component holding both endpoints; its id is the smaller vertex id. */
private void createNewComponent(long sourceId, long targetId, Collector<Tuple2<Long, Long>> out) {
    final long componentId = Math.min(sourceId, targetId);
    HashSet<Long> members = new HashSet<>();
    members.add(sourceId);
    members.add(targetId);
    components.put(componentId, members);
    // Announce the assignment of both vertices.
    out.collect(new Tuple2<Long, Long>(sourceId, componentId));
    out.collect(new Tuple2<Long, Long>(targetId, componentId));
}
/**
 * Adds vertex {@code toAdd} to the existing component {@code componentId}
 * and emits the affected (vertex, componentId) assignments.
 * <p>
 * Component ids are minimum vertex ids, so when the new vertex id is
 * smaller than the current component id the component is re-keyed to
 * {@code toAdd} and all previous members are re-announced with the new id.
 */
private void addToExistingComponent(long componentId, long toAdd, Collector<Tuple2<Long, Long>> out) {
    HashSet<Long> vertices = components.remove(componentId);
    if (componentId >= toAdd) {
        // The new vertex becomes the component id: re-announce every member.
        for (long v : vertices) {
            out.collect(new Tuple2<Long, Long>(v, toAdd));
        }
        vertices.add(componentId);
        // BUG FIX: the new vertex itself must be stored as a member and its
        // own assignment emitted; previously it was dropped, so later edges
        // touching it could not find its component and created a duplicate.
        vertices.add(toAdd);
        out.collect(new Tuple2<Long, Long>(toAdd, toAdd));
        components.put(toAdd, vertices);
    } else {
        // Component id stays: only the new vertex needs announcing.
        vertices.add(toAdd);
        components.put(componentId, vertices);
        out.collect(new Tuple2<Long, Long>(toAdd, componentId));
    }
}
/**
 * Merges the two components into one keyed by the smaller of the two ids
 * and emits the new assignment for every vertex whose component id changed.
 * <p>
 * NOTE(review): when sourceComp == trgComp (both endpoints already in the
 * same component) the second remove() returns null; the null checks below
 * make that case a no-op re-insertion — TODO confirm this is the intent.
 */
private void merge(long sourceComp, long trgComp, Collector<Tuple2<Long, Long>> out) {
    HashSet<Long> srcVertexSet = components.remove(sourceComp);
    HashSet<Long> trgVertexSet = components.remove(trgComp);
    // The surviving component id is the smaller of the two.
    long componentId = Math.min(sourceComp, trgComp);
    if (sourceComp == componentId) {
        // The source component wins: only the target's vertices change id.
        if (trgVertexSet!= null) {
            for (long v: trgVertexSet) {
                out.collect(new Tuple2<Long, Long>(v, componentId));
            }
        }
    }
    else {
        // The target component wins: only the source's vertices change id.
        if (srcVertexSet != null) {
            for (long v: srcVertexSet) {
                out.collect(new Tuple2<Long, Long>(v, componentId));
            }
        }
    }
    // Fold the target's members into the source's set and store the union
    // under the surviving id.
    if (trgVertexSet!= null) {
        srcVertexSet.addAll(trgVertexSet);
    }
    components.put(componentId, srcVertexSet);
}
}
// *************************************************************************
// UTIL METHODS
// *************************************************************************
// Runtime configuration, populated by parseParameters().
private static boolean fileOutput = false;
private static String edgeInputPath = null;

/**
 * Parses the command-line arguments. With no arguments the built-in sample
 * data is used; otherwise exactly one argument (the edge file) is required.
 *
 * @return false if the arguments are invalid and the job should abort
 */
private static boolean parseParameters(String[] args) {
    if (args.length == 0) {
        // No arguments: run on the built-in default data.
        System.out.println("Executing ConnectedComponentsExample example with default parameters and built-in default data.");
        System.out.println("  Provide parameters to read input data from files.");
        System.out.println("  See the documentation for the correct format of input files.");
        System.out.println("  Usage: ConnectedComponentsExample <input edges path>");
        return true;
    }
    if (args.length != 1) {
        System.err.println("Usage: ConnectedComponentsExample <input edges path>");
        return false;
    }
    fileOutput = true;
    edgeInputPath = args[0];
    return true;
}
/**
 * Builds the input edge stream: tab-separated "src\ttrg" pairs read from
 * the configured file, or a small generated chain graph by default.
 */
@SuppressWarnings("serial")
private static DataStream<Tuple2<Long, Long>> getEdgesDataSet(StreamExecutionEnvironment env) {
    if (fileOutput) {
        return env.readTextFile(edgeInputPath)
                .map(new MapFunction<String, Tuple2<Long, Long>>() {
                    @Override
                    public Tuple2<Long, Long> map(String s) {
                        // One edge per line: "<src>\t<trg>".
                        String[] fields = s.split("\\t");
                        long src = Long.parseLong(fields[0]);
                        long trg = Long.parseLong(fields[1]);
                        return new Tuple2<>(src, trg);
                    }
                });
    }
    // Default data: for each k in [1,10] emit edges (k, k+1) and (k, k+2).
    return env.generateSequence(1, 10).flatMap(
            new FlatMapFunction<Long, Tuple2<Long, Long>>() {
                @Override
                public void flatMap(Long key, Collector<Tuple2<Long, Long>> out) throws Exception {
                    for (int i = 1; i < 3; i++) {
                        long target = key + i;
                        out.collect(new Tuple2<>(key, target));
                    }
                }
            });
}
@Override
public String getDescription() {
    // Human-readable job name reported to the Flink client.
    return "Streaming Connected Components";
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/IncidenceSamplingTriangleCount.java | src/main/java/org/apache/flink/graph/streaming/example/IncidenceSamplingTriangleCount.java | package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.util.SampledEdge;
import org.apache.flink.graph.streaming.util.TriangleEstimate;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
public class IncidenceSamplingTriangleCount implements ProgramDescription {
public static void main(String[] args) throws Exception {
    // Set up the environment; abort on invalid arguments.
    if(!parseParameters(args)) {
        return;
    }
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Edge<Long, NullValue>> edges = getEdgesDataSet(env);
    // Split the global sample budget evenly across parallel subtasks
    // (integer division: a remainder of samples is silently dropped).
    int localSamples = samples / env.getParallelism();
    // Count triangles: a single sampler partitions edges to instances,
    // keyed samplers track candidate triangles, and a single summer
    // aggregates the per-subtask estimates.
    DataStream<Tuple2<Integer, Integer>> triangles = edges
        .flatMap(new EdgeSampleMapper(localSamples, env.getParallelism()))
        .setParallelism(1)
        .keyBy(0)
        .flatMap(new TriangleSampleMapper(localSamples, vertexCount))
        .flatMap(new TriangleSummer(samples, vertexCount))
        .setParallelism(1);
    // Emit the results
    if (fileOutput) {
        triangles.writeAsCsv(outputPath);
    } else {
        triangles.print();
    }
    env.execute("Incidence Sampling Triangle Count");
}
// *************************************************************************
// TRIANGLE COUNT FUNCTIONS
// *************************************************************************
/**
 * Runs with parallelism 1 and decides, per sampling instance, which edges
 * are forwarded downstream: an edge is either (re)sampled for an instance
 * (reservoir-style, probability 1/edgeCount) or forwarded because it is
 * incident to that instance's currently sampled edge.
 */
@SuppressWarnings("serial")
private static final class EdgeSampleMapper extends RichFlatMapFunction<Edge<Long, NullValue>, SampledEdge> {
    // instanceSize = samples per downstream subtask; p = downstream parallelism.
    private final int instanceSize, p;
    // One seeded RNG per (subtask, instance) pair, for reproducible coin flips.
    private final List<Random> randoms;
    // Currently sampled edge per (subtask, instance) pair; null until first sample.
    private final List<Edge<Long, NullValue>> samples;
    // Number of edges seen so far.
    private int edgeCount;

    public EdgeSampleMapper(int instanceSize, int p) {
        this.instanceSize = instanceSize;
        this.p = p;
        this.edgeCount = 0;
        // Initialize seeds: a fixed master seed makes the run deterministic.
        randoms = new ArrayList<>();
        samples = new ArrayList<>();
        Random r = new Random(0xDEADBEEF);
        for (int i = 0; i < instanceSize * p; ++i) {
            randoms.add(new Random(r.nextInt()));
            samples.add(null);
        }
    }

    @Override
    public void flatMap(Edge<Long, NullValue> edge, Collector<SampledEdge> out) throws Exception {
        this.edgeCount++;
        // Flip a coin for every (subtask, instance) slot.
        for (int i = 0; i < instanceSize * p; ++i) {
            boolean sample = Coin.flip(this.edgeCount, randoms.get(i));
            // Slot i maps to downstream subtask (i % p), local instance (i / p).
            int subtask = i % p;
            int instance = i / p;
            if (sample) {
                // Resample: this edge replaces the slot's reservoir edge.
                out.collect(new SampledEdge(subtask, instance, edge, edgeCount, true));
                samples.set(i, edge);
            } else if (samples.get(i) != null) {
                // Forward the edge only if it shares a vertex with the sampled one.
                Edge<Long, NullValue> e = samples.get(i);
                boolean incidence = e.getSource().equals(edge.getSource())
                        || e.getSource().equals(edge.getTarget())
                        || e.getTarget().equals(edge.getSource())
                        || e.getTarget().equals(edge.getTarget());
                if (incidence) {
                    out.collect(new SampledEdge(subtask, instance, edge, edgeCount, false));
                }
            }
        }
    }
}
/**
 * Keeps one candidate triangle per local sampling instance: the sampled
 * edge (src, trg) plus a uniformly chosen third vertex. beta flips to 1
 * once both closing edges have been observed; changes in the local beta
 * sum are emitted as {@link TriangleEstimate}s.
 */
@SuppressWarnings("serial")
private static final class TriangleSampleMapper extends RichFlatMapFunction<SampledEdge, TriangleEstimate> {
    // One independent candidate-triangle state per local sample.
    private List<SampleTriangleState> states;
    // Highest edge sequence number seen (carried in the input records).
    private int edgeCount;
    // Last emitted beta sum, used to suppress duplicate estimates.
    private int previousResult;
    // Total vertex count of the graph, for third-vertex sampling.
    private int vertices;

    public TriangleSampleMapper(int size, int vertices) {
        this.states = new ArrayList<>();
        this.edgeCount = 0;
        this.previousResult = 0;
        this.vertices = vertices;
        for (int i = 0; i < size; ++i) {
            states.add(new SampleTriangleState());
        }
    }

    @Override
    public void flatMap(SampledEdge input, Collector<TriangleEstimate> out) throws Exception {
        Edge<Long, NullValue> edge = input.getEdge();
        // Update edge count
        edgeCount = input.getEdgeCount();
        SampleTriangleState state = states.get(input.getInstance());
        // With probability 1/i sample a candidate (already flipped the coin during partitioning)
        if (input.isResampled()) {
            state.srcVertex = edge.getSource();
            state.trgVertex = edge.getTarget();
            // Randomly sample the third vertex from V \ {src, trg}
            // (note: uses unseeded Math.random(), unlike the partitioner's seeded RNGs)
            while (true) {
                state.thirdVertex = (int) Math.floor(Math.random() * vertices);
                if (state.thirdVertex != state.srcVertex && state.thirdVertex != state.trgVertex) {
                    break;
                }
            }
            state.srcEdgeFound = false;
            state.trgEdgeFound = false;
            state.beta = 0;
        }
        // Update beta
        boolean triangleFound = false;
        if (state.beta == 0) {
            // Check if any of the two remaining edges in the candidate has been found
            if ((edge.getSource() == state.srcVertex && edge.getTarget() == state.thirdVertex)
                    || (edge.getSource() == state.thirdVertex && edge.getTarget() == state.srcVertex)) {
                state.srcEdgeFound = true;
            }
            if ((edge.getSource() == state.trgVertex && edge.getTarget() == state.thirdVertex)
                    || (edge.getSource() == state.thirdVertex && edge.getTarget() == state.trgVertex)) {
                state.trgEdgeFound = true;
            }
            triangleFound = (state.srcEdgeFound && state.trgEdgeFound);
            state.beta = triangleFound ? 1 : 0;
        }
        // Sum local betas and emit only when a new triangle changed the sum.
        if (triangleFound) {
            int localBetaSum = 0;
            for (SampleTriangleState s : states) {
                localBetaSum += s.beta;
            }
            if (localBetaSum != previousResult) {
                previousResult = localBetaSum;
                int source = getRuntimeContext().getIndexOfThisSubtask();
                out.collect(new TriangleEstimate(source, edgeCount, localBetaSum));
            }
        }
    }
}
/**
 * Aggregates the per-subtask beta estimates into a global triangle-count
 * estimate and emits (edges seen, estimate) whenever the estimate changes.
 */
@SuppressWarnings("serial")
private static final class TriangleSummer
        implements FlatMapFunction<TriangleEstimate, Tuple2<Integer, Integer>> {
    // Latest estimate received from each source subtask.
    private Map<Integer, TriangleEstimate> results;
    // Largest edge count reported so far.
    private int maxEdges;
    private int sampleSize;
    // Last emitted estimate, used to suppress duplicates.
    private int previousResult;
    private int vertices;

    public TriangleSummer(int sampleSize, int vertices) {
        this.results = new HashMap<>();
        this.maxEdges = 0;
        this.sampleSize = sampleSize;
        this.previousResult = 0;
        this.vertices = vertices;
    }

    @Override
    public void flatMap(TriangleEstimate estimate, Collector<Tuple2<Integer, Integer>> out) throws Exception {
        // Keep only the newest estimate per subtask and track the highest
        // edge count observed.
        results.put(estimate.getSource(), estimate);
        maxEdges = Math.max(maxEdges, estimate.getEdgeCount());
        // Sum the per-subtask beta counters.
        int globalBetaSum = 0;
        for (TriangleEstimate e : results.values()) {
            globalBetaSum += e.getBeta();
        }
        // Scale the sampled count up to a whole-graph estimate.
        int scaled = (int) ((1.0 / (double) sampleSize) * globalBetaSum * maxEdges * (vertices - 2));
        if (scaled != previousResult) {
            previousResult = scaled;
            out.collect(new Tuple2<>(maxEdges, scaled));
        }
    }
}
/**
 * Mutable per-instance sampling state: the candidate triangle's three
 * vertices, which of the two closing edges have been seen, and the
 * resulting beta indicator (1 once the triangle is complete).
 */
@SuppressWarnings("serial")
private static final class SampleTriangleState implements Serializable {
    // 1 when the candidate triangle has been fully observed, else 0.
    public long beta;
    // Endpoints of the sampled edge.
    public long srcVertex;
    public long trgVertex;
    // Uniformly sampled third vertex; -1 until first sampling.
    public long thirdVertex;
    // Whether (src, third) / (trg, third) have been observed.
    public boolean srcEdgeFound;
    public boolean trgEdgeFound;
    // Number of coin flips taken so far (used by Coin.flip in the broadcast variant).
    public int i;

    public SampleTriangleState() {
        this.beta = 0L;
        this.thirdVertex = -1L;
        this.srcEdgeFound = false;
        this.trgEdgeFound = false;
        i = 1;
    }
}
private static final class Coin {
    /**
     * Returns true with probability ~1/size: a uniform draw in [0, 1)
     * scaled by {@code size} lands at or below 1.0.
     */
    public static boolean flip(int size, Random rnd) {
        return rnd.nextDouble() * size <= 1.0;
    }
}
// *************************************************************************
// UTIL METHODS
// *************************************************************************
// Runtime configuration, populated by parseParameters().
private static boolean fileOutput = false;
private static String edgeInputPath = null;
private static String outputPath = null;
private static int vertexCount = 1000;
private static int samples = 10000;

/**
 * Parses the command-line arguments. With no arguments the built-in sample
 * data is used; otherwise exactly four arguments are required.
 *
 * @return false if the arguments are invalid and the job should abort
 */
private static boolean parseParameters(String[] args) {
    if (args.length == 0) {
        // No arguments: run on the built-in default data.
        System.out.println("Executing IncidenceSamplingTriangleCount example with default parameters and built-in default data.");
        System.out.println("  Provide parameters to read input data from files.");
        System.out.println("  See the documentation for the correct format of input files.");
        System.out.println("  Usage: IncidenceSamplingTriangleCount <input edges path> <output path> <vertex count> <sample count>");
        return true;
    }
    if (args.length != 4) {
        System.err.println("Usage: IncidenceSamplingTriangleCount <input edges path> <output path> <vertex count> <sample count>");
        return false;
    }
    fileOutput = true;
    edgeInputPath = args[0];
    outputPath = args[1];
    vertexCount = Integer.parseInt(args[2]);
    samples = Integer.parseInt(args[3]);
    return true;
}
/**
 * Builds the input edge stream: tab-separated "src\ttrg" pairs read from
 * the configured file, or a generated 1000-vertex circulant graph
 * (edges to key+2 and key+4, mod 1000) by default.
 */
@SuppressWarnings("serial")
private static DataStream<Edge<Long, NullValue>> getEdgesDataSet(StreamExecutionEnvironment env) {
    if (fileOutput) {
        return env.readTextFile(edgeInputPath)
                .map(new MapFunction<String, Edge<Long, NullValue>>() {
                    @Override
                    public Edge<Long, NullValue> map(String s) throws Exception {
                        // One edge per line: "<src>\t<trg>".
                        String[] fields = s.split("\\t");
                        long src = Long.parseLong(fields[0]);
                        long trg = Long.parseLong(fields[1]);
                        return new Edge<>(src, trg, NullValue.getInstance());
                    }
                });
    }
    return env.generateSequence(0, 999).flatMap(
            new FlatMapFunction<Long, Edge<Long, NullValue>>() {
                @Override
                public void flatMap(Long key, Collector<Edge<Long, NullValue>> out) throws Exception {
                    out.collect(new Edge<>(key, (key + 2) % 1000, NullValue.getInstance()));
                    out.collect(new Edge<>(key, (key + 4) % 1000, NullValue.getInstance()));
                }
            });
}
@Override
public String getDescription() {
    // Human-readable job name reported to the Flink client.
    return "Incidence Sampling Triangle Count";
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/DegreeDistribution.java | src/main/java/org/apache/flink/graph/streaming/example/DegreeDistribution.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.streaming.EventType;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import java.util.HashMap;
import java.util.Map;
/**
* The Degree Distribution algorithm emits a stream of (degree, count)
* and works for fully dynamic streams of edges, i.e. both edge additions and deletions.
* <p>
* NOTE: The algorithm does not check the edge stream for consistency,
* i.e. it is assumed that an edge deletion refers to a previously added edge
* and will always have effect. However, a vertex degree won't be further decremented if 0.
* Adding the same edge multiple times will always have effect.
*/
public class DegreeDistribution {
/**
 * Pipeline: edge events -> per-endpoint degree deltas -> per-vertex degree
 * maintenance -> per-degree counts, written as text to the result path.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    if (!parseParameters(args, env)) {
        return;
    }
    DataStream<Tuple3<Integer, Integer, EventType>> edges = getGraphStream(env);
    // 1. emit (vertexID, 1) or (vertexID, -1) for addition or deletion
    edges.flatMap(new EmitVerticesWithChange())
        // group by vertex ID and maintain degree per vertex
        .keyBy(0).flatMap(new VertexDegreeCounts())
        // group by degree and emit current count
        .keyBy(0).map(new DegreeDistributionMap())
        .writeAsText(resultPath);
    env.execute("Streaming Degree Distribution");
}
// *** Transformation Methods *** //
/**
* Transforms an event into tuples of (srcID, change), (trgID, change)
* where change = 1 for an addition and change = -1 for a deletion.
*/
private static final class EmitVerticesWithChange implements
FlatMapFunction<Tuple3<Integer, Integer, EventType>, Tuple2<Integer, Integer>> {
public void flatMap(Tuple3<Integer, Integer, EventType> t, Collector<Tuple2<Integer, Integer>> c) {
// output <vertexID, degreeChange>
int change = t.f2.equals(EventType.EDGE_ADDITION) ? 1 : -1 ;
c.collect(new Tuple2<>(t.f0, change));
c.collect(new Tuple2<>(t.f1, change));
}
}
/**
* Maintains a hash map of vertex ID -> degree and emits changes in the form of (degree, change).
*/
private static final class VertexDegreeCounts implements FlatMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
Map<Integer, Integer> verticesWithDegrees = new HashMap<>();
public void flatMap(Tuple2<Integer, Integer> t, Collector<Tuple2<Integer, Integer>> c) {
// output <degree, localCount>
if (verticesWithDegrees.containsKey(t.f0)) {
// update existing vertex
int oldDegree = verticesWithDegrees.get(t.f0);
int newDegree = oldDegree + t.f1;
if (newDegree > 0) {
verticesWithDegrees.put(t.f0, newDegree);
c.collect(new Tuple2<>(newDegree, 1));
}
else {
// if the current degree is <= 0: remove the vertex
verticesWithDegrees.remove(t.f0);
}
c.collect(new Tuple2<>(oldDegree, -1));
} else {
// first time we see this vertex
if (t.f1 > 0) {
verticesWithDegrees.put(t.f0, 1);
c.collect(new Tuple2<>(1, 1));
}
}
}
}
/**
* Computes degree distribution and emits (degree, count) tuples for every change.
*/
private static final class DegreeDistributionMap implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
Map<Integer, Integer> degreesWithCounts = new HashMap<>();
public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> t) {
if (degreesWithCounts.containsKey(t.f0)) {
// update existing degree
int newCount = degreesWithCounts.get(t.f0) + t.f1;
degreesWithCounts.put(t.f0, newCount);
return new Tuple2<>(t.f0, newCount);
} else {
// first time degree
degreesWithCounts.put(t.f0, t.f1);
return new Tuple2<>(t.f0, t.f1);
}
}
}
// *************************************************************************
// UTIL METHODS
// *************************************************************************
// Runtime configuration, populated by parseParameters().
private static boolean fileOutput = false;
private static String edgeInputPath = null;
private static String resultPath = null;

/**
 * Parses command-line arguments: input path, result path and an optional
 * parallelism. With no arguments the built-in default data is used.
 *
 * @return false if the arguments are invalid and the job should abort
 */
private static boolean parseParameters(String[] args, StreamExecutionEnvironment env) {
    if (args.length == 0) {
        // No arguments: run on the built-in default data.
        System.out.println("Executing DegreeDistribution example with default parameters and built-in default data.");
        System.out.println("  Provide parameters to read input data from files.");
        System.out.println("  See the documentation for the correct format of input files.");
        System.out.println("  Usage: DegreeDistribution <input edges path> <result path> <parallelism (optional)>");
        return true;
    }
    if (args.length < 2) {
        System.err.println("Usage: DegreeDistribution <input edges path> <result path> <parallelism (optional)>");
        return false;
    }
    fileOutput = true;
    edgeInputPath = args[0];
    resultPath = args[1];
    if (args.length > 2) {
        env.setParallelism(Integer.parseInt(args[2]));
    }
    return true;
}
/**
 * Builds the event stream: whitespace-separated "src trg op" lines where
 * op is "+" (addition) or anything else (deletion), or a small built-in
 * sequence of additions and deletions by default.
 */
@SuppressWarnings("serial")
private static DataStream<Tuple3<Integer, Integer, EventType>> getGraphStream(StreamExecutionEnvironment env) {
    if (fileOutput) {
        return env.readTextFile(edgeInputPath)
                .map(new MapFunction<String, Tuple3<Integer, Integer, EventType>>() {
                    @Override
                    public Tuple3<Integer, Integer, EventType> map(String s) {
                        String[] fields = s.split("\\s");
                        int src = Integer.parseInt(fields[0]);
                        int trg = Integer.parseInt(fields[1]);
                        // "+" marks an addition; any other token is a deletion.
                        EventType t = fields[2].equals("+") ? EventType.EDGE_ADDITION : EventType.EDGE_DELETION;
                        return new Tuple3<>(src, trg, t);
                    }
                });
    }
    return env.fromElements(
            new Tuple3<>(1, 2, EventType.EDGE_ADDITION),
            new Tuple3<>(2, 3, EventType.EDGE_ADDITION),
            new Tuple3<>(1, 4, EventType.EDGE_ADDITION),
            new Tuple3<>(2, 3, EventType.EDGE_DELETION),
            new Tuple3<>(3, 4, EventType.EDGE_ADDITION),
            new Tuple3<>(1, 2, EventType.EDGE_DELETION));
}
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/BroadcastTriangleCount.java | src/main/java/org/apache/flink/graph/streaming/example/BroadcastTriangleCount.java | package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.util.TriangleEstimate;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * The broadcast triangle count example estimates the number of triangles
 * in a streamed graph. The output is in (edges, triangles) format, where
 * edges refers to the number of edges processed so far.
 */
public class BroadcastTriangleCount implements ProgramDescription {
public static void main(String[] args) throws Exception {
    // Set up the environment; abort on invalid arguments.
    if(!parseParameters(args)) {
        return;
    }
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Edge<Long, NullValue>> edges = getEdgesDataSet(env);
    // Split the global sample budget evenly across parallel subtasks
    // (integer division: a remainder of samples is silently dropped).
    int localSamples = samples / env.getParallelism();
    // Count triangles: every subtask sees all edges (broadcast), samples
    // locally, and a single summer aggregates the per-subtask estimates.
    DataStream<Tuple2<Integer, Integer>> triangles = edges
        .broadcast()
        .flatMap(new TriangleSampler(localSamples, vertexCount))
        .flatMap(new TriangleSummer(samples, vertexCount))
        .setParallelism(1);
    // Emit the results
    if (fileOutput) {
        triangles.writeAsCsv(outputPath);
    } else {
        triangles.print();
    }
    env.execute("Broadcast Triangle Count");
}
// *************************************************************************
// TRIANGLE COUNT FUNCTIONS
// *************************************************************************
/**
 * Maintains one candidate triangle per local sampling instance: the
 * reservoir-sampled edge plus a random third vertex. beta flips to 1 once
 * both closing edges are seen; changes of the local beta sum are emitted.
 */
@SuppressWarnings("serial")
private static final class TriangleSampler extends RichFlatMapFunction<Edge<Long, NullValue>, TriangleEstimate> {
    // One independent candidate-triangle state per local sample.
    private List<SampleTriangleState> states;
    // Number of edges this subtask has processed.
    private int edgeCount;
    // Last emitted beta sum, used to suppress duplicate estimates.
    private int previousResult;
    // Total vertex count of the graph, for third-vertex sampling.
    private int vertices;

    public TriangleSampler(int size, int vertices) {
        this.states = new ArrayList<>();
        this.edgeCount = 0;
        this.previousResult = 0;
        this.vertices = vertices;
        for (int i = 0; i < size; ++i) {
            states.add(new SampleTriangleState());
        }
    }

    @Override
    public void flatMap(Edge<Long, NullValue> edge, Collector<TriangleEstimate> out) throws Exception {
        // Update edge count
        edgeCount++;
        int localBetaSum = 0;
        // Process the edge for all instances
        for (SampleTriangleState state : states) {
            // Flip a coin and with probability 1/i sample a candidate
            if (Coin.flip(state)) {
                state.srcVertex = edge.getSource();
                state.trgVertex = edge.getTarget();
                // Randomly sample the third vertex from V \ {src, trg}
                while (true) {
                    state.thirdVertex = (int) Math.floor(Math.random() * vertices);
                    if (state.thirdVertex != state.srcVertex && state.thirdVertex != state.trgVertex) {
                        break;
                    }
                }
                state.srcEdgeFound = false;
                state.trgEdgeFound = false;
                state.beta = 0;
            }
            if (state.beta == 0) {
                // Check if any of the two remaining edges in the candidate has been found
                if ((edge.getSource() == state.srcVertex && edge.getTarget() == state.thirdVertex)
                        || (edge.getSource() == state.thirdVertex && edge.getTarget() == state.srcVertex)) {
                    state.srcEdgeFound = true;
                }
                if ((edge.getSource() == state.trgVertex && edge.getTarget() == state.thirdVertex)
                        || (edge.getSource() == state.thirdVertex && edge.getTarget() == state.trgVertex)) {
                    state.trgEdgeFound = true;
                }
                state.beta = (state.srcEdgeFound && state.trgEdgeFound) ? 1 : 0;
            }
            if (state.beta == 1) {
                localBetaSum++;
            }
        }
        // Emit only when the local sum actually changed.
        if (localBetaSum != previousResult) {
            previousResult = localBetaSum;
            int source = getRuntimeContext().getIndexOfThisSubtask();
            out.collect(new TriangleEstimate(source, edgeCount, localBetaSum));
        }
    }
}
/**
 * Aggregates the per-subtask beta estimates into a global triangle-count
 * estimate and emits (edges seen, estimate) whenever the estimate changes.
 */
@SuppressWarnings("serial")
private static final class TriangleSummer
        implements FlatMapFunction<TriangleEstimate, Tuple2<Integer, Integer>> {
    // Latest estimate received from each source subtask.
    private Map<Integer, TriangleEstimate> results;
    // Largest edge count reported so far.
    private int maxEdges;
    private int sampleSize;
    // Last emitted estimate, used to suppress duplicates.
    private int previousResult;
    private int vertices;

    public TriangleSummer(int sampleSize, int vertices) {
        this.results = new HashMap<>();
        this.maxEdges = 0;
        this.sampleSize = sampleSize;
        this.previousResult = 0;
        this.vertices = vertices;
    }

    @Override
    public void flatMap(TriangleEstimate estimate, Collector<Tuple2<Integer, Integer>> out) throws Exception {
        // Keep only the newest estimate per subtask and track the highest
        // edge count observed.
        results.put(estimate.getSource(), estimate);
        maxEdges = Math.max(maxEdges, estimate.getEdgeCount());
        // Sum the per-subtask beta counters.
        int globalBetaSum = 0;
        for (TriangleEstimate e : results.values()) {
            globalBetaSum += e.getBeta();
        }
        // Scale the sampled count up to a whole-graph estimate.
        int scaled = (int) ((1.0 / (double) sampleSize) * globalBetaSum * maxEdges * (vertices - 2));
        if (scaled != previousResult) {
            previousResult = scaled;
            out.collect(new Tuple2<>(maxEdges, scaled));
        }
    }
}
/**
 * Mutable per-instance sampling state: the candidate triangle's three
 * vertices, which of the two closing edges have been seen, the beta
 * indicator, and the running coin-flip counter i.
 */
@SuppressWarnings("serial")
private static final class SampleTriangleState implements Serializable {
    // 1 when the candidate triangle has been fully observed, else 0.
    public long beta;
    // Endpoints of the sampled edge.
    public long srcVertex;
    public long trgVertex;
    // Uniformly sampled third vertex; -1 until first sampling.
    public long thirdVertex;
    // Whether (src, third) / (trg, third) have been observed.
    public boolean srcEdgeFound;
    public boolean trgEdgeFound;
    // Number of edges offered to this instance so far (Coin.flip uses 1/i).
    public int i;

    public SampleTriangleState() {
        this.beta = 0L;
        this.thirdVertex = -1L;
        this.srcEdgeFound = false;
        this.trgEdgeFound = false;
        i = 1;
    }
}
private static final class Coin {
    /**
     * Returns true with probability 1/i for the state's current counter
     * (reservoir-sampling coin) and advances the counter.
     */
    public static boolean flip(SampleTriangleState state) {
        boolean result = (Math.random() * (state.i) < 1);
        state.i++;
        return result;
    }
}
// *************************************************************************
// UTIL METHODS
// *************************************************************************
// Runtime configuration, populated by parseParameters().
private static boolean fileOutput = false;
private static String edgeInputPath = null;
private static String outputPath = null;
private static int vertexCount = 1000;
private static int samples = 10000;

/**
 * Parses the command-line arguments. With no arguments the built-in sample
 * data is used; otherwise exactly four arguments are required.
 *
 * @return false if the arguments are invalid and the job should abort
 */
private static boolean parseParameters(String[] args) {
    if (args.length == 0) {
        // No arguments: run on the built-in default data.
        System.out.println("Executing BroadcastTriangleCount example with default parameters and built-in default data.");
        System.out.println("  Provide parameters to read input data from files.");
        System.out.println("  See the documentation for the correct format of input files.");
        System.out.println("  Usage: BroadcastTriangleCount <input edges path> <output path> <vertex count> <sample count>");
        return true;
    }
    if (args.length != 4) {
        System.err.println("Usage: BroadcastTriangleCount <input edges path> <output path> <vertex count> <sample count>");
        return false;
    }
    fileOutput = true;
    edgeInputPath = args[0];
    outputPath = args[1];
    vertexCount = Integer.parseInt(args[2]);
    samples = Integer.parseInt(args[3]);
    return true;
}
/**
 * Builds the input edge stream: tab-separated "src\ttrg" pairs read from
 * the configured file, or a generated 1000-vertex circulant graph
 * (edges to key+2 and key+4, mod 1000) by default.
 */
@SuppressWarnings("serial")
private static DataStream<Edge<Long, NullValue>> getEdgesDataSet(StreamExecutionEnvironment env) {
    if (fileOutput) {
        return env.readTextFile(edgeInputPath)
                .map(new MapFunction<String, Edge<Long, NullValue>>() {
                    @Override
                    public Edge<Long, NullValue> map(String s) throws Exception {
                        // One edge per line: "<src>\t<trg>".
                        String[] fields = s.split("\\t");
                        long src = Long.parseLong(fields[0]);
                        long trg = Long.parseLong(fields[1]);
                        return new Edge<>(src, trg, NullValue.getInstance());
                    }
                });
    }
    return env.generateSequence(0, 999).flatMap(
            new FlatMapFunction<Long, Edge<Long, NullValue>>() {
                @Override
                public void flatMap(Long key, Collector<Edge<Long, NullValue>> out) throws Exception {
                    out.collect(new Edge<>(key, (key + 2) % 1000, NullValue.getInstance()));
                    out.collect(new Edge<>(key, (key + 4) % 1000, NullValue.getInstance()));
                }
            });
}
/** Human-readable job description shown by the Flink client. */
@Override
public String getDescription() {
    final String description = "Broadcast Triangle Count";
    return description;
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/ConnectedComponentsExample.java | src/main/java/org/apache/flink/graph/streaming/example/ConnectedComponentsExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.SummaryBulkAggregation;
import org.apache.flink.graph.streaming.library.ConnectedComponents;
import org.apache.flink.graph.streaming.summaries.DisjointSet;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import java.util.concurrent.TimeUnit;
/**
* The Connected Components algorithm assigns a component ID to each vertex in the graph.
* Vertices that belong to the same component have the same component ID.
* This algorithm computes _weakly_ connected components, i.e. edge direction is ignored.
* <p>
* This is a single-pass implementation, which uses a {@link SummaryBulkAggregation} to periodically merge
* the partitioned state. For an iterative implementation, see {@link IterativeConnectedComponents}.
*/
public class ConnectedComponentsExample implements ProgramDescription {

    /**
     * Runs the example: builds the edge stream, aggregates connected
     * components over merge windows, then flattens and prints the
     * (vertex, component) pairs in print windows.
     *
     * @param args optional: input edges path, merge window (ms), print window (ms)
     * @throws Exception if the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        if (!parseParameters(args)) {
            return;
        }
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, NullValue> edges = getGraphStream(env);
        DataStream<DisjointSet<Long>> cc = edges.aggregate(new ConnectedComponents<Long, NullValue>(mergeWindowTime));
        // flatten the elements of the disjoint set and print
        // in windows of printWindowTime
        cc.flatMap(new FlattenSet()).keyBy(0)
                .timeWindow(Time.of(printWindowTime, TimeUnit.MILLISECONDS))
                // 0L literals: uppercase L to avoid the l/1 confusion
                .fold(new Tuple2<Long, Long>(0L, 0L), new IdentityFold()).print();
        env.execute("Streaming Connected Components");
    }

    // *************************************************************************
    // UTIL METHODS
    // *************************************************************************

    private static boolean fileOutput = false;
    private static String edgeInputPath = null;
    // Width of the window merging the partitioned union-find state, in ms.
    private static long mergeWindowTime = 1000;
    // Width of the window in which results are printed, in ms.
    private static long printWindowTime = 2000;

    /**
     * Parses the optional command-line arguments into the static fields above.
     *
     * @param args the raw program arguments
     * @return true if the program should proceed, false on invalid usage
     */
    private static boolean parseParameters(String[] args) {
        if (args.length > 0) {
            if (args.length != 3) {
                System.err.println("Usage: ConnectedComponentsExample <input edges path> <merge window time (ms)> "
                        + "print window time (ms)");
                return false;
            }
            fileOutput = true;
            edgeInputPath = args[0];
            mergeWindowTime = Long.parseLong(args[1]);
            printWindowTime = Long.parseLong(args[2]);
        } else {
            System.out.println("Executing ConnectedComponentsExample example with default parameters and built-in default data.");
            System.out.println(" Provide parameters to read input data from files.");
            System.out.println(" See the documentation for the correct format of input files.");
            System.out.println(" Usage: ConnectedComponentsExample <input edges path> <merge window time (ms)> "
                    + "print window time (ms)");
        }
        return true;
    }

    /**
     * Builds the input graph stream: from a whitespace-separated edge file in
     * file mode, otherwise from a generated demo sequence whose edge value
     * (key * 100) doubles as the ascending event timestamp.
     */
    @SuppressWarnings("serial")
    private static GraphStream<Long, NullValue, NullValue> getGraphStream(StreamExecutionEnvironment env) {
        if (fileOutput) {
            return new SimpleEdgeStream<Long, NullValue>(env.readTextFile(edgeInputPath)
                    .map(new MapFunction<String, Edge<Long, NullValue>>() {
                        @Override
                        public Edge<Long, NullValue> map(String s) {
                            // Input line format: "<src> <trg>" (whitespace-separated).
                            String[] fields = s.split("\\s");
                            long src = Long.parseLong(fields[0]);
                            long trg = Long.parseLong(fields[1]);
                            return new Edge<>(src, trg, NullValue.getInstance());
                        }
                    }), env);
        }
        return new SimpleEdgeStream<>(env.generateSequence(1, 100).flatMap(
                new FlatMapFunction<Long, Edge<Long, Long>>() {
                    @Override
                    public void flatMap(Long key, Collector<Edge<Long, Long>> out) throws Exception {
                        // The edge value (key * 100) is used as the event timestamp below.
                        out.collect(new Edge<>(key, key + 2, key * 100));
                    }
                }),
                new AscendingTimestampExtractor<Edge<Long, Long>>() {
                    @Override
                    public long extractAscendingTimestamp(Edge<Long, Long> element) {
                        return element.getValue();
                    }
                }, env).mapEdges(new MapFunction<Edge<Long, Long>, NullValue>() {
                    @Override
                    public NullValue map(Edge<Long, Long> edge) {
                        // Drop the timestamp-carrying value once timestamps are assigned.
                        return NullValue.getInstance();
                    }
                });
    }

    /**
     * Emits one (vertex, componentRepresentative) pair for every vertex
     * tracked by the disjoint set.
     */
    @SuppressWarnings("serial")
    public static final class FlattenSet implements FlatMapFunction<DisjointSet<Long>, Tuple2<Long, Long>> {

        // Reused output tuple to avoid a per-record allocation.
        private Tuple2<Long, Long> t = new Tuple2<>();

        @Override
        public void flatMap(DisjointSet<Long> set, Collector<Tuple2<Long, Long>> out) {
            for (Long vertex : set.getMatches().keySet()) {
                Long parent = set.find(vertex);
                t.setField(vertex, 0);
                t.setField(parent, 1);
                out.collect(t);
            }
        }
    }

    /**
     * Fold that keeps only the latest value, so each window emits the most
     * recent (vertex, component) assignment per key.
     */
    @SuppressWarnings("serial")
    public static final class IdentityFold implements FoldFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {

        @Override
        public Tuple2<Long, Long> fold(Tuple2<Long, Long> accumulator, Tuple2<Long, Long> value) throws Exception {
            return value;
        }
    }

    @Override
    public String getDescription() {
        return "Streaming Connected Components on Global Aggregation";
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/main/java/org/apache/flink/graph/streaming/example/WindowTriangles.java | src/main/java/org/apache/flink/graph/streaming/example/WindowTriangles.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.example;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.flink.api.common.ProgramDescription;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.streaming.EdgesApply;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
/**
* Counts exact number of triangles in a graph slice.
*/
public class WindowTriangles implements ProgramDescription {

    /**
     * Runs the example: slices the edge stream into time windows, generates
     * triangle-candidate edges from each vertex neighborhood and counts how
     * many candidates are closed by a real edge within the same window.
     *
     * @param args optional: input edges path, output path, window time (ms),
     *             parallelism (optional)
     * @throws Exception if the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        if (!parseParameters(args, env)) {
            return;
        }
        SimpleEdgeStream<Long, NullValue> edges = getGraphStream(env);
        DataStream<Tuple2<Integer, Long>> triangleCount =
                edges.slice(windowTime, EdgeDirection.ALL)
                .applyOnNeighbors(new GenerateCandidateEdges())
                .keyBy(0, 1).timeWindow(windowTime)
                .apply(new CountTriangles())
                .timeWindowAll(windowTime).sum(0);
        if (fileOutput) {
            triangleCount.writeAsText(outputPath);
        }
        else {
            triangleCount.print();
        }
        env.execute("Naive window triangle count");
    }

    // *************************************************************************
    // UTIL METHODS
    // *************************************************************************

    /**
     * For each vertex: re-emits its incident edges (isCandidate = false) and
     * emits every pair of *distinct* neighbors with IDs larger than the vertex
     * as a candidate edge (isCandidate = true). A candidate matched by a real
     * edge in the same window closes a triangle.
     */
    @SuppressWarnings("serial")
    public static final class GenerateCandidateEdges implements
            EdgesApply<Long, NullValue, Tuple3<Long, Long, Boolean>> {

        @Override
        public void applyOnEdges(Long vertexID,
                Iterable<Tuple2<Long, NullValue>> neighbors,
                Collector<Tuple3<Long, Long, Boolean>> out) throws Exception {

            Tuple3<Long, Long, Boolean> outT = new Tuple3<>();
            outT.setField(vertexID, 0);
            outT.setField(false, 2); //isCandidate=false

            Set<Long> neighborIdsSet = new HashSet<Long>();
            for (Tuple2<Long, NullValue> t: neighbors) {
                outT.setField(t.f0, 1);
                out.collect(outT);
                neighborIdsSet.add(t.f0);
            }
            Object[] neighborIds = neighborIdsSet.toArray();
            neighborIdsSet.clear();

            outT.setField(true, 2); //isCandidate=true
            for (int i = 0; i < neighborIds.length - 1; i++) {
                // j starts at i + 1: a candidate needs two distinct neighbors.
                // Starting at i (as before) emitted self-pairs (n, n), which
                // would miscount a self-loop edge as a triangle.
                for (int j = i + 1; j < neighborIds.length; j++) {
                    // only emit the candidates
                    // with IDs larger than the vertex ID
                    if (((long) neighborIds[i] > vertexID) && ((long) neighborIds[j] > vertexID)) {
                        outT.setField((long) neighborIds[i], 0);
                        outT.setField((long) neighborIds[j], 1);
                        out.collect(outT);
                    }
                }
            }
        }
    }

    /**
     * Per (src, trg) key: counts candidate tuples and real-edge tuples in the
     * window; if at least one real edge exists, the candidate count equals the
     * number of triangles closed by that edge and is emitted with the window's
     * max timestamp.
     */
    @SuppressWarnings("serial")
    public static final class CountTriangles implements
            WindowFunction<Tuple3<Long, Long, Boolean>, Tuple2<Integer, Long>, Tuple, TimeWindow>{

        @Override
        public void apply(Tuple key, TimeWindow window,
                Iterable<Tuple3<Long, Long, Boolean>> values,
                Collector<Tuple2<Integer, Long>> out) throws Exception {
            int candidates = 0;
            int edges = 0;
            for (Tuple3<Long, Long, Boolean> t: values) {
                if (t.f2) { // candidate
                    candidates++;
                }
                else {
                    edges++;
                }
            }
            if (edges > 0) {
                out.collect(new Tuple2<Integer, Long>(candidates, window.maxTimestamp()));
            }
        }
    }

    private static boolean fileOutput = false;
    private static String edgeInputPath = null;
    private static String outputPath = null;
    // Width of the processing window; demo default is 300 ms.
    private static Time windowTime = Time.of(300, TimeUnit.MILLISECONDS);

    /**
     * Parses the optional command-line arguments; the 4th argument, when
     * present, overrides the environment's parallelism.
     *
     * @return true if the program should proceed, false on invalid usage
     */
    private static boolean parseParameters(String[] args, StreamExecutionEnvironment env) {
        if(args.length > 0) {
            if(args.length < 3) {
                System.err.println("Usage: WindowTriangles <input edges path> <output path>"
                        + " <window time (ms)> <parallelism (optional)>");
                return false;
            }
            fileOutput = true;
            edgeInputPath = args[0];
            outputPath = args[1];
            windowTime = Time.of(Long.parseLong(args[2]), TimeUnit.MILLISECONDS);
            if (args.length > 3) {
                env.setParallelism(Integer.parseInt(args[3]));
            }
        } else {
            System.out.println("Executing WindowTriangles example with default parameters and built-in default data.");
            System.out.println(" Provide parameters to read input data from files.");
            System.out.println(" See the documentation for the correct format of input files.");
            System.out.println(" Usage: WindowTriangles <input edges path> <output path>"
                    + " <window time (ms)> <parallelism (optional)>");
        }
        return true;
    }

    /**
     * Builds the input edge stream: from a whitespace-separated
     * "src trg timestamp" file in file mode, otherwise from a small generated
     * demo graph whose edge value carries the event timestamp.
     */
    @SuppressWarnings("serial")
    private static SimpleEdgeStream<Long, NullValue> getGraphStream(StreamExecutionEnvironment env) {
        if (fileOutput) {
            return new SimpleEdgeStream<>(env.readTextFile(edgeInputPath)
                    .map(new MapFunction<String, Edge<Long, Long>>() {
                        @Override
                        public Edge<Long, Long> map(String s) {
                            String[] fields = s.split("\\s");
                            long src = Long.parseLong(fields[0]);
                            long trg = Long.parseLong(fields[1]);
                            long timestamp = Long.parseLong(fields[2]);
                            return new Edge<>(src, trg, timestamp);
                        }
                    }), new EdgeValueTimestampExtractor(), env).mapEdges(new RemoveEdgeValue());
        }
        return new SimpleEdgeStream<>(env.generateSequence(1, 10).flatMap(
                new FlatMapFunction<Long, Edge<Long, Long>>() {
                    @Override
                    public void flatMap(Long key, Collector<Edge<Long, Long>> out) throws Exception {
                        // Two edges per vertex, with strictly increasing timestamps.
                        for (int i = 1; i < 3; i++) {
                            long target = key + i;
                            out.collect(new Edge<>(key, target, key*100 + (i-1)*50));
                        }
                    }
                }), new EdgeValueTimestampExtractor(), env).mapEdges(new RemoveEdgeValue());
    }

    /** Uses the Long edge value as the ascending event timestamp. */
    @SuppressWarnings("serial")
    public static final class EdgeValueTimestampExtractor extends AscendingTimestampExtractor<Edge<Long, Long>> {
        @Override
        public long extractAscendingTimestamp(Edge<Long, Long> element) {
            return element.getValue();
        }
    }

    /** Drops the timestamp-carrying edge value once timestamps are assigned. */
    @SuppressWarnings("serial")
    public static final class RemoveEdgeValue implements MapFunction<Edge<Long,Long>, NullValue> {
        @Override
        public NullValue map(Edge<Long, Long> edge) {
            return NullValue.getInstance();
        }
    }

    @Override
    public String getDescription() {
        // Fixed: previously returned the copy-pasted connected-components description.
        return "Naive window triangle count";
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/RoncooJuiSpringbootWebApplication.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/RoncooJuiSpringbootWebApplication.java | package com.roncoo.jui;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
*
* @author wujing
*/
// Spring Boot entry point: enables async execution, scheduled tasks and
// servlet component scanning (@WebServlet/@WebFilter/@WebListener) in
// addition to the standard auto-configuration.
@EnableAsync
@EnableScheduling
@ServletComponentScan
@SpringBootApplication
public class RoncooJuiSpringbootWebApplication {
// Boots the embedded container and prints the entry URL for convenience.
public static void main(String[] args) {
SpringApplication.run(RoncooJuiSpringbootWebApplication.class, args);
System.out.println("请直接访问:http://localhost:8080/roncoo-jui-springboot/index");
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/LoginController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/LoginController.java | /**
* Copyright 2015-2016 广州市领课网络科技有限公司
*/
package com.roncoo.jui.web.controller;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import com.fasterxml.jackson.databind.JsonNode;
import com.roncoo.jui.common.util.ConfUtil;
import com.roncoo.jui.common.util.HttpUtil;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.common.util.jui.Jui;
import com.xiaoleilu.hutool.json.JSONUtil;
/**
* @author wujing
*/
@Controller
public class LoginController extends BaseController {
/**
 * GET /login. For AJAX callers, answers with a JSON payload carrying
 * statusCode 301 so the front-end can redirect to the login page itself;
 * for normal browser requests, renders the "login" view.
 */
@RequestMapping(value = "/login", method = RequestMethod.GET)
public String loginGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
// Detect an AJAX request
if (req.getHeader("x-requested-with") != null && req.getHeader("x-requested-with").equalsIgnoreCase("XMLHttpRequest")) {
// AJAX requests carry the x-requested-with: XMLHttpRequest header
resp.setCharacterEncoding("UTF-8");
PrintWriter out = resp.getWriter();
Jui bj = new Jui();
bj.setStatusCode(301);
bj.setMessage("登录超时,请重新登录!");
out.print(JSONUtil.toJsonStr(bj));
out.flush();
// null return: the response body has already been written
return null;
}
return "login";
}
/**
 * POST /login: redirects the browser to the OAuth2 authorization endpoint.
 */
@RequestMapping(value = "/login", method = RequestMethod.POST)
public String loginPost() throws UnsupportedEncodingException {
return redirect(login());
}
/**
 * Builds the OAuth2 authorization URL from configuration: client id,
 * response type "code" and the URL-encoded redirect URI.
 */
private static String login() throws UnsupportedEncodingException {
return ConfUtil.getProperty("oauth2AuthorizeUrl").replace("{CLIENTID}", ConfUtil.getProperty("clientId")).replace("{RESPONSETYPE}", "code").replace("{REDIRECTURI}", URLEncoder.encode(ConfUtil.getProperty("redirectUrl"), "utf-8"));
}
/**
 * OAuth2 callback: exchanges the authorization code for the user's account
 * number and establishes the Shiro session with a fixed internal password.
 */
@RequestMapping(value = "/oauth2", method = RequestMethod.GET)
public String oauth2(@RequestParam(value = "code", defaultValue = "") String code, @RequestParam(value = "currentIp", defaultValue = "") String currentIp) {
logger.warn("授权登录:code={},currentIp={}", code, currentIp);
UsernamePasswordToken token = new UsernamePasswordToken();
// NOTE(review): when the code exchange fails, oauth2(code) returns "" and
// login is attempted with an empty username — confirm the realm rejects it.
token.setUsername(oauth2(code));
token.setPassword("www.roncoo.com".toCharArray());
SecurityUtils.getSubject().login(token);
return redirect("/index");
}
/**
 * Exchanges an authorization code for the user's account number ("roncooNo")
 * via the token endpoint.
 *
 * @param code the authorization code returned by the OAuth2 server
 * @return the account number, or "" when the exchange fails (errCode != 0)
 */
private static String oauth2(String code) {
Map<String, Object> param = new HashMap<String, Object>();
param.put("clientId", ConfUtil.getProperty("clientId"));
param.put("clientSecret", ConfUtil.getProperty("clientSecret"));
param.put("code", code);
param.put("grantType", "authorization_code");
String url = ConfUtil.getProperty("apiAccessTokenUrl");
JsonNode json = HttpUtil.postForObject(url, param);
int status = json.get("errCode").asInt();
if (0 == status) {
return json.get("resultData").get("roncooNo").asText();
}
return "";
}
// Session-timeout page.
@RequestMapping(value = "/timeout", method = { RequestMethod.GET, RequestMethod.POST })
public String timeout() {
return "timeout";
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/IndexController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/IndexController.java | /**
* Copyright 2015-2016 广州市领课网络科技有限公司
*/
package com.roncoo.jui.web.controller;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpSession;
import org.apache.shiro.SecurityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import com.roncoo.jui.common.entity.SysUser;
import com.roncoo.jui.common.util.Constants;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.vo.SysMenuRoleVO;
import com.roncoo.jui.web.bean.vo.SysMenuVO;
import com.roncoo.jui.web.bean.vo.SysRoleUserVO;
import com.roncoo.jui.web.service.SysMenuRoleService;
import com.roncoo.jui.web.service.SysMenuService;
import com.roncoo.jui.web.service.SysRoleUserService;
import com.roncoo.jui.web.service.WebSiteService;
/**
* @author wujing
*/
/**
 * Serves the portal entry pages: the root redirect, the dashboard frame
 * (with the user's role-filtered menu) and the main panel.
 *
 * @author wujing
 */
@Controller
public class IndexController extends BaseController {

    @Autowired
    private SysMenuService sysMenuService;

    @Autowired
    private SysMenuRoleService sysMenuRoleService;

    @Autowired
    private SysRoleUserService sysRoleUserService;

    @Autowired
    private WebSiteService webSiteService;

    /** Main panel: loads the web-site summary for the given id (default 1). */
    @RequestMapping(value = "/main", method = RequestMethod.GET)
    public void index(@RequestParam(value = "id", defaultValue = "1") Long id, ModelMap modelMap) {
        modelMap.put("vo", webSiteService.main(id));
    }

    /** Root path: send the browser to the dashboard. */
    @RequestMapping(value = "/", method = RequestMethod.GET)
    public String index(HttpSession session) {
        return redirect("/index");
    }

    /** Dashboard: resolves user --> roles --> menus and renders the frame. */
    @RequestMapping("/index")
    public void index(ModelMap modelMap) {
        SysUser currentUser = (SysUser) SecurityUtils.getSubject().getSession().getAttribute(Constants.Session.USER);
        // Collect every menu reachable through any of the user's roles.
        List<SysRoleUserVO> roleLinks = sysRoleUserService.listByUserId(currentUser.getId());
        List<SysMenuRoleVO> menuLinks = new ArrayList<>();
        for (SysRoleUserVO roleLink : roleLinks) {
            menuLinks.addAll(sysMenuRoleService.listByRoleId(roleLink.getRoleId()));
        }
        // Filter the menu tree down to what those role links grant.
        List<SysMenuVO> menus = sysMenuService.listMenucByRole(menuLinks);
        SecurityUtils.getSubject().getSession().setAttribute(Constants.Session.MENU, menus);
        modelMap.put("menuVOList", menus);
        modelMap.put("javaVersion", System.getProperty("java.version"));
        modelMap.put("osName", System.getProperty("os.name"));
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/WebSiteController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/WebSiteController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.WebSiteQO;
import com.roncoo.jui.web.service.WebSiteService;
/**
* 网址汇总
*
* @author wujing
* @since 2017-11-22
*/
@Controller
@RequestMapping(value = "/admin/webSite")
public class WebSiteController extends BaseController {
private final static String TARGETID = "admin-webSite";
@Autowired
private WebSiteService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageCurrent", defaultValue = "1") int pageCurrent, @RequestParam(value = "pageSize", defaultValue = "20") int pageSize, @ModelAttribute WebSiteQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
}
@RequestMapping(value = "/add")
public void add() {
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute WebSiteQO qo, @RequestParam(value = "fileLogo") MultipartFile file) {
if (service.save(qo, file) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id) {
if (service.deleteById(id) > 0) {
return delete(TARGETID);
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute WebSiteQO qo, @RequestParam(value = "fileLogo") MultipartFile file) {
if (service.updateById(qo, file) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcReportController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcReportController.java | package com.roncoo.jui.web.controller.admin;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.RcReportQO;
import com.roncoo.jui.web.service.RcReportService;
/**
* 报表
*
* @author wujing
* @since 2017-11-11
*/
@Controller
@RequestMapping(value = "/admin/rcReport")
public class RcReportController extends BaseController {
// private final static String TARGETID = "admin-rcReport";
@Autowired
private RcReportService service;
/**
 * Paged listing of the report (报表).
 *
 * @param pageCurrent current page number (request param "pageNum")
 * @param pageSize page size (request param "numPerPage")
 * @param qo query filter, echoed back to the view as "bean"
 * @param modelMap view model
 */
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageNum", defaultValue = "1") int pageCurrent, @RequestParam(value = "numPerPage", defaultValue = "20") int pageSize, @ModelAttribute RcReportQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("bean", qo);
}
/**
 * Exports the report as an Excel download written directly to the response.
 *
 * @param response the HTTP response the spreadsheet is streamed to
 * @throws IOException if writing the response fails
 */
@RequestMapping(value = "/download")
public void download(HttpServletResponse response) throws IOException {
service.exportExcel(response);
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysRoleUserController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysRoleUserController.java | package com.roncoo.jui.web.controller.admin;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.enums.StatusIdEnum;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.SysRoleUserQO;
import com.roncoo.jui.web.bean.vo.SysRoleUserVO;
import com.roncoo.jui.web.service.SysRoleService;
import com.roncoo.jui.web.service.SysRoleUserService;
/**
* 角色用户关联表
*
* @author wujing
* @since 2017-10-20
*/
// NOTE(review): the class-level mapping declares method = POST, but both
// handlers below declare GET on their own @RequestMapping, which overrides it.
// "/setRole" mutates state over GET — confirm this is intended.
@Controller
@RequestMapping(value = "/admin/sysRoleUser", method = RequestMethod.POST)
public class SysRoleUserController extends BaseController {
private final static String TARGETID = "admin-sysRoleUser";
@Autowired
private SysRoleUserService service;
@Autowired
private SysRoleService sysRoleService;
/**
 * Role-assignment screen: shows the paged role list with the user's
 * current assignments pre-checked.
 *
 * @param pageCurrent current page number (request param "pageNum")
 * @param pageSize page size (request param "numPerPage")
 * @param qo carries the target userId and filters
 * @param modelMap view model
 */
@RequestMapping(value = "/set", method = RequestMethod.GET)
public void set(@RequestParam(value = "pageNum", defaultValue = "1") int pageCurrent, @RequestParam(value = "numPerPage", defaultValue = "20") int pageSize, SysRoleUserQO qo, ModelMap modelMap) {
List<SysRoleUserVO> list = service.listByUserId(qo.getUserId());
modelMap.put("bean", qo);
modelMap.put("roleUserList", list);
modelMap.put("page", sysRoleService.checkUserByRole(pageCurrent, pageSize, qo, list));
modelMap.put("statusIdEnums", StatusIdEnum.values());
}
/**
 * Saves the user's role assignments.
 *
 * @param userId the user being edited
 * @param ids the selected role ids (presumably comma-separated — verify against the caller)
 * @param modelMap unused
 */
@RequestMapping(value = "/setRole", method = RequestMethod.GET)
@ResponseBody
public String setPost(Long userId, String ids, ModelMap modelMap) {
service.save(userId, ids);
return success(TARGETID);
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcDataDictionaryListController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcDataDictionaryListController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.RcDataDictionaryListQO;
import com.roncoo.jui.web.service.RcDataDictionaryListService;
/**
* 数据字典明细表
*
* @author wujing
* @since 2017-11-11
*/
@Controller
@RequestMapping(value = "/admin/rcDataDictionaryList")
public class RcDataDictionaryListController extends BaseController {
private final static String TARGETID = "admin-rcDataDictionaryList";
@Autowired
private RcDataDictionaryListService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageCurrent", defaultValue = "1") int pageCurrent, @RequestParam(value = "pageSize", defaultValue = "20") int pageSize, @ModelAttribute RcDataDictionaryListQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
}
@RequestMapping(value = "/add")
public void add() {
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute RcDataDictionaryListQO qo) {
if (service.save(qo) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id) {
if (service.deleteById(id) > 0) {
return delete(TARGETID);
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute RcDataDictionaryListQO qo) {
if (service.updateById(qo) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcDataDictionaryController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/RcDataDictionaryController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.RcDataDictionaryQO;
import com.roncoo.jui.web.service.RcDataDictionaryService;
/**
* 数据字典
*
* @author wujing
* @since 2017-11-11
*/
@Controller
@RequestMapping(value = "/admin/rcDataDictionary")
public class RcDataDictionaryController extends BaseController {

    /** jUI panel target id used by success/delete callbacks. */
    private static final String TARGETID = "admin-rcDataDictionary";

    @Autowired
    private RcDataDictionaryService service;

    /**
     * Renders one page of dictionary rows with optional column sorting
     * (orderField/orderDirection come from the jUI grid header clicks).
     */
    @RequestMapping(value = "/list")
    public void list(@RequestParam(value = "pageCurrent", defaultValue = "1") int pageCurrent, @RequestParam(value = "pageSize", defaultValue = "20") int pageSize, @RequestParam(value = "orderField", required = false) String orderField, @RequestParam(value = "orderDirection", required = false) String orderDirection, @ModelAttribute RcDataDictionaryQO qo, ModelMap modelMap) {
        modelMap.put("page", service.listForPage(pageCurrent, pageSize, orderField, orderDirection, qo));
        modelMap.put("pageCurrent", pageCurrent);
        modelMap.put("pageSize", pageSize);
        modelMap.put("bean", qo);
    }

    /** Shows the empty "add" form; the view needs no model data. */
    @RequestMapping(value = "/add")
    public void add() {
    }

    /** Persists a new dictionary; returns a jUI JSON result string. */
    @ResponseBody
    @RequestMapping(value = "/save")
    public String save(@ModelAttribute RcDataDictionaryQO qo) {
        return service.save(qo) > 0 ? success(TARGETID) : error("添加失败");
    }

    /** Deletes a dictionary by primary key; returns a jUI JSON result string. */
    @ResponseBody
    @RequestMapping(value = "/delete")
    public String delete(@RequestParam(value = "id") Long id) {
        return service.deleteById(id) > 0 ? delete(TARGETID) : error("删除失败");
    }

    /** Loads one dictionary into the model for the edit form. */
    @RequestMapping(value = "/edit")
    public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
        modelMap.put("bean", service.getById(id));
    }

    /** Applies the submitted changes; returns a jUI JSON result string. */
    @ResponseBody
    @RequestMapping(value = "/update")
    public String update(@ModelAttribute RcDataDictionaryQO qo) {
        return service.updateById(qo) > 0 ? success(TARGETID) : error("修改失败");
    }

    /** Loads one dictionary into the model for the read-only detail view. */
    @RequestMapping(value = "/view")
    public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
        modelMap.put("bean", service.getById(id));
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysRoleController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysRoleController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.enums.StatusIdEnum;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.SysRoleQO;
import com.roncoo.jui.web.service.SysRoleService;
/**
* 角色信息
*
* @author wujing
* @since 2017-10-20
*/
@Controller
@RequestMapping(value = "/admin/sysRole")
public class SysRoleController extends BaseController {
private final static String TARGETID = "admin-sysRole";
@Autowired
private SysRoleService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageNum", defaultValue = "1") int pageCurrent, @RequestParam(value = "numPerPage", defaultValue = "20") int pageSize, @ModelAttribute SysRoleQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
modelMap.put("statusIdEnums", StatusIdEnum.values());
}
@RequestMapping(value = "/add")
public void add() {
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute SysRoleQO qo) {
if (service.save(qo) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id) {
if (service.deleteById(id) > 0) {
return delete(TARGETID);
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("statusIdEnums", StatusIdEnum.values());
modelMap.put("bean", service.getById(id));
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute SysRoleQO qo) {
if (service.updateById(qo) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysMenuRoleController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysMenuRoleController.java | package com.roncoo.jui.web.controller.admin;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.vo.SysMenuRoleVO;
import com.roncoo.jui.web.service.SysMenuRoleService;
import com.roncoo.jui.web.service.SysMenuService;
/**
* 菜单角色关联表
*
* @author wujing
* @since 2017-10-20
*/
@Controller
@RequestMapping(value = "/admin/sysMenuRole")
public class SysMenuRoleController extends BaseController {

    /** jUI panel target id used by the save callback. */
    private final static String TARGETID = "admin-sysMenuRole";

    @Autowired
    private SysMenuRoleService service;

    @Autowired
    private SysMenuService sysMenuService;

    /**
     * Renders the menu-assignment screen for one role.
     *
     * @param roleId   id of the role being configured
     * @param modelMap receives the role id, its current menu links, the joined
     *                 menu ids, and the menu tree with the role's menus checked
     */
    @RequestMapping(value = "/set", method = RequestMethod.GET)
    public void setGet(Long roleId, ModelMap modelMap) {
        List<SysMenuRoleVO> list = service.listByRoleId(roleId);
        modelMap.put("roleId", roleId);
        modelMap.put("menuRoleList", list);
        modelMap.put("ids", service.getIds(list));
        modelMap.put("menuList", sysMenuService.checkMenucByRole(list));
    }

    /**
     * Saves the selected menus for the role.
     *
     * @param roleId id of the role being configured
     * @param ids    comma-separated menu ids chosen in the UI
     * @return jUI JSON result string that refreshes the panel
     */
    @RequestMapping(value = "/setMenu", method = RequestMethod.POST)
    @ResponseBody
    public String setPost(Long roleId, String ids, ModelMap modelMap) {
        // Removed leftover System.out.println(ids) debug statement.
        service.save(roleId, ids);
        return success(TARGETID);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysUserController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysUserController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.enums.UserSexEnum;
import com.roncoo.jui.common.enums.UserStatusEnum;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.SysUserQO;
import com.roncoo.jui.web.service.SysUserService;
/**
* 后台用户信息
*
* @author wujing
* @since 2017-10-25
*/
@Controller
@RequestMapping(value = "/admin/sysUser")
public class SysUserController extends BaseController {
private final static String TARGETID = "admin-sysUser";
@Autowired
private SysUserService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageNum", defaultValue = "1") int pageCurrent, @RequestParam(value = "numPerPage", defaultValue = "20") int pageSize, @ModelAttribute SysUserQO qo, ModelMap modelMap){
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
modelMap.put("userSexEnums", UserSexEnum.values());
modelMap.put("userStatusEnums", UserStatusEnum.values());
}
@RequestMapping(value = "/add")
public void add(ModelMap modelMap){
modelMap.put("userSexEnums", UserSexEnum.values());
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute SysUserQO qo){
if (service.save(qo) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id){
if (service.deleteById(id) > 0) {
return delete(TARGETID);
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap){
modelMap.put("bean", service.getById(id));
modelMap.put("userSexEnums", UserSexEnum.values());
modelMap.put("userStatusEnums", UserStatusEnum.values());
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute SysUserQO qo){
if (service.updateById(qo) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap){
modelMap.put("bean", service.getById(id));
modelMap.put("userSexEnums", UserSexEnum.values());
modelMap.put("userStatusEnums", UserStatusEnum.values());
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysMenuController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/SysMenuController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.enums.StatusIdEnum;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.SysMenuQO;
import com.roncoo.jui.web.service.SysMenuService;
/**
* 菜单信息
*
* @author wujing
* @since 2017-10-25
*/
@Controller
@RequestMapping(value = "/admin/sysMenu")
public class SysMenuController extends BaseController {
private final static String TARGETID = "admin-sysMenu";
@Autowired
private SysMenuService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageNum", defaultValue = "1") int pageCurrent, @RequestParam(value = "numPerPage", defaultValue = "20") int pageSize, @ModelAttribute SysMenuQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
modelMap.put("statusIdEnums", StatusIdEnum.values());
}
@RequestMapping(value = "/add")
public void add(@ModelAttribute SysMenuQO qo, ModelMap modelMap) {
modelMap.put("bean", qo);
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute SysMenuQO qo) {
if (service.save(qo) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id) {
int result = service.deleteById(id);
if (result > 0) {
return delete(TARGETID);
} else if (result == -1) {
return error("删除失败,请先删除子菜单");
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
modelMap.put("statusIdEnums", StatusIdEnum.values());
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute SysMenuQO qo) {
if (service.updateById(qo) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
modelMap.put("statusIdEnums", StatusIdEnum.values());
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/WebSiteUrlController.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/controller/admin/WebSiteUrlController.java | package com.roncoo.jui.web.controller.admin;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.web.bean.qo.WebSiteUrlQO;
import com.roncoo.jui.web.service.WebSiteUrlService;
/**
* 网址汇总地址
*
* @author wujing
* @since 2017-11-22
*/
@Controller
@RequestMapping(value = "/admin/webSiteUrl")
public class WebSiteUrlController extends BaseController {
private final static String TARGETID = "admin-webSiteUrl";
@Autowired
private WebSiteUrlService service;
@RequestMapping(value = "/list")
public void list(@RequestParam(value = "pageCurrent", defaultValue = "1") int pageCurrent, @RequestParam(value = "pageSize", defaultValue = "20") int pageSize, @ModelAttribute WebSiteUrlQO qo, ModelMap modelMap) {
modelMap.put("page", service.listForPage(pageCurrent, pageSize, qo));
modelMap.put("pageCurrent", pageCurrent);
modelMap.put("pageSize", pageSize);
modelMap.put("bean", qo);
}
@RequestMapping(value = "/add")
public void add(@ModelAttribute WebSiteUrlQO qo, ModelMap modelMap) {
modelMap.put("bean", qo);
}
@ResponseBody
@RequestMapping(value = "/save")
public String save(@ModelAttribute WebSiteUrlQO qo) {
if (service.save(qo) > 0) {
return success(TARGETID);
}
return error("添加失败");
}
@ResponseBody
@RequestMapping(value = "/delete")
public String delete(@RequestParam(value = "id") Long id) {
if (service.deleteById(id) > 0) {
return delete(TARGETID);
}
return error("删除失败");
}
@RequestMapping(value = "/edit")
public void edit(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
@ResponseBody
@RequestMapping(value = "/update")
public String update(@ModelAttribute WebSiteUrlQO qo) {
if (service.updateById(qo) > 0) {
return success(TARGETID);
}
return error("修改失败");
}
@RequestMapping(value = "/view")
public void view(@RequestParam(value = "id") Long id, ModelMap modelMap) {
modelMap.put("bean", service.getById(id));
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/ShiroConfiguration.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/ShiroConfiguration.java | package com.roncoo.jui.web.custom;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.subject.PrincipalCollection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysUserDao;
import com.roncoo.jui.common.util.ConfUtil;
import com.roncoo.jui.common.util.Constants;
import com.roncoo.jui.web.bean.vo.SysMenuVO;
import com.roncoo.spring.boot.autoconfigure.shiro.ShiroRealm;
import com.xiaoleilu.hutool.util.CollectionUtil;
/**
* shiro配置类 Created by cdyoue on 2016/10/21.
*/
@Configuration
public class ShiroConfiguration {

    /**
     * Registers the project's custom realm as the "shiroRealm" bean consumed
     * by the shiro auto-configuration.
     */
    @Bean(name = "shiroRealm")
    public ShiroRealm shiroRealm() {
        return new ShiroCustomRealm();
    }
}
// Custom Shiro realm: authorization is driven by the menu tree cached in the
// session at login, while authentication is stubbed to a fixed demo user.
class ShiroCustomRealm extends ShiroRealm {
@Autowired
private SysUserDao sysUserDao;
/**
 * Authorization: grants one string permission per menu URL found in the
 * session-cached menu tree (flattened recursively by listMenu).
 */
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection arg0) {
SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo();
@SuppressWarnings("unchecked")
List<SysMenuVO> menuVOList = (List<SysMenuVO>) SecurityUtils.getSubject().getSession().getAttribute(Constants.Session.MENU);
Set<String> menuSet = new HashSet<>();
// Flatten the menu tree into a set of permission strings.
listMenu(menuSet, menuVOList);
simpleAuthorizationInfo.setStringPermissions(menuSet);
return simpleAuthorizationInfo;
}
/**
 * Login authentication. NOTE(review): the submitted username/password are
 * never verified — the session user is always loaded via ConfUtil.USER
 * (demo-account behaviour); confirm this is intentional before any
 * production use.
 */
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken arg0) throws AuthenticationException {
UsernamePasswordToken token = (UsernamePasswordToken) arg0;
// String roncooNo = token.getUsername();
// String password = token.getPassword().toString();
SecurityUtils.getSubject().getSession().setAttribute(Constants.Session.USER, sysUserDao.getByUserPhone(ConfUtil.USER));
return new SimpleAuthenticationInfo(token, token.getPassword(), getName());
}
/**
 * Recursively collects every non-blank menu URL from the tree into menuSet.
 *
 * @param menuSet    accumulator for permission strings
 * @param menuVOList current level of the menu tree (may be null or empty)
 */
private static void listMenu(Set<String> menuSet, List<SysMenuVO> menuVOList) {
if (CollectionUtil.isNotEmpty(menuVOList)) {
for (SysMenuVO sm : menuVOList) {
if (StringUtils.hasText(sm.getMenuUrl())) {
menuSet.add(sm.getMenuUrl());
}
listMenu(menuSet, sm.getList());
}
}
}
} | java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/WebMvcConfigurer.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/WebMvcConfigurer.java | package com.roncoo.jui.web.custom;
import java.io.PrintWriter;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.shiro.SecurityUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import com.roncoo.jui.common.util.Constants;
import com.roncoo.jui.common.util.base.Base;
import com.roncoo.jui.common.util.jui.Jui;
import com.roncoo.jui.web.bean.vo.SysMenuVO;
import com.xiaoleilu.hutool.json.JSONUtil;
import com.xiaoleilu.hutool.util.CollectionUtil;
/**
* 拦截器
*/
@Configuration
public class WebMvcConfigurer extends WebMvcConfigurerAdapter {

    /** Permission-check interceptor bean. */
    @Bean
    ShiroInterceptor shiroInterceptor() {
        return new ShiroInterceptor();
    }

    /** Guards every back-office URL with the permission interceptor. */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        ShiroInterceptor interceptor = shiroInterceptor();
        registry.addInterceptor(interceptor).addPathPatterns("/admin/**");
        super.addInterceptors(registry);
    }
}
/**
 * Permission interceptor: rejects /admin/** requests whose servlet path is not
 * among the menu URLs stored in the Shiro session at login time. On rejection
 * it writes a jUI-style JSON error body and stops the handler chain.
 */
class ShiroInterceptor extends Base implements HandlerInterceptor {

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        String uri = request.getServletPath();
        if (checkUri(uri)) {
            return true;
        }
        logger.error("没此权限,当前访问路径为:{}", uri);
        // Declare the payload as JSON (was missing, so clients had to guess the type).
        response.setContentType("application/json");
        response.setCharacterEncoding("utf-8");
        Jui bj = new Jui();
        bj.setStatusCode(300);
        bj.setMessage("测试账号,没此权限!");
        PrintWriter out = response.getWriter();
        out.print(JSONUtil.toJsonStr(bj));
        out.flush();
        // Do not close the writer: the servlet container owns the response stream.
        return false;
    }

    /**
     * True when the (trailing-slash-normalized) uri matches one of the menu
     * URLs cached in the session.
     */
    private static Boolean checkUri(String uri) {
        @SuppressWarnings("unchecked")
        List<SysMenuVO> menuVOList = (List<SysMenuVO>) SecurityUtils.getSubject().getSession().getAttribute(Constants.Session.MENU);
        Set<String> menuSet = new HashSet<>();
        listMenu(menuSet, menuVOList);
        // Normalize "/a/b/" to "/a/b" before comparing against menu URLs.
        if (StringUtils.hasText(uri) && uri.endsWith("/")) {
            uri = uri.substring(0, uri.length() - 1);
        }
        for (String s : menuSet) {
            if (s.equalsIgnoreCase(uri)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Recursively collects every non-blank menu URL from the tree into menuSet.
     */
    private static void listMenu(Set<String> menuSet, List<SysMenuVO> menuVOList) {
        if (CollectionUtil.isNotEmpty(menuVOList)) {
            for (SysMenuVO sm : menuVOList) {
                if (StringUtils.hasText(sm.getMenuUrl())) {
                    menuSet.add(sm.getMenuUrl());
                }
                listMenu(menuSet, sm.getList());
            }
        }
    }

    @Override
    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
        // No post-processing required.
    }

    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
        // Nothing to clean up.
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/RoncooExceptionHandler.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/RoncooExceptionHandler.java | package com.roncoo.jui.web.custom;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import com.roncoo.jui.common.util.base.BaseController;
import com.roncoo.jui.common.util.base.RoncooException;
/**
* 错误处理
*
* @author wujing
*/
@RestControllerAdvice
public class RoncooExceptionHandler extends BaseController {
@ExceptionHandler({ RoncooException.class })
@ResponseStatus(HttpStatus.OK)
public String processBizException(RoncooException e) {
logger.error(e.toString(), e);
return error(e.getExpMsg());
}
@ExceptionHandler({ Exception.class })
@ResponseStatus(HttpStatus.OK)
public String processException(Exception e) {
logger.error(e.getMessage(), e);
return error("系统错误");
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/ShiroFreeMarkerWebConfiguration.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/ShiroFreeMarkerWebConfiguration.java | package com.roncoo.jui.web.custom;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.freemarker.FreeMarkerAutoConfiguration.FreeMarkerWebConfiguration;
import org.springframework.context.annotation.Configuration;
import com.roncoo.shiro.freemarker.ShiroTags;
@Configuration
public class ShiroFreeMarkerWebConfiguration extends FreeMarkerWebConfiguration {

    @Autowired
    private freemarker.template.Configuration configuration;

    /**
     * Registers the shiro tag library as the shared "shiro" variable so
     * templates can use the shiro security tags.
     *
     * Previously a failure was swallowed by printStackTrace(), silently
     * leaving every template without the tags; now the application fails fast
     * at startup with the cause preserved.
     */
    @PostConstruct
    public void setSharedVariable() {
        try {
            configuration.setSharedVariable("shiro", new ShiroTags());
        } catch (Exception e) {
            throw new IllegalStateException("Failed to register shiro FreeMarker tags", e);
        }
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/WebXssFilter.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/custom/WebXssFilter.java | package com.roncoo.jui.web.custom;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import org.springframework.web.util.HtmlUtils;
/**
* 防止XSS攻击的过滤器
*
* @author wujing
*/
@WebFilter(filterName = "webXssFilter", urlPatterns = "/admin/*")
public class WebXssFilter implements Filter {

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No initialisation required.
    }

    /** Wraps each /admin request so parameter reads go through the escaping wrapper. */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        chain.doFilter(new XssHttpServletRequestWrapper((HttpServletRequest) request), response);
    }

    @Override
    public void destroy() {
        // Nothing to release.
    }
}
/**
 * Request wrapper that HTML-escapes and trims parameter values to neutralise
 * reflected XSS payloads before they reach controllers.
 *
 * @author wujing
 */
class XssHttpServletRequestWrapper extends HttpServletRequestWrapper {

    /**
     * @param request the original request to wrap
     */
    public XssHttpServletRequestWrapper(HttpServletRequest request) {
        super(request);
    }

    /**
     * Single-value lookup. Previously unfiltered, which let XSS payloads
     * through any code path calling getParameter(); now escaped and trimmed
     * the same way as getParameterValues().
     */
    @Override
    public String getParameter(String name) {
        String value = super.getParameter(name);
        return value == null ? null : HtmlUtils.htmlEscape(value).trim();
    }

    @Override
    public String[] getParameterValues(String name) {
        String[] values = super.getParameterValues(name);
        if (values != null) {
            int length = values.length;
            String[] escapseValues = new String[length];
            for (int i = 0; i < length; i++) {
                // 防xss攻击和过滤前后空格
                escapseValues[i] = HtmlUtils.htmlEscape(values[i]).trim();
            }
            return escapseValues;
        }
        return super.getParameterValues(name);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/WebSiteService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/WebSiteService.java | package com.roncoo.jui.web.service;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import com.roncoo.jui.common.dao.WebSiteDao;
import com.roncoo.jui.common.dao.WebSiteUrlDao;
import com.roncoo.jui.common.entity.WebSite;
import com.roncoo.jui.common.entity.WebSiteExample;
import com.roncoo.jui.common.entity.WebSiteExample.Criteria;
import com.roncoo.jui.common.entity.WebSiteUrl;
import com.roncoo.jui.common.entity.WebSiteUrlExample;
import com.roncoo.jui.common.util.ArrayListUtil;
import com.roncoo.jui.common.util.ConfUtil;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.SqlUtil;
import com.roncoo.jui.common.util.base.Base;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.WebSiteQO;
import com.roncoo.jui.web.bean.vo.WebSiteUrlVO;
import com.roncoo.jui.web.bean.vo.WebSiteVO;
import com.xiaoleilu.hutool.crypto.SecureUtil;
import com.xiaoleilu.hutool.util.ObjectUtil;
/**
* 网址汇总
*
* @author wujing
* @since 2017-11-22
*/
@Component
public class WebSiteService extends Base {
@Autowired
private WebSiteDao dao;
@Autowired
private WebSiteUrlDao webSiteUrlDao;
public Page<WebSiteVO> listForPage(int pageCurrent, int pageSize, WebSiteQO qo) {
WebSiteExample example = new WebSiteExample();
Criteria c = example.createCriteria();
if (StringUtils.hasText(qo.getSiteName())) {
c.andSiteNameLike(SqlUtil.like(qo.getSiteName()));
}
example.setOrderByClause(" status_id desc, sort desc, id desc ");
Page<WebSite> page = dao.listForPage(pageCurrent, pageSize, example);
return PageUtil.transform(page, WebSiteVO.class);
}
public int save(WebSiteQO qo, MultipartFile file) {
if (!file.isEmpty()) {
// 上传
String fileName = file.getOriginalFilename();// 文件名
String filePath = ConfUtil.FILEPATH;
filePath = filePath + SecureUtil.simpleUUID() + fileName.substring(fileName.lastIndexOf(".")); // 注意,linux下文件名为中文的情况
logger.warn("当前上传的文件名为:{},上传的目录位置:{}", fileName, filePath);
File dest = new File(filePath);
if (!dest.getParentFile().exists()) {
// 判断文件父目录是否存在
dest.getParentFile().mkdirs();
}
try {
// 保存文件
file.transferTo(dest);
} catch (IllegalStateException | IOException e) {
e.printStackTrace();
}
qo.setSiteLogo(dest.getName());
}
WebSite record = new WebSite();
BeanUtils.copyProperties(qo, record);
return dao.save(record);
}
public int deleteById(Long id) {
return dao.deleteById(id);
}
public WebSiteVO getById(Long id) {
WebSiteVO vo = new WebSiteVO();
WebSite record = dao.getById(id);
BeanUtils.copyProperties(record, vo);
return vo;
}
public int updateById(WebSiteQO qo, MultipartFile file) {
if (!file.isEmpty()) {
// 上传
String fileName = file.getOriginalFilename();// 文件名
String filePath = ConfUtil.FILEPATH;
filePath = filePath + SecureUtil.simpleUUID() + fileName.substring(fileName.lastIndexOf(".")); // 注意,linux下文件名为中文的情况
logger.warn("当前上传的文件名为:{},上传的目录位置:{}", fileName, filePath);
File dest = new File(filePath);
if (!dest.getParentFile().exists()) {
// 判断文件父目录是否存在
dest.getParentFile().mkdirs();
}
try {
// 保存文件
file.transferTo(dest);
} catch (IllegalStateException | IOException e) {
e.printStackTrace();
}
qo.setSiteLogo(dest.getName());
}
WebSite record = new WebSite();
BeanUtils.copyProperties(qo, record);
return dao.updateById(record);
}
/**
 * Builds the "main" view for one site: the site itself plus all of its URL entries.
 *
 * @param id primary key of the site
 * @return view object; returned empty when the site does not exist
 */
public WebSiteVO main(Long id) {
    WebSiteVO view = new WebSiteVO();
    WebSite site = dao.getById(id);
    if (!ObjectUtil.isNotNull(site)) {
        // Unknown id: hand back an empty view rather than null.
        return view;
    }
    BeanUtils.copyProperties(site, view);
    // Fetch every URL entry attached to this site.
    WebSiteUrlExample query = new WebSiteUrlExample();
    query.createCriteria().andSiteIdEqualTo(site.getId());
    List<WebSiteUrl> urls = webSiteUrlDao.listByExample(query);
    view.setList(ArrayListUtil.copy(urls, WebSiteUrlVO.class));
    return view;
}
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysMenuRoleService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysMenuRoleService.java | package com.roncoo.jui.web.service;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysMenuRoleDao;
import com.roncoo.jui.common.entity.SysMenuRole;
import com.roncoo.jui.common.entity.SysMenuRoleExample;
import com.roncoo.jui.common.entity.SysMenuRoleExample.Criteria;
import com.roncoo.jui.common.util.ArrayListUtil;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.SysMenuRoleQO;
import com.roncoo.jui.web.bean.vo.SysMenuRoleVO;
import com.xiaoleilu.hutool.util.CollectionUtil;
/**
 * Service for the menu-role relation table.
 *
 * @author wujing
 * @since 2017-10-20
 */
@Component
public class SysMenuRoleService {

    @Autowired
    private SysMenuRoleDao dao;

    /**
     * Pages through all menu-role relations, highest sort value and newest id first.
     */
    public Page<SysMenuRoleVO> listForPage(int pageCurrent, int pageSize, SysMenuRoleQO qo) {
        SysMenuRoleExample example = new SysMenuRoleExample();
        // Criteria is created only for its side effect on the example; the QO carries no filters.
        example.createCriteria();
        example.setOrderByClause(" sort desc, id desc ");
        Page<SysMenuRole> page = dao.listForPage(pageCurrent, pageSize, example);
        return PageUtil.transform(page, SysMenuRoleVO.class);
    }

    /** Inserts one relation copied from the QO; returns the inserted row count. */
    public int save(SysMenuRoleQO qo) {
        SysMenuRole record = new SysMenuRole();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a relation by primary key; returns the deleted row count. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one relation by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public SysMenuRoleVO getById(Long id) {
        SysMenuRoleVO vo = new SysMenuRoleVO();
        SysMenuRole record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a relation by primary key with the values from the QO. */
    public int updateById(SysMenuRoleQO qo) {
        SysMenuRole record = new SysMenuRole();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }

    /** Lists every relation attached to the given role. */
    public List<SysMenuRoleVO> listByRoleId(Long roleId) {
        List<SysMenuRole> list = dao.listByRoleId(roleId);
        return ArrayListUtil.copy(list, SysMenuRoleVO.class);
    }

    /**
     * Joins the ids of the given relations with commas (no trailing comma).
     * An empty list yields an empty string.
     */
    public String getIds(List<SysMenuRoleVO> list) {
        StringBuilder sb = new StringBuilder();
        if (CollectionUtil.isNotEmpty(list)) {
            for (SysMenuRoleVO p : list) {
                sb.append(p.getId()).append(",");
            }
            // Drop the trailing separator appended by the loop above.
            sb.setLength(sb.length() - 1);
        }
        return sb.toString();
    }

    /**
     * Replaces the menu set of a role: deletes the existing relations, then inserts one per id.
     *
     * @param roleId role to rebind
     * @param ids    comma-separated menu ids; blank input is a no-op
     * @return always 1 (kept for interface compatibility)
     */
    public int save(Long roleId, String ids) {
        if (StringUtils.hasText(ids)) {
            // Delete the old bindings first, then add the new ones.
            dao.deleteByRoleId(roleId);
            String[] idStrings = ids.split(",");
            for (String id : idStrings) {
                SysMenuRole entity = new SysMenuRole();
                entity.setMenuId(Long.valueOf(id));
                entity.setRoleId(roleId);
                dao.save(entity);
            }
        }
        return 1;
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysUserService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysUserService.java | package com.roncoo.jui.web.service;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysUserDao;
import com.roncoo.jui.common.entity.SysUser;
import com.roncoo.jui.common.entity.SysUserExample;
import com.roncoo.jui.common.entity.SysUserExample.Criteria;
import com.roncoo.jui.common.enums.UserStatusEnum;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.SqlUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.SysUserQO;
import com.roncoo.jui.web.bean.vo.SysUserVO;
import com.xiaoleilu.hutool.crypto.SecureUtil;
/**
 * Back-office user service.
 *
 * @author wujing
 * @since 2017-10-25
 */
@Component
public class SysUserService {

    @Autowired
    private SysUserDao dao;

    /** Pages through users, optionally filtered by a phone-number LIKE match, newest first. */
    public Page<SysUserVO> listForPage(int pageCurrent, int pageSize, SysUserQO qo) {
        SysUserExample example = new SysUserExample();
        Criteria criteria = example.createCriteria();
        String phone = qo.getUserPhone();
        if (StringUtils.hasText(phone)) {
            criteria.andUserPhoneLike(SqlUtil.like(phone));
        }
        example.setOrderByClause(" id desc ");
        return PageUtil.transform(dao.listForPage(pageCurrent, pageSize, example), SysUserVO.class);
    }

    /**
     * Creates a user in NORMAL status with a freshly generated random salt;
     * the stored password is md5(salt + rawPassword).
     */
    public int save(SysUserQO qo) {
        SysUser user = new SysUser();
        BeanUtils.copyProperties(qo, user);
        user.setUserStatus(UserStatusEnum.NORMAL.getCode());
        user.setSalt(SecureUtil.simpleUUID());
        user.setPwd(SecureUtil.md5(user.getSalt() + qo.getPwd()));
        return dao.save(user);
    }

    /** Deletes a user by primary key; returns the deleted row count. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /** Loads one user by primary key and maps it onto a view object. */
    public SysUserVO getById(Long id) {
        SysUserVO view = new SysUserVO();
        SysUser user = dao.getById(id);
        BeanUtils.copyProperties(user, view);
        return view;
    }

    /** Updates a user by primary key with the values from the QO. */
    public int updateById(SysUserQO qo) {
        SysUser user = new SysUser();
        BeanUtils.copyProperties(qo, user);
        return dao.updateById(user);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcDataDictionaryListService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcDataDictionaryListService.java | package com.roncoo.jui.web.service;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.roncoo.jui.common.dao.RcDataDictionaryListDao;
import com.roncoo.jui.common.entity.RcDataDictionaryList;
import com.roncoo.jui.common.entity.RcDataDictionaryListExample;
import com.roncoo.jui.common.entity.RcDataDictionaryListExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.RcDataDictionaryListQO;
import com.roncoo.jui.web.bean.vo.RcDataDictionaryListVO;
/**
 * Service for data-dictionary detail rows.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Component
public class RcDataDictionaryListService {

    @Autowired
    private RcDataDictionaryListDao dao;

    /** Pages through all detail rows, newest first. */
    public Page<RcDataDictionaryListVO> listForPage(int pageCurrent, int pageSize, RcDataDictionaryListQO qo) {
        RcDataDictionaryListExample example = new RcDataDictionaryListExample();
        // Criteria is created only for its side effect on the example; the QO carries no filters.
        example.createCriteria();
        example.setOrderByClause(" id desc ");
        Page<RcDataDictionaryList> page = dao.listForPage(pageCurrent, pageSize, example);
        return PageUtil.transform(page, RcDataDictionaryListVO.class);
    }

    /** Inserts one detail row copied from the QO. */
    public int save(RcDataDictionaryListQO qo) {
        RcDataDictionaryList record = new RcDataDictionaryList();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a detail row by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one detail row by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public RcDataDictionaryListVO getById(Long id) {
        RcDataDictionaryListVO vo = new RcDataDictionaryListVO();
        RcDataDictionaryList record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a detail row by primary key with the values from the QO. */
    public int updateById(RcDataDictionaryListQO qo) {
        RcDataDictionaryList record = new RcDataDictionaryList();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysRoleService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysRoleService.java | package com.roncoo.jui.web.service;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysRoleDao;
import com.roncoo.jui.common.entity.SysRole;
import com.roncoo.jui.common.entity.SysRoleExample;
import com.roncoo.jui.common.entity.SysRoleExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.SysRoleQO;
import com.roncoo.jui.web.bean.qo.SysRoleUserQO;
import com.roncoo.jui.web.bean.vo.SysRoleUserVO;
import com.roncoo.jui.web.bean.vo.SysRoleVO;
/**
 * Role service.
 *
 * @author wujing
 * @since 2017-10-20
 */
@Component
public class SysRoleService {

    @Autowired
    private SysRoleDao dao;

    /** Pages through roles, optionally filtered by an exact role-name match. */
    public Page<SysRoleVO> listForPage(int pageCurrent, int pageSize, SysRoleQO qo) {
        SysRoleExample example = new SysRoleExample();
        Criteria c = example.createCriteria();
        if (StringUtils.hasText(qo.getRoleName())) {
            c.andRoleNameEqualTo(qo.getRoleName());
        }
        example.setOrderByClause("status_id desc, sort desc, id desc ");
        Page<SysRole> page = dao.listForPage(pageCurrent, pageSize, example);
        return PageUtil.transform(page, SysRoleVO.class);
    }

    /** Inserts one role copied from the QO. */
    public int save(SysRoleQO qo) {
        SysRole record = new SysRole();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a role by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one role by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public SysRoleVO getById(Long id) {
        SysRoleVO vo = new SysRoleVO();
        SysRole record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a role by primary key with the values from the QO. */
    public int updateById(SysRoleQO qo) {
        SysRole record = new SysRole();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }

    /**
     * Pages through all roles and marks each one ({@code isShow} = 1/0) depending on
     * whether it appears in the user's current role bindings.
     *
     * @param list the user's existing role bindings to check against
     */
    public Page<SysRoleVO> checkUserByRole(int pageCurrent, int pageSize, SysRoleUserQO qo, List<SysRoleUserVO> list) {
        SysRoleExample example = new SysRoleExample();
        // Criteria is created only for its side effect on the example; no filters apply here.
        example.createCriteria();
        example.setOrderByClause(" id desc ");
        Page<SysRoleVO> page = PageUtil.transform(dao.listForPage(pageCurrent, pageSize, example), SysRoleVO.class);
        for (SysRoleVO roleVo : page.getList()) {
            Integer isShow = 0;
            for (SysRoleUserVO roleUserVo : list) {
                if (roleVo.getId().equals(roleUserVo.getRoleId())) {
                    isShow = 1;
                    break;
                }
            }
            roleVo.setIsShow(isShow);
        }
        return page;
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcDataDictionaryService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcDataDictionaryService.java | package com.roncoo.jui.web.service;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.RcDataDictionaryDao;
import com.roncoo.jui.common.entity.RcDataDictionary;
import com.roncoo.jui.common.entity.RcDataDictionaryExample;
import com.roncoo.jui.common.entity.RcDataDictionaryExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.RcDataDictionaryQO;
import com.roncoo.jui.web.bean.vo.RcDataDictionaryVO;
/**
 * Data-dictionary service.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Component
public class RcDataDictionaryService {

    @Autowired
    private RcDataDictionaryDao dao;

    /**
     * Pages through dictionary entries with an optional caller-supplied sort column/direction.
     *
     * SECURITY NOTE(review): orderField and orderDirection are concatenated verbatim into the
     * ORDER BY clause. If these values can originate from the HTTP request they must be
     * whitelisted against known column names, otherwise this is a SQL injection vector.
     */
    public Page<RcDataDictionaryVO> listForPage(int pageCurrent, int pageSize, String orderField, String orderDirection, RcDataDictionaryQO qo) {
        RcDataDictionaryExample example = new RcDataDictionaryExample();
        // Criteria is created only for its side effect on the example; the QO carries no filters.
        example.createCriteria();
        // Optional per-column sort, always falling back to id desc.
        StringBuilder orderByClause = new StringBuilder();
        if (StringUtils.hasText(orderField)) {
            orderByClause.append(orderField).append(" ").append(orderDirection).append(", ");
        }
        example.setOrderByClause(orderByClause.append(" id desc ").toString());
        Page<RcDataDictionary> page = dao.listForPage(pageCurrent, pageSize, example);
        // Echo the sort back so the UI can keep its column state.
        page.setOrderField(orderField);
        page.setOrderDirection(orderDirection);
        return PageUtil.transform(page, RcDataDictionaryVO.class);
    }

    /** Inserts one dictionary entry copied from the QO. */
    public int save(RcDataDictionaryQO qo) {
        RcDataDictionary record = new RcDataDictionary();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a dictionary entry by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one dictionary entry by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public RcDataDictionaryVO getById(Long id) {
        RcDataDictionaryVO vo = new RcDataDictionaryVO();
        RcDataDictionary record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a dictionary entry by primary key with the values from the QO. */
    public int updateById(RcDataDictionaryQO qo) {
        RcDataDictionary record = new RcDataDictionary();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysMenuService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysMenuService.java | package com.roncoo.jui.web.service;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysMenuDao;
import com.roncoo.jui.common.entity.SysMenu;
import com.roncoo.jui.common.entity.SysMenuExample;
import com.roncoo.jui.common.entity.SysMenuExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.SysMenuQO;
import com.roncoo.jui.web.bean.vo.SysMenuRoleVO;
import com.roncoo.jui.web.bean.vo.SysMenuVO;
import com.xiaoleilu.hutool.util.CollectionUtil;
/**
 * Menu service: paging, CRUD, and recursive menu-tree assembly filtered by role.
 *
 * @author wujing
 * @since 2017-10-25
 */
@Component
public class SysMenuService {

    @Autowired
    private SysMenuDao dao;

    /**
     * Pages through menus. With a name filter, matches by exact name across all levels;
     * without one, lists only top-level menus (parentId = 0) and attaches each subtree.
     */
    public Page<SysMenuVO> listForPage(int pageCurrent, int pageSize, SysMenuQO qo) {
        SysMenuExample example = new SysMenuExample();
        Criteria c = example.createCriteria();
        if (StringUtils.hasText(qo.getMenuName())) {
            c.andMenuNameEqualTo(qo.getMenuName());
        } else {
            c.andParentIdEqualTo(0L);
        }
        example.setOrderByClause(" sort desc, id desc ");
        Page<SysMenu> page = dao.listForPage(pageCurrent, pageSize, example);
        Page<SysMenuVO> p = PageUtil.transform(page, SysMenuVO.class);
        if (!StringUtils.hasText(qo.getMenuName())) {
            // No name filter: expand each top-level menu into its full subtree.
            if (CollectionUtil.isNotEmpty(p.getList())) {
                for (SysMenuVO sm : p.getList()) {
                    sm.setList(recursionList(sm.getId()));
                }
            }
        }
        return p;
    }

    /**
     * Recursively collects the menu subtree rooted at the given parent id.
     *
     * @param parentId parent menu id (0 for top level)
     * @return child menus, each with its own children attached; empty when there are none
     */
    private List<SysMenuVO> recursionList(Long parentId) {
        List<SysMenuVO> lists = new ArrayList<>();
        List<SysMenu> list = dao.listByParentId(parentId);
        if (CollectionUtil.isNotEmpty(list)) {
            for (SysMenu m : list) {
                SysMenuVO vo = new SysMenuVO();
                BeanUtils.copyProperties(m, vo);
                vo.setList(recursionList(m.getId()));
                lists.add(vo);
            }
        }
        return lists;
    }

    /** Inserts one menu copied from the QO. */
    public int save(SysMenuQO qo) {
        SysMenu record = new SysMenu();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /**
     * Deletes a menu by primary key; refuses (returns -1) while the menu still has children.
     */
    public int deleteById(Long id) {
        List<SysMenu> list = dao.listByParentId(id);
        if (CollectionUtil.isNotEmpty(list)) {
            return -1;
        }
        return dao.deleteById(id);
    }

    /**
     * Loads one menu by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public SysMenuVO getById(Long id) {
        SysMenuVO vo = new SysMenuVO();
        SysMenu record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a menu by primary key with the values from the QO. */
    public int updateById(SysMenuQO qo) {
        SysMenu record = new SysMenu();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }

    /** Builds the menu tree visible to a role, keeping only menus present in the role bindings. */
    public List<SysMenuVO> listMenucByRole(List<SysMenuRoleVO> sysMenuRoleVOList) {
        List<SysMenuVO> list = recursionList(0L);
        List<SysMenuVO> sysMenuVOList = new ArrayList<>();
        listMenu(sysMenuVOList, sysMenuRoleVOList, list);
        return sysMenuVOList;
    }

    /**
     * Copies into sysMenuVOList every menu from {@code list} that the role is bound to,
     * recursing into children of retained menus.
     */
    private List<SysMenuVO> listMenu(List<SysMenuVO> sysMenuVOList, List<SysMenuRoleVO> sysMenuRoleVOList, List<SysMenuVO> list) {
        for (SysMenuVO mv : list) {
            SysMenuVO v = null;
            for (SysMenuRoleVO vo : sysMenuRoleVOList) {
                if (mv.getId().equals(vo.getMenuId())) {
                    v = new SysMenuVO();
                    BeanUtils.copyProperties(mv, v);
                    break;
                }
            }
            // Plain null check replaces the deprecated StringUtils.isEmpty(Object) misuse,
            // which also made the inner null check redundant.
            if (v != null) {
                sysMenuVOList.add(v);
                List<SysMenuVO> l = new ArrayList<>();
                v.setList(l);
                listMenu(l, sysMenuRoleVOList, mv.getList());
            }
        }
        return sysMenuVOList;
    }

    /** Builds the full menu tree with each node flagged ({@code isShow} = 1/0) by role membership. */
    public List<SysMenuVO> checkMenucByRole(List<SysMenuRoleVO> sysMenuRoleVOList) {
        List<SysMenuVO> sysMenuVOList = recursionList(0L);
        checkMenu(sysMenuVOList, sysMenuRoleVOList);
        return sysMenuVOList;
    }

    /** Recursively sets isShow on every node according to the role bindings. */
    private List<SysMenuVO> checkMenu(List<SysMenuVO> sysMenuVOList, List<SysMenuRoleVO> sysMenuRoleVOList) {
        for (SysMenuVO mv : sysMenuVOList) {
            Integer isShow = 0;
            for (SysMenuRoleVO vo : sysMenuRoleVOList) {
                if (mv.getId().equals(vo.getMenuId())) {
                    isShow = 1;
                    break;
                }
            }
            mv.setIsShow(isShow);
            checkMenu(mv.getList(), sysMenuRoleVOList);
        }
        return sysMenuVOList;
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/WebSiteUrlService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/WebSiteUrlService.java | package com.roncoo.jui.web.service;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.WebSiteUrlDao;
import com.roncoo.jui.common.entity.WebSiteUrl;
import com.roncoo.jui.common.entity.WebSiteUrlExample;
import com.roncoo.jui.common.entity.WebSiteUrlExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.SqlUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.WebSiteUrlQO;
import com.roncoo.jui.web.bean.vo.WebSiteUrlVO;
/**
 * Service for per-site URL entries.
 *
 * @author wujing
 * @since 2017-11-22
 */
@Component
public class WebSiteUrlService {

    @Autowired
    private WebSiteUrlDao dao;

    /** Pages through the URL entries of one site, optionally filtered by a name LIKE match. */
    public Page<WebSiteUrlVO> listForPage(int pageCurrent, int pageSize, WebSiteUrlQO qo) {
        WebSiteUrlExample example = new WebSiteUrlExample();
        Criteria criteria = example.createCriteria();
        criteria.andSiteIdEqualTo(qo.getSiteId());
        String urlName = qo.getUrlName();
        if (StringUtils.hasText(urlName)) {
            criteria.andUrlNameLike(SqlUtil.like(urlName));
        }
        example.setOrderByClause(" status_id desc, sort desc, id desc ");
        return PageUtil.transform(dao.listForPage(pageCurrent, pageSize, example), WebSiteUrlVO.class);
    }

    /** Inserts one URL entry copied from the QO. */
    public int save(WebSiteUrlQO qo) {
        WebSiteUrl entity = new WebSiteUrl();
        BeanUtils.copyProperties(qo, entity);
        return dao.save(entity);
    }

    /** Deletes a URL entry by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /** Loads one URL entry by primary key and maps it onto a view object. */
    public WebSiteUrlVO getById(Long id) {
        WebSiteUrlVO view = new WebSiteUrlVO();
        WebSiteUrl entity = dao.getById(id);
        BeanUtils.copyProperties(entity, view);
        return view;
    }

    /** Updates a URL entry by primary key with the values from the QO. */
    public int updateById(WebSiteUrlQO qo) {
        WebSiteUrl entity = new WebSiteUrl();
        BeanUtils.copyProperties(qo, entity);
        return dao.updateById(entity);
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysRoleUserService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/SysRoleUserService.java | package com.roncoo.jui.web.service;
import java.util.List;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.roncoo.jui.common.dao.SysRoleUserDao;
import com.roncoo.jui.common.entity.SysRoleUser;
import com.roncoo.jui.common.entity.SysRoleUserExample;
import com.roncoo.jui.common.entity.SysRoleUserExample.Criteria;
import com.roncoo.jui.common.util.ArrayListUtil;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.SysRoleUserQO;
import com.roncoo.jui.web.bean.vo.SysRoleUserVO;
/**
 * Service for the role-user relation table.
 *
 * @author wujing
 * @since 2017-10-20
 */
@Component
public class SysRoleUserService {

    @Autowired
    private SysRoleUserDao dao;

    /** Pages through all role-user relations, newest first. */
    public Page<SysRoleUserVO> listForPage(int pageCurrent, int pageSize, SysRoleUserQO qo) {
        SysRoleUserExample example = new SysRoleUserExample();
        // Criteria is created only for its side effect on the example; the QO carries no filters.
        example.createCriteria();
        example.setOrderByClause(" id desc ");
        Page<SysRoleUser> page = dao.listForPage(pageCurrent, pageSize, example);
        return PageUtil.transform(page, SysRoleUserVO.class);
    }

    /** Inserts one relation copied from the QO. */
    public int save(SysRoleUserQO qo) {
        SysRoleUser record = new SysRoleUser();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a relation by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one relation by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public SysRoleUserVO getById(Long id) {
        SysRoleUserVO vo = new SysRoleUserVO();
        SysRoleUser record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a relation by primary key with the values from the QO. */
    public int updateById(SysRoleUserQO qo) {
        SysRoleUser record = new SysRoleUser();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }

    /** Lists every relation attached to the given user. */
    public List<SysRoleUserVO> listByUserId(Long userId) {
        List<SysRoleUser> list = dao.listByUserId(userId);
        return ArrayListUtil.copy(list, SysRoleUserVO.class);
    }

    /**
     * Replaces the role set of a user: deletes the existing relations, then inserts one per id.
     *
     * @param userId user to rebind
     * @param ids    comma-separated role ids; blank input is a no-op
     * @return always 1 (kept for interface compatibility)
     */
    public int save(Long userId, String ids) {
        if (StringUtils.hasText(ids)) {
            // Delete the old bindings first, then add the new ones.
            dao.deleteByUserId(userId);
            // Split the concatenated role-id string.
            String[] idStrings = ids.split(",");
            for (String id : idStrings) {
                SysRoleUser sysRoleUser = new SysRoleUser();
                sysRoleUser.setUserId(userId);
                sysRoleUser.setRoleId(Long.parseLong(id));
                dao.save(sysRoleUser);
            }
        }
        return 1;
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcReportService.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/service/RcReportService.java | package com.roncoo.jui.web.service;
import java.io.IOException;
import java.net.URLEncoder;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.roncoo.jui.common.dao.RcReportDao;
import com.roncoo.jui.common.entity.RcReport;
import com.roncoo.jui.common.entity.RcReportExample;
import com.roncoo.jui.common.entity.RcReportExample.Criteria;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.excel.ReportExcelUtil;
import com.roncoo.jui.common.util.jui.Page;
import com.roncoo.jui.web.bean.qo.RcReportQO;
import com.roncoo.jui.web.bean.vo.RcReportVO;
/**
 * Report service: CRUD plus a fixed-window Excel export.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Component
public class RcReportService {

    @Autowired
    private RcReportDao dao;

    /** Pages through report rows, newest first. */
    public Page<RcReportVO> listForPage(int pageCurrent, int pageSize, RcReportQO qo) {
        RcReportExample example = new RcReportExample();
        // Criteria is created only for its side effect on the example; the QO carries no filters.
        example.createCriteria();
        example.setOrderByClause(" id desc ");
        Page<RcReport> page = dao.listForPage(pageCurrent, pageSize, example);
        return PageUtil.transform(page, RcReportVO.class);
    }

    /** Inserts one report row copied from the QO. */
    public int save(RcReportQO qo) {
        RcReport record = new RcReport();
        BeanUtils.copyProperties(qo, record);
        return dao.save(record);
    }

    /** Deletes a report row by primary key. */
    public int deleteById(Long id) {
        return dao.deleteById(id);
    }

    /**
     * Loads one report row by primary key.
     * NOTE(review): dao.getById may return null for an unknown id, which would make copyProperties throw.
     */
    public RcReportVO getById(Long id) {
        RcReportVO vo = new RcReportVO();
        RcReport record = dao.getById(id);
        BeanUtils.copyProperties(record, vo);
        return vo;
    }

    /** Updates a report row by primary key with the values from the QO. */
    public int updateById(RcReportQO qo) {
        RcReport record = new RcReport();
        BeanUtils.copyProperties(qo, record);
        return dao.updateById(record);
    }

    /**
     * Streams the first page of report rows to the client as an .xlsx attachment.
     * Exports at most the first 200 rows (fixed window); writes nothing when there is no data.
     *
     * @param response servlet response to write the workbook to
     * @throws IOException if the response output stream cannot be written
     */
    public void exportExcel(HttpServletResponse response) throws IOException {
        // Fetch the export window.
        RcReportExample example = new RcReportExample();
        Page<RcReport> page = dao.listForPage(1, 200, example);
        if (page.getList().size() > 0) {
            // Force a download rather than inline rendering.
            response.setContentType("application/vnd.ms-excel;charset=utf-8");
            // URL-encode the attachment file name for non-ASCII characters.
            response.setHeader("Content-Disposition", "attachment; filename=" + URLEncoder.encode("龙果学院-报表测试", "utf-8") + ".xlsx");
            ReportExcelUtil.exportExcel("龙果学院", new String[] { "用户邮箱", "用户昵称" }, page.getList(), response.getOutputStream());
        }
    }
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcDataDictionaryQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcDataDictionaryQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for the data dictionary.
 * </p>
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcDataDictionaryQO implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Primary key.
 */
private Long id;
/**
 * Status.
 */
private String statusId;
/**
 * Creation time.
 */
private Date createTime;
/**
 * Last-modified time.
 */
private Date updateTime;
/**
 * Field name.
 */
private String fieldName;
/**
 * Field code.
 */
private String fieldCode;
/**
 * Sort order.
 */
private Integer sort;
/**
 * Remark.
 */
private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysMenuQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysMenuQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for menu information.
 * </p>
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class SysMenuQO implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Primary key.
 */
private Long id;
/**
 * Creation time.
 */
private Date gmtCreate;
/**
 * Last-modified time.
 */
private Date gmtModified;
/**
 * Status.
 */
private String statusId;
/**
 * Sort order.
 */
private Integer sort;
/**
 * Parent menu id (0 for top level, per the service's tree queries).
 */
private Long parentId;
/**
 * Menu name.
 */
private String menuName;
/**
 * Menu URL.
 */
private String menuUrl;
/**
 * Target name.
 */
private String targetName;
/**
 * Menu icon.
 */
private String menuIcon;
/**
 * Remark.
 */
private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/WebSiteUrlQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/WebSiteUrlQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for a site's URL entries.
 * </p>
 *
 * @author wujing
 * @since 2017-11-22
 */
@Data
@Accessors(chain = true)
public class WebSiteUrlQO implements Serializable {
private static final long serialVersionUID = 1L;
// Primary key.
private Long id;
/**
 * Creation time.
 */
private Date gmtCreate;
/**
 * Last-modified time.
 */
private Date gmtModified;
/**
 * Status.
 */
private String statusId;
/**
 * Sort order.
 */
private Integer sort;
// Owning site's primary key.
private Long siteId;
/**
 * Title.
 */
private String urlName;
/**
 * Description.
 */
private String urlDesc;
/**
 * Intranet address.
 */
private String inNet;
/**
 * Internet (external) address.
 */
private String outNet;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysMenuRoleQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysMenuRoleQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for the menu-role relation table.
 * </p>
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class SysMenuRoleQO implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Primary key.
 */
private Long id;
/**
 * Creation time.
 */
private Date gmtCreate;
/**
 * Last-modified time.
 */
private Date gmtModified;
/**
 * Status.
 */
private String statusId;
/**
 * Sort order.
 */
private Integer sort;
/**
 * Menu id.
 */
private Long menuId;
/**
 * Role id.
 */
private Long roleId;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysRoleUserQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysRoleUserQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for the role-user relation table.
 * </p>
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class SysRoleUserQO implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * Primary key.
 */
private Long id;
/**
 * Creation time.
 */
private Date gmtCreate;
/**
 * Last-modified time.
 */
private Date gmtModified;
/**
 * Status.
 */
private String statusId;
/**
 * Sort order.
 */
private Integer sort;
/**
 * Role id.
 */
private Long roleId;
/**
 * User id.
 */
private Long userId;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/WebSiteQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/WebSiteQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * <p>
 * Query object for a web site.
 * </p>
 *
 * @author wujing
 * @since 2017-11-22
 */
@Data
@Accessors(chain = true)
public class WebSiteQO implements Serializable {
private static final long serialVersionUID = 1L;
// Primary key.
private Long id;
/**
 * Creation time.
 */
private Date gmtCreate;
/**
 * Last-modified time.
 */
private Date gmtModified;
/**
 * Status.
 */
private String statusId;
/**
 * Sort order.
 */
private Integer sort;
/**
 * Title.
 */
private String title;
/**
 * Logo (stored file name, set by the service after upload).
 */
private String siteLogo;
/**
 * Site name.
 */
private String siteName;
/**
 * Description.
 */
private String siteDesc;
/**
 * Site URL.
 */
private String siteUrl;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysUserQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysUserQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Query object (QO) for back-office user information.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class SysUserQO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** User account status. */
    private String userStatus;
    /** User phone number. */
    private String userPhone;
    /** User e-mail address. */
    private String userEmail;
    /** Real name. */
    private String userRealname;
    /** Nickname. */
    private String userNickname;
    /** Gender. */
    private String userSex;
    /** Password salt. */
    private String salt;
    /** Password (hashed with {@code salt} — NOTE(review): confirm hashing scheme at the call site). */
    private String pwd;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcReportQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcReportQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Query object (QO) for the report table.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcReportQO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Status flag. */
    private String statusId;
    /** Creation time. */
    private Date createTime;
    /** Last-modified time. */
    private Date updateTime;
    /** Sort order. */
    private Integer sort;
    /** User QQ per the original comment — NOTE(review): field is named userEmail; confirm which value is stored. */
    private String userEmail;
    /** User nickname. */
    private String userNickname;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysRoleQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/SysRoleQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Query object (QO) for role information.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class SysRoleQO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Role name. */
    private String roleName;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcDataDictionaryListQO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/qo/RcDataDictionaryListQO.java | package com.roncoo.jui.web.bean.qo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Query object (QO) for the data-dictionary detail table.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcDataDictionaryListQO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Status flag. */
    private String statusId;
    /** Creation time. */
    private Date createTime;
    /** Last-modified time. */
    private Date updateTime;
    /** Dictionary field code (groups the entries of one dictionary). */
    private String fieldCode;
    /** Entry key. */
    private String fieldKey;
    /** Entry value. */
    private String fieldValue;
    /** Entry sort order. */
    private Integer sort;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/WebSiteUrlVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/WebSiteUrlVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for a website summary address entry.
 *
 * @author wujing
 * @since 2017-11-22
 */
@Data
@Accessors(chain = true)
public class WebSiteUrlVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Parent site ID (owning WebSite record). */
    private Long siteId;
    /** URL title. */
    private String urlName;
    /** URL description. */
    private String urlDesc;
    /** Intranet address. */
    private String inNet;
    /** Public (extranet) address. */
    private String outNet;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysRoleUserVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysRoleUserVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the role-user association table.
 *
 * @author wujing
 * @since 2017-10-21
 */
@Data
@Accessors(chain = true)
public class SysRoleUserVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Role ID. */
    private Long roleId;
    /** Role name (denormalized for display). */
    private String roleName;
    /** User ID. */
    private Long userId;
    /** User name (denormalized for display). */
    private String userName;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysMenuRoleVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysMenuRoleVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the menu-role association table.
 *
 * @author wujing
 * @since 2017-10-21
 */
@Data
@Accessors(chain = true)
public class SysMenuRoleVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Menu ID. */
    private Long menuId;
    /** Menu name (denormalized for display). */
    private String menuName;
    /** Role ID. */
    private Long roleId;
    /** Role name (denormalized for display). */
    private String roleName;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysUserVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysUserVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for back-office user information.
 *
 * @author wujing
 * @since 2017-10-21
 */
@Data
@Accessors(chain = true)
public class SysUserVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** User account status. */
    private String userStatus;
    /** Phone number. */
    private String userPhone;
    /** E-mail address. */
    private String userEmail;
    /** User name. */
    private String userName;
    /** Gender. */
    private String userSex;
    /** Real name. */
    private String userRealname;
    /** Nickname. */
    private String userNickname;
    /** Password salt. */
    private String salt;
    /** Password (hashed with {@code salt} — NOTE(review): confirm hashing scheme at the call site). */
    private String pwd;
    /** Full organization path of IDs. */
    private String orgMergerId;
    /** Full organization path of names. */
    private String orgMergerName;
    /** Avatar image. */
    private String headImage;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcReportVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcReportVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the report table.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcReportVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Status flag. */
    private String statusId;
    /** Creation time. */
    private Date createTime;
    /** Last-modified time. */
    private Date updateTime;
    /** Sort order. */
    private Integer sort;
    /** User QQ per the original comment — NOTE(review): field is named userEmail; confirm which value is stored. */
    private String userEmail;
    /** User nickname. */
    private String userNickname;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcDataDictionaryListVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcDataDictionaryListVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the data-dictionary detail table.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcDataDictionaryListVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Status flag. */
    private String statusId;
    /** Creation time. */
    private Date createTime;
    /** Last-modified time. */
    private Date updateTime;
    /** Dictionary field code (groups the entries of one dictionary). */
    private String fieldCode;
    /** Entry key. */
    private String fieldKey;
    /** Entry value. */
    private String fieldValue;
    /** Entry sort order. */
    private Integer sort;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcDataDictionaryVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/RcDataDictionaryVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the data-dictionary master table.
 *
 * @author wujing
 * @since 2017-11-11
 */
@Data
@Accessors(chain = true)
public class RcDataDictionaryVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Status flag. */
    private String statusId;
    /** Creation time. */
    private Date createTime;
    /** Last-modified time. */
    private Date updateTime;
    /** Field name (human-readable dictionary name). */
    private String fieldName;
    /** Field code (key used by detail entries). */
    private String fieldCode;
    /** Sort order. */
    private Integer sort;
    /** Remark / free-text note. */
    private String remark;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/WebSiteVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/WebSiteVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for the website summary table, including its URL entries.
 *
 * @author wujing
 * @since 2017-11-22
 */
@Data
@Accessors(chain = true)
public class WebSiteVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Title. */
    private String title;
    /** Site logo. */
    private String siteLogo;
    /** Site name. */
    private String siteName;
    /** Site description. */
    private String siteDesc;
    /** Site URL. */
    private String siteUrl;
    /** URL entries belonging to this site. */
    private List<WebSiteUrlVO> list;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysMenuVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysMenuVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for menu information (tree-structured via {@code parentId}).
 *
 * @author wujing
 * @since 2017-10-26
 */
@Data
@Accessors(chain = true)
public class SysMenuVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Parent menu primary key. */
    private Long parentId;
    /** Menu name. */
    private String menuName;
    /** Menu URL path. */
    private String menuUrl;
    /** Target name. */
    private String targetName;
    /** Menu icon. */
    private String menuIcon;
    /** Remark / free-text note. */
    private String remark;
    /** Child menus. */
    private List<SysMenuVO> list;
    /** Display flag — NOTE(review): semantics not documented in source; verify against UI usage. */
    private Integer isShow;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysRoleVO.java | roncoo-jui-springboot-web/src/main/java/com/roncoo/jui/web/bean/vo/SysRoleVO.java | package com.roncoo.jui.web.bean.vo;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * View object (VO) for role information.
 *
 * @author wujing
 * @since 2017-10-21
 */
@Data
@Accessors(chain = true)
public class SysRoleVO implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Primary key. */
    private Long id;
    /** Creation time. */
    private Date gmtCreate;
    /** Last-modified time. */
    private Date gmtModified;
    /** Status flag. */
    private String statusId;
    /** Sort order. */
    private Integer sort;
    /** Role name. */
    private String roleName;
    /** Remark / free-text note. */
    private String remark;
    /** Display flag — NOTE(review): semantics not documented in source; verify against UI usage. */
    private Integer isShow;
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcReportDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcReportDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcReport;
import com.roncoo.jui.common.entity.RcReportExample;
import com.roncoo.jui.common.util.jui.Page;
public interface RcReportDao {
    /** Inserts a new report record; returns the number of rows affected. */
    int save(RcReport record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(RcReport record);
    /** Loads the record with the given primary key. */
    RcReport getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<RcReport> listForPage(int pageCurrent, int pageSize, RcReportExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/DataDictionaryDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/DataDictionaryDao.java | /*
* Copyright 2015-2016 RonCoo(http://www.roncoo.com) Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcDataDictionary;
import com.roncoo.jui.common.util.jui.Page;
/**
 * DAO for the data-dictionary master table.
 *
 * @author wujing
 */
public interface DataDictionaryDao {
    /**
     * Returns one page of dictionary entries.
     *
     * @param currentPage current page number
     * @param numPerPage page size
     * @param orderField column to order by
     * @param orderDirection sort direction
     * @param rcDataDictionary filter criteria
     * @return page of matching entries
     */
    Page<RcDataDictionary> listForPage(int currentPage, int numPerPage, String orderField, String orderDirection, RcDataDictionary rcDataDictionary);
    /**
     * Inserts a new dictionary entry.
     *
     * @param rcDataDictionary record to insert
     * @return number of rows affected
     */
    int insert(RcDataDictionary rcDataDictionary);
    /**
     * Deletes by primary key.
     *
     * @param id primary key
     * @return number of rows affected
     */
    int deleteById(Long id);
    /**
     * Loads by primary key.
     *
     * @param id primary key
     * @return the matching record
     */
    RcDataDictionary selectById(Long id);
    /**
     * Updates by primary key.
     *
     * @param rcDataDictionary record carrying the primary key and new values
     * @return number of rows affected
     */
    int updateById(RcDataDictionary rcDataDictionary);
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcDataDictionaryDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcDataDictionaryDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcDataDictionary;
import com.roncoo.jui.common.entity.RcDataDictionaryExample;
import com.roncoo.jui.common.util.jui.Page;
public interface RcDataDictionaryDao {
    /** Inserts a new dictionary record; returns the number of rows affected. */
    int save(RcDataDictionary record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(RcDataDictionary record);
    /** Loads the record with the given primary key. */
    RcDataDictionary getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<RcDataDictionary> listForPage(int pageCurrent, int pageSize, RcDataDictionaryExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/WebSiteDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/WebSiteDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.WebSite;
import com.roncoo.jui.common.entity.WebSiteExample;
import com.roncoo.jui.common.util.jui.Page;
import java.util.List;
public interface WebSiteDao {
    /** Inserts a new website record; returns the number of rows affected. */
    int save(WebSite record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(WebSite record);
    /** Loads the record with the given primary key. */
    WebSite getById(Long id);
    /** Lists all records matching the example criteria. */
    List<WebSite> listByExample(WebSiteExample example);
    /** Returns one page of records matching the example criteria. */
    Page<WebSite> listForPage(int pageCurrent, int pageSize, WebSiteExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/DataDictionaryListDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/DataDictionaryListDao.java | /*
* Copyright 2015-2016 RonCoo(http://www.roncoo.com) Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcDataDictionaryList;
import com.roncoo.jui.common.util.jui.Page;
/**
 * DAO for the data-dictionary detail table.
 *
 * @author wujing
 */
public interface DataDictionaryListDao {
    /**
     * Returns one page of detail entries for a dictionary.
     *
     * @param currentPage current page number
     * @param numPerPage page size
     * @param fieldCode dictionary field code to filter on
     * @param rcDataDictionaryList additional filter criteria
     * @return page of matching entries
     */
    Page<RcDataDictionaryList> listForPage(int currentPage, int numPerPage, String fieldCode, RcDataDictionaryList rcDataDictionaryList);
    /**
     * Inserts a new detail entry.
     *
     * @param rcDataDictionaryList record to insert
     * @return number of rows affected
     */
    int insert(RcDataDictionaryList rcDataDictionaryList);
    /**
     * Deletes by primary key.
     *
     * @param id primary key
     * @return number of rows affected
     */
    int deleteById(Long id);
    /**
     * Deletes every detail entry of the given dictionary field code.
     *
     * @param fieldCode dictionary field code
     * @return number of rows affected
     */
    int deleteByFieldCode(String fieldCode);
    /**
     * Loads by primary key.
     *
     * @param id primary key
     * @return the matching record
     */
    RcDataDictionaryList selectById(Long id);
    /**
     * Updates by primary key.
     *
     * @param rcDataDictionaryList record carrying the primary key and new values
     * @return number of rows affected
     */
    int updateById(RcDataDictionaryList rcDataDictionaryList);
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysMenuDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysMenuDao.java | package com.roncoo.jui.common.dao;
import java.util.List;
import com.roncoo.jui.common.entity.SysMenu;
import com.roncoo.jui.common.entity.SysMenuExample;
import com.roncoo.jui.common.util.jui.Page;
public interface SysMenuDao {
    /** Inserts a new menu record; returns the number of rows affected. */
    int save(SysMenu record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(SysMenu record);
    /** Loads the record with the given primary key. */
    SysMenu getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<SysMenu> listForPage(int pageCurrent, int pageSize, SysMenuExample example);
    /** Lists the direct children of the given parent menu. */
    List<SysMenu> listByParentId(Long parentId);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysRoleDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysRoleDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.SysRole;
import com.roncoo.jui.common.entity.SysRoleExample;
import com.roncoo.jui.common.util.jui.Page;
public interface SysRoleDao {
    /** Inserts a new role record; returns the number of rows affected. */
    int save(SysRole record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(SysRole record);
    /** Loads the record with the given primary key. */
    SysRole getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<SysRole> listForPage(int pageCurrent, int pageSize, SysRoleExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcDataDictionaryListDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/RcDataDictionaryListDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcDataDictionaryList;
import com.roncoo.jui.common.entity.RcDataDictionaryListExample;
import com.roncoo.jui.common.util.jui.Page;
public interface RcDataDictionaryListDao {
    /** Inserts a new dictionary-detail record; returns the number of rows affected. */
    int save(RcDataDictionaryList record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(RcDataDictionaryList record);
    /** Loads the record with the given primary key. */
    RcDataDictionaryList getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<RcDataDictionaryList> listForPage(int pageCurrent, int pageSize, RcDataDictionaryListExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysMenuRoleDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysMenuRoleDao.java | package com.roncoo.jui.common.dao;
import java.util.List;
import com.roncoo.jui.common.entity.SysMenuRole;
import com.roncoo.jui.common.entity.SysMenuRoleExample;
import com.roncoo.jui.common.util.jui.Page;
public interface SysMenuRoleDao {
    /** Inserts a new menu-role association; returns the number of rows affected. */
    int save(SysMenuRole record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(SysMenuRole record);
    /** Loads the record with the given primary key. */
    SysMenuRole getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<SysMenuRole> listForPage(int pageCurrent, int pageSize, SysMenuRoleExample example);
    /** Lists all menu associations of the given role. */
    List<SysMenuRole> listByRoleId(Long roleId);
    /** Deletes all menu associations of the given role; returns the number of rows affected. */
    int deleteByRoleId(Long roleId);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysRoleUserDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysRoleUserDao.java | package com.roncoo.jui.common.dao;
import java.util.List;
import com.roncoo.jui.common.entity.SysRoleUser;
import com.roncoo.jui.common.entity.SysRoleUserExample;
import com.roncoo.jui.common.util.jui.Page;
public interface SysRoleUserDao {
    /** Inserts a new role-user association; returns the number of rows affected. */
    int save(SysRoleUser record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(SysRoleUser record);
    /** Loads the record with the given primary key. */
    SysRoleUser getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<SysRoleUser> listForPage(int pageCurrent, int pageSize, SysRoleUserExample example);
    /** Lists all role associations of the given user. */
    List<SysRoleUser> listByUserId(Long userId);
    /** Deletes all role associations of the given user; returns the number of rows affected. */
    int deleteByUserId(Long userId);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysUserDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/SysUserDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.SysUser;
import com.roncoo.jui.common.entity.SysUserExample;
import com.roncoo.jui.common.util.jui.Page;
public interface SysUserDao {
    /** Inserts a new user record; returns the number of rows affected. */
    int save(SysUser record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(SysUser record);
    /** Loads the record with the given primary key. */
    SysUser getById(Long id);
    /** Returns one page of records matching the example criteria. */
    Page<SysUser> listForPage(int pageCurrent, int pageSize, SysUserExample example);
    /** Loads the user with the given phone number. */
    SysUser getByUserPhone(String userPhone);
    /** Loads the user with the given e-mail address. */
    SysUser getByUserEmail(String userEmail);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/ReportDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/ReportDao.java | /*
* Copyright 2015-2016 RonCoo(http://www.roncoo.com) Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.RcReport;
import com.roncoo.jui.common.util.jui.Page;
/**
 * DAO for the report table.
 *
 * @author wujing
 */
public interface ReportDao {
    /**
     * Returns one page of reports.
     *
     * @param currentPage current page number
     * @param numPerPage page size
     * @param orderField column to order by
     * @param orderDirection sort direction
     * @param rcReport filter criteria
     * @return page of matching reports
     */
    Page<RcReport> listForPage(int currentPage, int numPerPage, String orderField, String orderDirection, RcReport rcReport);
    /**
     * Inserts a new report.
     *
     * @param rcReport record to insert
     * @return number of rows affected
     */
    Integer insert(RcReport rcReport);
}
| java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/WebSiteUrlDao.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/WebSiteUrlDao.java | package com.roncoo.jui.common.dao;
import com.roncoo.jui.common.entity.WebSiteUrl;
import com.roncoo.jui.common.entity.WebSiteUrlExample;
import com.roncoo.jui.common.util.jui.Page;
import java.util.List;
public interface WebSiteUrlDao {
    /** Inserts a new site-URL record; returns the number of rows affected. */
    int save(WebSiteUrl record);
    /** Deletes the record with the given primary key; returns the number of rows affected. */
    int deleteById(Long id);
    /** Updates the record identified by its primary key; returns the number of rows affected. */
    int updateById(WebSiteUrl record);
    /** Loads the record with the given primary key. */
    WebSiteUrl getById(Long id);
    /** Lists all records matching the example criteria. */
    List<WebSiteUrl> listByExample(WebSiteUrlExample example);
    /** Returns one page of records matching the example criteria. */
    Page<WebSiteUrl> listForPage(int pageCurrent, int pageSize, WebSiteUrlExample example);
}
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/RcDataDictionaryListDaoImpl.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/RcDataDictionaryListDaoImpl.java | package com.roncoo.jui.common.dao.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import com.roncoo.jui.common.dao.RcDataDictionaryListDao;
import com.roncoo.jui.common.entity.RcDataDictionaryList;
import com.roncoo.jui.common.entity.RcDataDictionaryListExample;
import com.roncoo.jui.common.mapper.RcDataDictionaryListMapper;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
@Repository
public class RcDataDictionaryListDaoImpl implements RcDataDictionaryListDao {
@Autowired
private RcDataDictionaryListMapper rcDataDictionaryListMapper;
public int save(RcDataDictionaryList record) {
return this.rcDataDictionaryListMapper.insertSelective(record);
}
public int deleteById(Long id) {
return this.rcDataDictionaryListMapper.deleteByPrimaryKey(id);
}
public int updateById(RcDataDictionaryList record) {
return this.rcDataDictionaryListMapper.updateByPrimaryKeySelective(record);
}
public RcDataDictionaryList getById(Long id) {
return this.rcDataDictionaryListMapper.selectByPrimaryKey(id);
}
public Page<RcDataDictionaryList> listForPage(int pageCurrent, int pageSize, RcDataDictionaryListExample example) {
int count = this.rcDataDictionaryListMapper.countByExample(example);
pageSize = PageUtil.checkPageSize(pageSize);
pageCurrent = PageUtil.checkPageCurrent(count, pageSize, pageCurrent);
int totalPage = PageUtil.countTotalPage(count, pageSize);
example.setLimitStart(PageUtil.countOffset(pageCurrent, pageSize));
example.setPageSize(pageSize);
return new Page<RcDataDictionaryList>(count, totalPage, pageCurrent, pageSize, this.rcDataDictionaryListMapper.selectByExample(example));
}
} | java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/SysMenuRoleDaoImpl.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/SysMenuRoleDaoImpl.java | package com.roncoo.jui.common.dao.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import com.roncoo.jui.common.dao.SysMenuRoleDao;
import com.roncoo.jui.common.entity.SysMenuRole;
import com.roncoo.jui.common.entity.SysMenuRoleExample;
import com.roncoo.jui.common.mapper.SysMenuRoleMapper;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
@Repository
public class SysMenuRoleDaoImpl implements SysMenuRoleDao {
@Autowired
private SysMenuRoleMapper sysMenuRoleMapper;
@Override
public int save(SysMenuRole record) {
return this.sysMenuRoleMapper.insertSelective(record);
}
@Override
public int deleteById(Long id) {
return this.sysMenuRoleMapper.deleteByPrimaryKey(id);
}
@Override
public int updateById(SysMenuRole record) {
return this.sysMenuRoleMapper.updateByPrimaryKeySelective(record);
}
@Override
public SysMenuRole getById(Long id) {
return this.sysMenuRoleMapper.selectByPrimaryKey(id);
}
@Override
public Page<SysMenuRole> listForPage(int pageCurrent, int pageSize, SysMenuRoleExample example) {
int count = this.sysMenuRoleMapper.countByExample(example);
pageSize = PageUtil.checkPageSize(pageSize);
pageCurrent = PageUtil.checkPageCurrent(count, pageSize, pageCurrent);
int totalPage = PageUtil.countTotalPage(count, pageSize);
example.setLimitStart(PageUtil.countOffset(pageCurrent, pageSize));
example.setPageSize(pageSize);
return new Page<SysMenuRole>(count, totalPage, pageCurrent, pageSize, this.sysMenuRoleMapper.selectByExample(example));
}
@Override
public List<SysMenuRole> listByRoleId(Long roleId) {
SysMenuRoleExample example = new SysMenuRoleExample();
example.createCriteria().andRoleIdEqualTo(roleId);
example.setOrderByClause("sort desc, id desc");
return this.sysMenuRoleMapper.selectByExample(example);
}
@Override
public int deleteByRoleId(Long roleId) {
SysMenuRoleExample example = new SysMenuRoleExample();
example.createCriteria().andRoleIdEqualTo(roleId);
return this.sysMenuRoleMapper.deleteByExample(example);
}
} | java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
roncoo/roncoo-jui-springboot | https://github.com/roncoo/roncoo-jui-springboot/blob/bfa5120e427c3a055d20180917acfce528d3d68c/roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/RcReportDaoImpl.java | roncoo-jui-springboot-common/src/main/java/com/roncoo/jui/common/dao/impl/RcReportDaoImpl.java | package com.roncoo.jui.common.dao.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import com.roncoo.jui.common.dao.RcReportDao;
import com.roncoo.jui.common.entity.RcReport;
import com.roncoo.jui.common.entity.RcReportExample;
import com.roncoo.jui.common.mapper.RcReportMapper;
import com.roncoo.jui.common.util.PageUtil;
import com.roncoo.jui.common.util.jui.Page;
@Repository
public class RcReportDaoImpl implements RcReportDao {
@Autowired
private RcReportMapper rcReportMapper;
public int save(RcReport record) {
return this.rcReportMapper.insertSelective(record);
}
public int deleteById(Long id) {
return this.rcReportMapper.deleteByPrimaryKey(id);
}
public int updateById(RcReport record) {
return this.rcReportMapper.updateByPrimaryKeySelective(record);
}
public RcReport getById(Long id) {
return this.rcReportMapper.selectByPrimaryKey(id);
}
public Page<RcReport> listForPage(int pageCurrent, int pageSize, RcReportExample example) {
int count = this.rcReportMapper.countByExample(example);
pageSize = PageUtil.checkPageSize(pageSize);
pageCurrent = PageUtil.checkPageCurrent(count, pageSize, pageCurrent);
int totalPage = PageUtil.countTotalPage(count, pageSize);
example.setLimitStart(PageUtil.countOffset(pageCurrent, pageSize));
example.setPageSize(pageSize);
return new Page<RcReport>(count, totalPage, pageCurrent, pageSize, this.rcReportMapper.selectByExample(example));
}
} | java | Apache-2.0 | bfa5120e427c3a055d20180917acfce528d3d68c | 2026-01-05T02:40:56.023655Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.